diff --git a/nipype/__init__.py b/nipype/__init__.py index 0773845b89..76b2ba58f9 100644 --- a/nipype/__init__.py +++ b/nipype/__init__.py @@ -4,8 +4,12 @@ import os from distutils.version import LooseVersion -from .info import (LONG_DESCRIPTION as __doc__, URL as __url__, STATUS as - __status__, __version__) +from .info import ( + LONG_DESCRIPTION as __doc__, + URL as __url__, + STATUS as __status__, + __version__, +) from .utils.config import NipypeConfig from .utils.logger import Logging from .refs import due @@ -13,6 +17,7 @@ try: import faulthandler + faulthandler.enable() except (ImportError, IOError) as e: pass @@ -26,18 +31,16 @@ def __call__(self, doctests=True, parallel=False): try: import pytest except ImportError: - raise RuntimeError( - 'py.test not installed, run: pip install pytest') + raise RuntimeError("py.test not installed, run: pip install pytest") args = [] if not doctests: - args.extend(['-p', 'no:doctest']) + args.extend(["-p", "no:doctest"]) if parallel: try: import xdist except ImportError: - raise RuntimeError( - "pytest-xdist required for parallel run") - args.append('-n auto') + raise RuntimeError("pytest-xdist required for parallel run") + args.append("-n auto") args.append(os.path.dirname(__file__)) pytest.main(args=args) @@ -51,8 +54,16 @@ def get_info(): from .pipeline import Node, MapNode, JoinNode, Workflow -from .interfaces import (DataGrabber, DataSink, SelectFiles, IdentityInterface, - Rename, Function, Select, Merge) +from .interfaces import ( + DataGrabber, + DataSink, + SelectFiles, + IdentityInterface, + Rename, + Function, + Select, + Merge, +) def check_latest_version(raise_exception=False): @@ -63,11 +74,12 @@ def check_latest_version(raise_exception=False): Raise a RuntimeError if a bad version is being used """ import etelemetry - logger = logging.getLogger('nipype.utils') + + logger = logging.getLogger("nipype.utils") INIT_MSG = "Running {packname} version {version} (latest: {latest})".format - latest = {"version": 'Unknown', "bad_versions": []} + latest = {"version": "Unknown", "bad_versions": []} result = None try: result = etelemetry.get_project("nipy/nipype") @@ -77,24 +89,34 @@ def check_latest_version(raise_exception=False): if result: latest.update(**result) if LooseVersion(__version__) != LooseVersion(latest["version"]): - logger.info(INIT_MSG(packname='nipype', - version=__version__, - latest=latest["version"])) - if latest["bad_versions"] and \ - any([LooseVersion(__version__) == LooseVersion(ver) - for ver in latest["bad_versions"]]): - message = ('You are using a version of Nipype with a critical ' - 'bug. Please use a different version.') + logger.info( + INIT_MSG( + packname="nipype", version=__version__, latest=latest["version"] + ) + ) + if latest["bad_versions"] and any( + [ + LooseVersion(__version__) == LooseVersion(ver) + for ver in latest["bad_versions"] + ] + ): + message = ( + "You are using a version of Nipype with a critical " + "bug. Please use a different version." 
+ ) if raise_exception: raise RuntimeError(message) else: logger.critical(message) return latest + # Run telemetry on import for interactive sessions, such as IPython, Jupyter notebooks, Python REPL -if config.getboolean('execution', 'check_version'): +if config.getboolean("execution", "check_version"): import __main__ - if not hasattr(__main__, '__file__'): + + if not hasattr(__main__, "__file__"): from .interfaces.base import BaseInterface + if BaseInterface._etelemetry_version_data is None: BaseInterface._etelemetry_version_data = check_latest_version() diff --git a/nipype/algorithms/__init__.py b/nipype/algorithms/__init__.py index a2909a3501..b28fc516d2 100644 --- a/nipype/algorithms/__init__.py +++ b/nipype/algorithms/__init__.py @@ -7,4 +7,4 @@ Exaples: artifactdetect """ -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 4e09a1700a..428812b842 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -''' +""" Algorithms to compute confounds in :abbr:`fMRI (functional MRI)` -''' +""" import os import os.path as op from collections import OrderedDict @@ -15,14 +15,21 @@ from .. import config, logging from ..external.due import BibTeX -from ..interfaces.base import (traits, TraitedSpec, BaseInterface, - BaseInterfaceInputSpec, File, isdefined, - InputMultiPath, OutputMultiPath, - SimpleInterface) +from ..interfaces.base import ( + traits, + TraitedSpec, + BaseInterface, + BaseInterfaceInputSpec, + File, + isdefined, + InputMultiPath, + OutputMultiPath, + SimpleInterface, +) from ..utils import NUMPY_MMAP from ..utils.misc import normalize_mc_params -IFLOGGER = logging.getLogger('nipype.interface') +IFLOGGER = logging.getLogger("nipype.interface") def fallback_svd(a, full_matrices=True, compute_uv=True): @@ -32,69 +39,76 @@ def fallback_svd(a, full_matrices=True, compute_uv=True): pass from scipy.linalg import svd - return svd(a, full_matrices=full_matrices, compute_uv=compute_uv, lapack_driver='gesvd') + + return svd( + a, full_matrices=full_matrices, compute_uv=compute_uv, lapack_driver="gesvd" + ) class ComputeDVARSInputSpec(BaseInterfaceInputSpec): - in_file = File( - exists=True, mandatory=True, desc='functional data, after HMC') - in_mask = File(exists=True, mandatory=True, desc='a brain mask') + in_file = File(exists=True, mandatory=True, desc="functional data, after HMC") + in_mask = File(exists=True, mandatory=True, desc="a brain mask") remove_zerovariance = traits.Bool( - True, usedefault=True, desc='remove voxels with zero variance') - save_std = traits.Bool( - True, usedefault=True, desc='save standardized DVARS') - save_nstd = traits.Bool( - False, usedefault=True, desc='save non-standardized DVARS') + True, usedefault=True, desc="remove voxels with zero variance" + ) + save_std = traits.Bool(True, usedefault=True, desc="save standardized DVARS") + save_nstd = traits.Bool(False, usedefault=True, desc="save non-standardized DVARS") save_vxstd = traits.Bool( - False, usedefault=True, desc='save voxel-wise standardized DVARS') - save_all = traits.Bool(False, usedefault=True, desc='output all DVARS') + False, usedefault=True, desc="save voxel-wise standardized DVARS" + ) + save_all = traits.Bool(False, usedefault=True, desc="output all DVARS") - series_tr = traits.Float(desc='repetition time in 
sec.') - save_plot = traits.Bool(False, usedefault=True, desc='write DVARS plot') - figdpi = traits.Int(100, usedefault=True, desc='output dpi for the plot') + series_tr = traits.Float(desc="repetition time in sec.") + save_plot = traits.Bool(False, usedefault=True, desc="write DVARS plot") + figdpi = traits.Int(100, usedefault=True, desc="output dpi for the plot") figsize = traits.Tuple( traits.Float(11.7), traits.Float(2.3), usedefault=True, - desc='output figure size') + desc="output figure size", + ) figformat = traits.Enum( - 'png', 'pdf', 'svg', usedefault=True, desc='output format for figures') + "png", "pdf", "svg", usedefault=True, desc="output format for figures" + ) intensity_normalization = traits.Float( 1000.0, usedefault=True, - desc='Divide value in each voxel at each timepoint ' - 'by the median calculated across all voxels' - 'and timepoints within the mask (if specified)' - 'and then multiply by the value specified by' - 'this parameter. By using the default (1000)' - 'output DVARS will be expressed in ' - 'x10 % BOLD units compatible with Power et al.' - '2012. Set this to 0 to disable intensity' - 'normalization altogether.') + desc="Divide value in each voxel at each timepoint " + "by the median calculated across all voxels" + "and timepoints within the mask (if specified)" + "and then multiply by the value specified by" + "this parameter. By using the default (1000)" + "output DVARS will be expressed in " + "x10 % BOLD units compatible with Power et al." + "2012. Set this to 0 to disable intensity" + "normalization altogether.", + ) class ComputeDVARSOutputSpec(TraitedSpec): - out_std = File(exists=True, desc='output text file') - out_nstd = File(exists=True, desc='output text file') - out_vxstd = File(exists=True, desc='output text file') - out_all = File(exists=True, desc='output text file') + out_std = File(exists=True, desc="output text file") + out_nstd = File(exists=True, desc="output text file") + out_vxstd = File(exists=True, desc="output text file") + out_all = File(exists=True, desc="output text file") avg_std = traits.Float() avg_nstd = traits.Float() avg_vxstd = traits.Float() - fig_std = File(exists=True, desc='output DVARS plot') - fig_nstd = File(exists=True, desc='output DVARS plot') - fig_vxstd = File(exists=True, desc='output DVARS plot') + fig_std = File(exists=True, desc="output DVARS plot") + fig_nstd = File(exists=True, desc="output DVARS plot") + fig_vxstd = File(exists=True, desc="output DVARS plot") class ComputeDVARS(BaseInterface): """ Computes the DVARS. 
""" + input_spec = ComputeDVARSInputSpec output_spec = ComputeDVARSOutputSpec - references_ = [{ - 'entry': - BibTeX("""\ + references_ = [ + { + "entry": BibTeX( + """\ @techreport{nichols_notes_2013, address = {Coventry, UK}, title = {Notes on {Creating} a {Standardized} {Version} of {DVARS}}, @@ -104,11 +118,13 @@ class ComputeDVARS(BaseInterface): institution = {University of Warwick}, author = {Nichols, Thomas}, year = {2013} -}"""), - 'tags': ['method'] - }, { - 'entry': - BibTeX("""\ +}""" + ), + "tags": ["method"], + }, + { + "entry": BibTeX( + """\ @article{power_spurious_2012, title = {Spurious but systematic correlations in functional connectivity {MRI} networks \ arise from subject motion}, @@ -122,9 +138,11 @@ class ComputeDVARS(BaseInterface): year = {2012}, pages = {2142--2154}, } -"""), - 'tags': ['method'] - }] +""" + ), + "tags": ["method"], + }, + ] def __init__(self, **inputs): self._results = {} @@ -133,100 +151,107 @@ def __init__(self, **inputs): def _gen_fname(self, suffix, ext=None): fname, in_ext = op.splitext(op.basename(self.inputs.in_file)) - if in_ext == '.gz': + if in_ext == ".gz": fname, in_ext2 = op.splitext(fname) in_ext = in_ext2 + in_ext if ext is None: ext = in_ext - if ext.startswith('.'): + if ext.startswith("."): ext = ext[1:] - return op.abspath('{}_{}.{}'.format(fname, suffix, ext)) + return op.abspath("{}_{}.{}".format(fname, suffix, ext)) def _run_interface(self, runtime): dvars = compute_dvars( self.inputs.in_file, self.inputs.in_mask, remove_zerovariance=self.inputs.remove_zerovariance, - intensity_normalization=self.inputs.intensity_normalization) + intensity_normalization=self.inputs.intensity_normalization, + ) - (self._results['avg_std'], self._results['avg_nstd'], - self._results['avg_vxstd']) = np.mean( - dvars, axis=1).astype(float) + ( + self._results["avg_std"], + self._results["avg_nstd"], + self._results["avg_vxstd"], + ) = np.mean(dvars, axis=1).astype(float) tr = None if isdefined(self.inputs.series_tr): tr = self.inputs.series_tr if self.inputs.save_std: - out_file = self._gen_fname('dvars_std', ext='tsv') - np.savetxt(out_file, dvars[0], fmt=b'%0.6f') - self._results['out_std'] = out_file + out_file = self._gen_fname("dvars_std", ext="tsv") + np.savetxt(out_file, dvars[0], fmt=b"%0.6f") + self._results["out_std"] = out_file if self.inputs.save_plot: - self._results['fig_std'] = self._gen_fname( - 'dvars_std', ext=self.inputs.figformat) + self._results["fig_std"] = self._gen_fname( + "dvars_std", ext=self.inputs.figformat + ) fig = plot_confound( - dvars[0], - self.inputs.figsize, - 'Standardized DVARS', - series_tr=tr) + dvars[0], self.inputs.figsize, "Standardized DVARS", series_tr=tr + ) fig.savefig( - self._results['fig_std'], + self._results["fig_std"], dpi=float(self.inputs.figdpi), format=self.inputs.figformat, - bbox_inches='tight') + bbox_inches="tight", + ) fig.clf() if self.inputs.save_nstd: - out_file = self._gen_fname('dvars_nstd', ext='tsv') - np.savetxt(out_file, dvars[1], fmt=b'%0.6f') - self._results['out_nstd'] = out_file + out_file = self._gen_fname("dvars_nstd", ext="tsv") + np.savetxt(out_file, dvars[1], fmt=b"%0.6f") + self._results["out_nstd"] = out_file if self.inputs.save_plot: - self._results['fig_nstd'] = self._gen_fname( - 'dvars_nstd', ext=self.inputs.figformat) + self._results["fig_nstd"] = self._gen_fname( + "dvars_nstd", ext=self.inputs.figformat + ) fig = plot_confound( - dvars[1], self.inputs.figsize, 'DVARS', series_tr=tr) + dvars[1], self.inputs.figsize, "DVARS", series_tr=tr + ) fig.savefig( - 
self._results['fig_nstd'], + self._results["fig_nstd"], dpi=float(self.inputs.figdpi), format=self.inputs.figformat, - bbox_inches='tight') + bbox_inches="tight", + ) fig.clf() if self.inputs.save_vxstd: - out_file = self._gen_fname('dvars_vxstd', ext='tsv') - np.savetxt(out_file, dvars[2], fmt=b'%0.6f') - self._results['out_vxstd'] = out_file + out_file = self._gen_fname("dvars_vxstd", ext="tsv") + np.savetxt(out_file, dvars[2], fmt=b"%0.6f") + self._results["out_vxstd"] = out_file if self.inputs.save_plot: - self._results['fig_vxstd'] = self._gen_fname( - 'dvars_vxstd', ext=self.inputs.figformat) + self._results["fig_vxstd"] = self._gen_fname( + "dvars_vxstd", ext=self.inputs.figformat + ) fig = plot_confound( - dvars[2], - self.inputs.figsize, - 'Voxelwise std DVARS', - series_tr=tr) + dvars[2], self.inputs.figsize, "Voxelwise std DVARS", series_tr=tr + ) fig.savefig( - self._results['fig_vxstd'], + self._results["fig_vxstd"], dpi=float(self.inputs.figdpi), format=self.inputs.figformat, - bbox_inches='tight') + bbox_inches="tight", + ) fig.clf() if self.inputs.save_all: - out_file = self._gen_fname('dvars', ext='tsv') + out_file = self._gen_fname("dvars", ext="tsv") np.savetxt( out_file, np.vstack(dvars).T, - fmt=b'%0.8f', - delimiter=b'\t', - header='std DVARS\tnon-std DVARS\tvx-wise std DVARS', - comments='') - self._results['out_all'] = out_file + fmt=b"%0.8f", + delimiter=b"\t", + header="std DVARS\tnon-std DVARS\tvx-wise std DVARS", + comments="", + ) + self._results["out_all"] = out_file return runtime @@ -235,7 +260,7 @@ def _list_outputs(self): class FramewiseDisplacementInputSpec(BaseInterfaceInputSpec): - in_file = File(exists=True, mandatory=True, desc='motion parameters') + in_file = File(exists=True, mandatory=True, desc="motion parameters") parameter_source = traits.Enum( "FSL", "AFNI", @@ -243,33 +268,32 @@ class FramewiseDisplacementInputSpec(BaseInterfaceInputSpec): "FSFAST", "NIPY", desc="Source of movement parameters", - mandatory=True) + mandatory=True, + ) radius = traits.Float( 50, usedefault=True, - desc='radius in mm to calculate angular FDs, 50mm is the ' - 'default since it is used in Power et al. 2012') - out_file = File( - 'fd_power_2012.txt', usedefault=True, desc='output file name') - out_figure = File( - 'fd_power_2012.pdf', usedefault=True, desc='output figure name') - series_tr = traits.Float(desc='repetition time in sec.') - save_plot = traits.Bool(False, usedefault=True, desc='write FD plot') - normalize = traits.Bool( - False, usedefault=True, desc='calculate FD in mm/s') - figdpi = traits.Int( - 100, usedefault=True, desc='output dpi for the FD plot') + desc="radius in mm to calculate angular FDs, 50mm is the " + "default since it is used in Power et al. 
2012", + ) + out_file = File("fd_power_2012.txt", usedefault=True, desc="output file name") + out_figure = File("fd_power_2012.pdf", usedefault=True, desc="output figure name") + series_tr = traits.Float(desc="repetition time in sec.") + save_plot = traits.Bool(False, usedefault=True, desc="write FD plot") + normalize = traits.Bool(False, usedefault=True, desc="calculate FD in mm/s") + figdpi = traits.Int(100, usedefault=True, desc="output dpi for the FD plot") figsize = traits.Tuple( traits.Float(11.7), traits.Float(2.3), usedefault=True, - desc='output figure size') + desc="output figure size", + ) class FramewiseDisplacementOutputSpec(TraitedSpec): - out_file = File(desc='calculated FD per timestep') - out_figure = File(desc='output image file') - fd_average = traits.Float(desc='average FD') + out_file = File(desc="calculated FD per timestep") + out_figure = File(desc="output image file") + fd_average = traits.Float(desc="average FD") class FramewiseDisplacement(BaseInterface): @@ -288,9 +312,10 @@ class FramewiseDisplacement(BaseInterface): input_spec = FramewiseDisplacementInputSpec output_spec = FramewiseDisplacementOutputSpec - references_ = [{ - 'entry': - BibTeX("""\ + references_ = [ + { + "entry": BibTeX( + """\ @article{power_spurious_2012, title = {Spurious but systematic correlations in functional connectivity {MRI} networks \ arise from subject motion}, @@ -304,9 +329,11 @@ class FramewiseDisplacement(BaseInterface): year = {2012}, pages = {2142--2154}, } -"""), - 'tags': ['method'] - }] +""" + ), + "tags": ["method"], + } + ] def _run_interface(self, runtime): mpars = np.loadtxt(self.inputs.in_file) # mpars is N_t x 6 @@ -314,20 +341,19 @@ def _run_interface(self, runtime): func1d=normalize_mc_params, axis=1, arr=mpars, - source=self.inputs.parameter_source) + source=self.inputs.parameter_source, + ) diff = mpars[:-1, :6] - mpars[1:, :6] diff[:, 3:6] *= self.inputs.radius fd_res = np.abs(diff).sum(axis=1) self._results = { - 'out_file': op.abspath(self.inputs.out_file), - 'fd_average': float(fd_res.mean()) + "out_file": op.abspath(self.inputs.out_file), + "fd_average": float(fd_res.mean()), } np.savetxt( - self.inputs.out_file, - fd_res, - header='FramewiseDisplacement', - comments='') + self.inputs.out_file, fd_res, header="FramewiseDisplacement", comments="" + ) if self.inputs.save_plot: tr = None @@ -335,21 +361,23 @@ def _run_interface(self, runtime): tr = self.inputs.series_tr if self.inputs.normalize and tr is None: - IFLOGGER.warning('FD plot cannot be normalized if TR is not set') + IFLOGGER.warning("FD plot cannot be normalized if TR is not set") - self._results['out_figure'] = op.abspath(self.inputs.out_figure) + self._results["out_figure"] = op.abspath(self.inputs.out_figure) fig = plot_confound( fd_res, self.inputs.figsize, - 'FD', - units='mm', + "FD", + units="mm", series_tr=tr, - normalize=self.inputs.normalize) + normalize=self.inputs.normalize, + ) fig.savefig( - self._results['out_figure'], + self._results["out_figure"], dpi=float(self.inputs.figdpi), format=self.inputs.out_figure[-3:], - bbox_inches='tight') + bbox_inches="tight", + ) fig.clf() return runtime @@ -360,104 +388,135 @@ def _list_outputs(self): class CompCorInputSpec(BaseInterfaceInputSpec): realigned_file = File( - exists=True, mandatory=True, desc='already realigned brain image (4D)') + exists=True, mandatory=True, desc="already realigned brain image (4D)" + ) mask_files = InputMultiPath( File(exists=True), - desc=('One or more mask files that determines ' - 'ROI (3D). 
When more that one file is ' - 'provided `merge_method` or ' - '`merge_index` must be provided')) + desc=( + "One or more mask files that determines " + "ROI (3D). When more that one file is " + "provided `merge_method` or " + "`merge_index` must be provided" + ), + ) merge_method = traits.Enum( - 'union', - 'intersect', - 'none', - xor=['mask_index'], - requires=['mask_files'], - desc=('Merge method if multiple masks are ' - 'present - `union` uses voxels included in' - ' at least one input mask, `intersect` ' - 'uses only voxels present in all input ' - 'masks, `none` performs CompCor on ' - 'each mask individually')) + "union", + "intersect", + "none", + xor=["mask_index"], + requires=["mask_files"], + desc=( + "Merge method if multiple masks are " + "present - `union` uses voxels included in" + " at least one input mask, `intersect` " + "uses only voxels present in all input " + "masks, `none` performs CompCor on " + "each mask individually" + ), + ) mask_index = traits.Range( low=0, - xor=['merge_method'], - requires=['mask_files'], - desc=('Position of mask in `mask_files` to use - ' - 'first is the default.')) + xor=["merge_method"], + requires=["mask_files"], + desc=("Position of mask in `mask_files` to use - " "first is the default."), + ) mask_names = traits.List( traits.Str, - desc='Names for provided masks (for printing into metadata). ' - 'If provided, it must be as long as the final mask list ' - '(after any merge and indexing operations).') + desc="Names for provided masks (for printing into metadata). " + "If provided, it must be as long as the final mask list " + "(after any merge and indexing operations).", + ) components_file = traits.Str( - 'components_file.txt', + "components_file.txt", usedefault=True, - desc='Filename to store physiological components') + desc="Filename to store physiological components", + ) num_components = traits.Either( - 'all', traits.Range(low=1), xor=['variance_threshold'], - desc='Number of components to return from the decomposition. If ' - '`num_components` is `all`, then all components will be ' - 'retained.') + "all", + traits.Range(low=1), + xor=["variance_threshold"], + desc="Number of components to return from the decomposition. If " + "`num_components` is `all`, then all components will be " + "retained.", + ) # 6 for BOLD, 4 for ASL # automatically instantiated to 6 in CompCor below if neither # `num_components` nor `variance_threshold` is defined (for # backward compatibility) variance_threshold = traits.Range( - low=0.0, high=1.0, exclude_low=True, exclude_high=True, xor=['num_components'], - desc='Select the number of components to be returned automatically ' - 'based on their ability to explain variance in the dataset. ' - '`variance_threshold` is a fractional value between 0 and 1; ' - 'the number of components retained will be equal to the minimum ' - 'number of components necessary to explain the provided ' - 'fraction of variance in the masked time series.') + low=0.0, + high=1.0, + exclude_low=True, + exclude_high=True, + xor=["num_components"], + desc="Select the number of components to be returned automatically " + "based on their ability to explain variance in the dataset. 
" + "`variance_threshold` is a fractional value between 0 and 1; " + "the number of components retained will be equal to the minimum " + "number of components necessary to explain the provided " + "fraction of variance in the masked time series.", + ) pre_filter = traits.Enum( - 'polynomial', - 'cosine', + "polynomial", + "cosine", False, usedefault=True, - desc='Detrend time series prior to component ' - 'extraction') + desc="Detrend time series prior to component " "extraction", + ) use_regress_poly = traits.Bool( - deprecated='0.15.0', - new_name='pre_filter', - desc=('use polynomial regression ' - 'pre-component extraction')) + deprecated="0.15.0", + new_name="pre_filter", + desc=("use polynomial regression " "pre-component extraction"), + ) regress_poly_degree = traits.Range( - low=1, value=1, usedefault=True, desc='the degree polynomial to use') + low=1, value=1, usedefault=True, desc="the degree polynomial to use" + ) header_prefix = traits.Str( - desc=('the desired header for the output tsv ' - 'file (one column). If undefined, will ' - 'default to "CompCor"')) + desc=( + "the desired header for the output tsv " + "file (one column). If undefined, will " + 'default to "CompCor"' + ) + ) high_pass_cutoff = traits.Float( - 128, - usedefault=True, - desc='Cutoff (in seconds) for "cosine" pre-filter') + 128, usedefault=True, desc='Cutoff (in seconds) for "cosine" pre-filter' + ) repetition_time = traits.Float( - desc='Repetition time (TR) of series - derived from image header if ' - 'unspecified') + desc="Repetition time (TR) of series - derived from image header if " + "unspecified" + ) save_pre_filter = traits.Either( - traits.Bool, File, default=False, usedefault=True, - desc='Save pre-filter basis as text file') + traits.Bool, + File, + default=False, + usedefault=True, + desc="Save pre-filter basis as text file", + ) save_metadata = traits.Either( - traits.Bool, File, default=False, usedefault=True, - desc='Save component metadata as text file') - ignore_initial_volumes = traits.Range( - low=0, + traits.Bool, + File, + default=False, usedefault=True, - desc='Number of volumes at start of series to ignore') + desc="Save component metadata as text file", + ) + ignore_initial_volumes = traits.Range( + low=0, usedefault=True, desc="Number of volumes at start of series to ignore" + ) failure_mode = traits.Enum( - 'error', 'NaN', + "error", + "NaN", usedefault=True, - desc='When no components are found or convergence fails, raise an error ' - 'or silently return columns of NaNs.') + desc="When no components are found or convergence fails, raise an error " + "or silently return columns of NaNs.", + ) class CompCorOutputSpec(TraitedSpec): components_file = File( - exists=True, desc='text file containing the noise components') - pre_filter_file = File(desc='text file containing high-pass filter basis') - metadata_file = File(desc='text file containing component metadata') + exists=True, desc="text file containing the noise components" + ) + pre_filter_file = File(desc="text file containing high-pass filter basis") + metadata_file = File(desc="text file containing component metadata") class CompCor(SimpleInterface): @@ -495,12 +554,14 @@ class CompCor(SimpleInterface): >>> ccinterface.inputs.regress_poly_degree = 2 """ + input_spec = CompCorInputSpec output_spec = CompCorOutputSpec - references_ = [{ - 'tags': ['method', 'implementation'], - 'entry': - BibTeX("""\ + references_ = [ + { + "tags": ["method", "implementation"], + "entry": BibTeX( + """\ @article{compcor_2007, title = {A 
component based noise correction method (CompCor) for BOLD and perfusion based}, volume = {37}, @@ -511,67 +572,82 @@ class CompCor(SimpleInterface): author = {Behzadi, Yashar and Restom, Khaled and Liau, Joy and Liu, Thomas T.}, year = {2007}, pages = {90-101} -}""")}] +}""" + ), + } + ] def __init__(self, *args, **kwargs): - ''' exactly the same as compcor except the header ''' + """ exactly the same as compcor except the header """ super(CompCor, self).__init__(*args, **kwargs) - self._header = 'CompCor' + self._header = "CompCor" def _run_interface(self, runtime): mask_images = [] if isdefined(self.inputs.mask_files): - mask_images = combine_mask_files(self.inputs.mask_files, - self.inputs.merge_method, - self.inputs.mask_index) + mask_images = combine_mask_files( + self.inputs.mask_files, self.inputs.merge_method, self.inputs.mask_index + ) if self.inputs.use_regress_poly: - self.inputs.pre_filter = 'polynomial' + self.inputs.pre_filter = "polynomial" # Degree 0 == remove mean; see compute_noise_components - degree = (self.inputs.regress_poly_degree - if self.inputs.pre_filter == 'polynomial' else 0) + degree = ( + self.inputs.regress_poly_degree + if self.inputs.pre_filter == "polynomial" + else 0 + ) imgseries = nb.load(self.inputs.realigned_file, mmap=NUMPY_MMAP) if len(imgseries.shape) != 4: - raise ValueError('{} expected a 4-D nifti file. Input {} has ' - '{} dimensions (shape {})'.format( - self._header, self.inputs.realigned_file, - len(imgseries.shape), imgseries.shape)) + raise ValueError( + "{} expected a 4-D nifti file. Input {} has " + "{} dimensions (shape {})".format( + self._header, + self.inputs.realigned_file, + len(imgseries.shape), + imgseries.shape, + ) + ) if len(mask_images) == 0: img = nb.Nifti1Image( np.ones(imgseries.shape[:3], dtype=np.bool), affine=imgseries.affine, - header=imgseries.header) + header=imgseries.header, + ) mask_images = [img] skip_vols = self.inputs.ignore_initial_volumes if skip_vols: imgseries = imgseries.__class__( - imgseries.get_data()[..., skip_vols:], imgseries.affine, - imgseries.header) + imgseries.get_data()[..., skip_vols:], + imgseries.affine, + imgseries.header, + ) mask_images = self._process_masks(mask_images, imgseries.get_data()) TR = 0 - if self.inputs.pre_filter == 'cosine': + if self.inputs.pre_filter == "cosine": if isdefined(self.inputs.repetition_time): TR = self.inputs.repetition_time else: # Derive TR from NIfTI header, if possible try: TR = imgseries.header.get_zooms()[3] - if imgseries.header.get_xyzt_units()[1] == 'msec': + if imgseries.header.get_xyzt_units()[1] == "msec": TR /= 1000 except (AttributeError, IndexError): TR = 0 if TR == 0: raise ValueError( - '{} cannot detect repetition time from image - ' - 'Set the repetition_time input'.format(self._header)) + "{} cannot detect repetition time from image - " + "Set the repetition_time input".format(self._header) + ) if isdefined(self.inputs.variance_threshold): components_criterion = self.inputs.variance_threshold @@ -579,91 +655,104 @@ def _run_interface(self, runtime): components_criterion = self.inputs.num_components else: components_criterion = 6 - IFLOGGER.warning('`num_components` and `variance_threshold` are ' - 'not defined. Setting number of components to 6 ' - 'for backward compatibility. Please set either ' - '`num_components` or `variance_threshold`, as ' - 'this feature may be deprecated in the future.') + IFLOGGER.warning( + "`num_components` and `variance_threshold` are " + "not defined. 
Setting number of components to 6 " + "for backward compatibility. Please set either " + "`num_components` or `variance_threshold`, as " + "this feature may be deprecated in the future." + ) components, filter_basis, metadata = compute_noise_components( - imgseries.get_data(), mask_images, components_criterion, - self.inputs.pre_filter, degree, self.inputs.high_pass_cutoff, TR, - self.inputs.failure_mode, self.inputs.mask_names) + imgseries.get_data(), + mask_images, + components_criterion, + self.inputs.pre_filter, + degree, + self.inputs.high_pass_cutoff, + TR, + self.inputs.failure_mode, + self.inputs.mask_names, + ) if skip_vols: old_comp = components nrows = skip_vols + components.shape[0] - components = np.zeros( - (nrows, components.shape[1]), dtype=components.dtype) + components = np.zeros((nrows, components.shape[1]), dtype=components.dtype) components[skip_vols:] = old_comp - components_file = os.path.join(os.getcwd(), - self.inputs.components_file) + components_file = os.path.join(os.getcwd(), self.inputs.components_file) components_header = self._make_headers(components.shape[1]) np.savetxt( components_file, components, fmt=b"%.10f", - delimiter='\t', - header='\t'.join(components_header), - comments='') - self._results['components_file'] = os.path.join( - runtime.cwd, self.inputs.components_file) + delimiter="\t", + header="\t".join(components_header), + comments="", + ) + self._results["components_file"] = os.path.join( + runtime.cwd, self.inputs.components_file + ) save_pre_filter = False - if self.inputs.pre_filter in ['polynomial', 'cosine']: + if self.inputs.pre_filter in ["polynomial", "cosine"]: save_pre_filter = self.inputs.save_pre_filter if save_pre_filter: - self._results['pre_filter_file'] = save_pre_filter + self._results["pre_filter_file"] = save_pre_filter if save_pre_filter is True: - self._results['pre_filter_file'] = os.path.join( - runtime.cwd, 'pre_filter.tsv') + self._results["pre_filter_file"] = os.path.join( + runtime.cwd, "pre_filter.tsv" + ) - ftype = { - 'polynomial': 'Legendre', - 'cosine': 'Cosine' - }[self.inputs.pre_filter] + ftype = {"polynomial": "Legendre", "cosine": "Cosine"}[ + self.inputs.pre_filter + ] ncols = filter_basis.shape[1] if filter_basis.size > 0 else 0 - header = ['{}{:02d}'.format(ftype, i) for i in range(ncols)] + header = ["{}{:02d}".format(ftype, i) for i in range(ncols)] if skip_vols: old_basis = filter_basis # nrows defined above filter_basis = np.zeros( - (nrows, ncols + skip_vols), dtype=filter_basis.dtype) + (nrows, ncols + skip_vols), dtype=filter_basis.dtype + ) if old_basis.size > 0: filter_basis[skip_vols:, :ncols] = old_basis filter_basis[:skip_vols, -skip_vols:] = np.eye(skip_vols) - header.extend([ - 'NonSteadyStateOutlier{:02d}'.format(i) - for i in range(skip_vols) - ]) + header.extend( + ["NonSteadyStateOutlier{:02d}".format(i) for i in range(skip_vols)] + ) np.savetxt( - self._results['pre_filter_file'], + self._results["pre_filter_file"], filter_basis, - fmt=b'%.10f', - delimiter='\t', - header='\t'.join(header), - comments='') + fmt=b"%.10f", + delimiter="\t", + header="\t".join(header), + comments="", + ) metadata_file = self.inputs.save_metadata if metadata_file: - self._results['metadata_file'] = metadata_file + self._results["metadata_file"] = metadata_file if metadata_file is True: - self._results['metadata_file'] = ( - os.path.join(runtime.cwd, 'component_metadata.tsv')) - components_names = np.empty(len(metadata['mask']), - dtype='object_') - retained = np.where(metadata['retained']) - not_retained = 
np.where(np.logical_not(metadata['retained'])) + self._results["metadata_file"] = os.path.join( + runtime.cwd, "component_metadata.tsv" + ) + components_names = np.empty(len(metadata["mask"]), dtype="object_") + retained = np.where(metadata["retained"]) + not_retained = np.where(np.logical_not(metadata["retained"])) components_names[retained] = components_header - components_names[not_retained] = ([ - 'dropped{}'.format(i) for i in range(len(not_retained[0]))]) - with open(self._results['metadata_file'], 'w') as f: - f.write('\t'.join(['component'] + list(metadata.keys())) + '\n') + components_names[not_retained] = [ + "dropped{}".format(i) for i in range(len(not_retained[0])) + ] + with open(self._results["metadata_file"], "w") as f: + f.write("\t".join(["component"] + list(metadata.keys())) + "\n") for i in zip(components_names, *metadata.values()): - f.write('{0[0]}\t{0[1]}\t{0[2]:.10f}\t' - '{0[3]:.10f}\t{0[4]:.10f}\t{0[5]}\n'.format(i)) + f.write( + "{0[0]}\t{0[1]}\t{0[2]:.10f}\t" + "{0[3]:.10f}\t{0[4]:.10f}\t{0[5]}\n".format(i) + ) return runtime @@ -671,9 +760,12 @@ def _process_masks(self, mask_images, timeseries=None): return mask_images def _make_headers(self, num_col): - header = self.inputs.header_prefix if \ - isdefined(self.inputs.header_prefix) else self._header - headers = ['{}{:02d}'.format(header, i) for i in range(num_col)] + header = ( + self.inputs.header_prefix + if isdefined(self.inputs.header_prefix) + else self._header + ) + headers = ["{}{:02d}".format(header, i) for i in range(num_col)] return headers @@ -685,35 +777,35 @@ class ACompCor(CompCor): """ def __init__(self, *args, **kwargs): - ''' exactly the same as compcor except the header ''' + """ exactly the same as compcor except the header """ super(ACompCor, self).__init__(*args, **kwargs) - self._header = 'aCompCor' + self._header = "aCompCor" class TCompCorInputSpec(CompCorInputSpec): # and all the fields in CompCorInputSpec percentile_threshold = traits.Range( - low=0., - high=1., - value=.02, + low=0.0, + high=1.0, + value=0.02, exclude_low=True, exclude_high=True, usedefault=True, - desc='the percentile ' - 'used to select highest-variance ' - 'voxels, represented by a number ' - 'between 0 and 1, exclusive. By ' - 'default, this value is set to .02. ' - 'That is, the 2% of voxels ' - 'with the highest variance are used.') + desc="the percentile " + "used to select highest-variance " + "voxels, represented by a number " + "between 0 and 1, exclusive. By " + "default, this value is set to .02. " + "That is, the 2% of voxels " + "with the highest variance are used.", + ) class TCompCorOutputSpec(CompCorOutputSpec): # and all the fields in CompCorOutputSpec high_variance_masks = OutputMultiPath( - File(exists=True), - desc=(("voxels exceeding the variance" - " threshold"))) + File(exists=True), desc=(("voxels exceeding the variance" " threshold")) + ) class TCompCor(CompCor): @@ -737,9 +829,9 @@ class TCompCor(CompCor): output_spec = TCompCorOutputSpec def __init__(self, *args, **kwargs): - ''' exactly the same as compcor except the header ''' + """ exactly the same as compcor except the header """ super(TCompCor, self).__init__(*args, **kwargs) - self._header = 'tCompCor' + self._header = "tCompCor" self._mask_files = [] def _process_masks(self, mask_images, timeseries=None): @@ -752,25 +844,27 @@ def _process_masks(self, mask_images, timeseries=None): tSTD = _compute_tSTD(imgseries, 0, axis=-1) threshold_std = np.percentile( tSTD, - np.round(100. * - (1. 
- self.inputs.percentile_threshold)).astype(int)) + np.round(100.0 * (1.0 - self.inputs.percentile_threshold)).astype(int), + ) mask_data = np.zeros_like(mask) mask_data[mask != 0] = tSTD >= threshold_std - out_image = nb.Nifti1Image( - mask_data, affine=img.affine, header=img.header) + out_image = nb.Nifti1Image(mask_data, affine=img.affine, header=img.header) # save mask - mask_file = os.path.abspath('mask_{:03d}.nii.gz'.format(i)) + mask_file = os.path.abspath("mask_{:03d}.nii.gz".format(i)) out_image.to_filename(mask_file) - IFLOGGER.debug('tCompcor computed and saved mask of shape %s to ' - 'mask_file %s', str(mask.shape), mask_file) + IFLOGGER.debug( + "tCompcor computed and saved mask of shape %s to " "mask_file %s", + str(mask.shape), + mask_file, + ) self._mask_files.append(mask_file) out_images.append(out_image) return out_images def _list_outputs(self): outputs = super(TCompCor, self)._list_outputs() - outputs['high_variance_masks'] = self._mask_files + outputs["high_variance_masks"] = self._mask_files return outputs @@ -778,35 +872,31 @@ class TSNRInputSpec(BaseInterfaceInputSpec): in_file = InputMultiPath( File(exists=True), mandatory=True, - desc='realigned 4D file or a list of 3D files') - regress_poly = traits.Range(low=1, desc='Remove polynomials') + desc="realigned 4D file or a list of 3D files", + ) + regress_poly = traits.Range(low=1, desc="Remove polynomials") tsnr_file = File( - 'tsnr.nii.gz', - usedefault=True, - hash_files=False, - desc='output tSNR file') + "tsnr.nii.gz", usedefault=True, hash_files=False, desc="output tSNR file" + ) mean_file = File( - 'mean.nii.gz', - usedefault=True, - hash_files=False, - desc='output mean file') + "mean.nii.gz", usedefault=True, hash_files=False, desc="output mean file" + ) stddev_file = File( - 'stdev.nii.gz', - usedefault=True, - hash_files=False, - desc='output tSNR file') + "stdev.nii.gz", usedefault=True, hash_files=False, desc="output tSNR file" + ) detrended_file = File( - 'detrend.nii.gz', + "detrend.nii.gz", usedefault=True, hash_files=False, - desc='input file after detrending') + desc="input file after detrending", + ) class TSNROutputSpec(TraitedSpec): - tsnr_file = File(exists=True, desc='tsnr image file') - mean_file = File(exists=True, desc='mean image file') - stddev_file = File(exists=True, desc='std dev image file') - detrended_file = File(desc='detrended input file') + tsnr_file = File(exists=True, desc="tsnr image file") + mean_file = File(exists=True, desc="mean image file") + stddev_file = File(exists=True, desc="std dev image file") + detrended_file = File(desc="detrended input file") class TSNR(BaseInterface): @@ -823,6 +913,7 @@ class TSNR(BaseInterface): >>> res = tsnr.run() # doctest: +SKIP """ + input_spec = TSNRInputSpec output_spec = TSNROutputSpec @@ -830,33 +921,29 @@ def _run_interface(self, runtime): img = nb.load(self.inputs.in_file[0], mmap=NUMPY_MMAP) header = img.header.copy() vollist = [ - nb.load(filename, mmap=NUMPY_MMAP) - for filename in self.inputs.in_file + nb.load(filename, mmap=NUMPY_MMAP) for filename in self.inputs.in_file ] data = np.concatenate( - [ - vol.get_data().reshape(vol.shape[:3] + (-1, )) - for vol in vollist - ], - axis=3) + [vol.get_data().reshape(vol.shape[:3] + (-1,)) for vol in vollist], axis=3 + ) data = np.nan_to_num(data) - if data.dtype.kind == 'i': + if data.dtype.kind == "i": header.set_data_dtype(np.float32) data = data.astype(np.float32) if isdefined(self.inputs.regress_poly): - data = regress_poly( - self.inputs.regress_poly, data, remove_mean=False)[0] + 
data = regress_poly(self.inputs.regress_poly, data, remove_mean=False)[0] img = nb.Nifti1Image(data, img.affine, header) nb.save(img, op.abspath(self.inputs.detrended_file)) meanimg = np.mean(data, axis=3) stddevimg = np.std(data, axis=3) tsnr = np.zeros_like(meanimg) - stddevimg_nonzero = stddevimg > 1.e-3 - tsnr[stddevimg_nonzero] = meanimg[stddevimg_nonzero] / stddevimg[ - stddevimg_nonzero] + stddevimg_nonzero = stddevimg > 1.0e-3 + tsnr[stddevimg_nonzero] = ( + meanimg[stddevimg_nonzero] / stddevimg[stddevimg_nonzero] + ) img = nb.Nifti1Image(tsnr, img.affine, header) nb.save(img, op.abspath(self.inputs.tsnr_file)) img = nb.Nifti1Image(meanimg, img.affine, header) @@ -867,21 +954,23 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - for k in ['tsnr_file', 'mean_file', 'stddev_file']: + for k in ["tsnr_file", "mean_file", "stddev_file"]: outputs[k] = op.abspath(getattr(self.inputs, k)) if isdefined(self.inputs.regress_poly): - outputs['detrended_file'] = op.abspath(self.inputs.detrended_file) + outputs["detrended_file"] = op.abspath(self.inputs.detrended_file) return outputs class NonSteadyStateDetectorInputSpec(BaseInterfaceInputSpec): - in_file = File(exists=True, mandatory=True, desc='4D NIFTI EPI file') + in_file = File(exists=True, mandatory=True, desc="4D NIFTI EPI file") class NonSteadyStateDetectorOutputSpec(TraitedSpec): - n_volumes_to_discard = traits.Int(desc='Number of non-steady state volumes' - 'detected in the beginning of the scan.') + n_volumes_to_discard = traits.Int( + desc="Number of non-steady state volumes" + "detected in the beginning of the scan." + ) class NonSteadyStateDetector(BaseInterface): @@ -895,10 +984,11 @@ class NonSteadyStateDetector(BaseInterface): def _run_interface(self, runtime): in_nii = nb.load(self.inputs.in_file) - global_signal = in_nii.get_data()[:, :, :, :50].mean(axis=0).mean( - axis=0).mean(axis=0) + global_signal = ( + in_nii.get_data()[:, :, :, :50].mean(axis=0).mean(axis=0).mean(axis=0) + ) - self._results = {'n_volumes_to_discard': is_outlier(global_signal)} + self._results = {"n_volumes_to_discard": is_outlier(global_signal)} return runtime @@ -906,10 +996,9 @@ def _list_outputs(self): return self._results -def compute_dvars(in_file, - in_mask, - remove_zerovariance=False, - intensity_normalization=1000): +def compute_dvars( + in_file, in_mask, remove_zerovariance=False, intensity_normalization=1000 +): """ Compute the :abbr:`DVARS (D referring to temporal derivative of timecourses, VARS referring to RMS variance over voxels)` @@ -957,18 +1046,19 @@ def compute_dvars(in_file, # Robust standard deviation (we are using "lower" interpolation # because this is what FSL is doing - func_sd = (np.percentile(mfunc, 75, axis=1, interpolation="lower") - - np.percentile(mfunc, 25, axis=1, interpolation="lower")) / 1.349 + func_sd = ( + np.percentile(mfunc, 75, axis=1, interpolation="lower") + - np.percentile(mfunc, 25, axis=1, interpolation="lower") + ) / 1.349 if remove_zerovariance: mfunc = mfunc[func_sd != 0, :] func_sd = func_sd[func_sd != 0] # Compute (non-robust) estimate of lag-1 autocorrelation - ar1 = np.apply_along_axis(AR_est_YW, 1, - regress_poly(0, mfunc, - remove_mean=True)[0].astype( - np.float32), 1)[:, 0] + ar1 = np.apply_along_axis( + AR_est_YW, 1, regress_poly(0, mfunc, remove_mean=True)[0].astype(np.float32), 1 + )[:, 0] # Compute (predicted) standard deviation of temporal difference time series diff_sdhat = np.squeeze(np.sqrt(((1 - ar1) * 2).tolist())) * func_sd @@ -984,28 
+1074,25 @@ def compute_dvars(in_file, dvars_stdz = dvars_nstd / diff_sd_mean with warnings.catch_warnings(): # catch, e.g., divide by zero errors - warnings.filterwarnings('error') + warnings.filterwarnings("error") # voxelwise standardization diff_vx_stdz = np.square( - func_diff / np.array([diff_sdhat] * func_diff.shape[-1]).T) + func_diff / np.array([diff_sdhat] * func_diff.shape[-1]).T + ) dvars_vx_stdz = np.sqrt(diff_vx_stdz.mean(axis=0)) return (dvars_stdz, dvars_nstd, dvars_vx_stdz) -def plot_confound(tseries, - figsize, - name, - units=None, - series_tr=None, - normalize=False): +def plot_confound(tseries, figsize, name, units=None, series_tr=None, normalize=False): """ A helper function to plot :abbr:`fMRI (functional MRI)` confounds. """ import matplotlib - matplotlib.use(config.get('execution', 'matplotlib_backend')) + + matplotlib.use(config.get("execution", "matplotlib_backend")) import matplotlib.pyplot as plt from matplotlib.gridspec import GridSpec from matplotlib.backends.backend_pdf import FigureCanvasPdf as FigureCanvas @@ -1024,18 +1111,18 @@ def plot_confound(tseries, ax.set_xlim((0, len(tseries))) ylabel = name if units is not None: - ylabel += (' speed [{}/s]' if normalize else ' [{}]').format(units) + ylabel += (" speed [{}/s]" if normalize else " [{}]").format(units) ax.set_ylabel(ylabel) - xlabel = 'Frame #' + xlabel = "Frame #" if series_tr is not None: - xlabel = 'Frame # ({} sec TR)'.format(series_tr) + xlabel = "Frame # ({} sec TR)".format(series_tr) ax.set_xlabel(xlabel) ylim = ax.get_ylim() ax = fig.add_subplot(grid[0, -1]) sns.distplot(tseries, vertical=True, ax=ax) - ax.set_xlabel('Frames') + ax.set_xlabel("Frames") ax.set_ylim(ylim) ax.set_yticklabels([]) return fig @@ -1063,7 +1150,7 @@ def is_outlier(points, thresh=3.5): if len(points.shape) == 1: points = points[:, None] median = np.median(points, axis=0) - diff = np.sum((points - median)**2, axis=-1) + diff = np.sum((points - median) ** 2, axis=-1) diff = np.sqrt(diff) med_abs_deviation = np.median(diff) @@ -1079,11 +1166,12 @@ def is_outlier(points, thresh=3.5): return timepoints_to_discard -def cosine_filter(data, timestep, period_cut, remove_mean=True, axis=-1, - failure_mode='error'): +def cosine_filter( + data, timestep, period_cut, remove_mean=True, axis=-1, failure_mode="error" +): datashape = data.shape timepoints = datashape[axis] - if datashape[0] == 0 and failure_mode != 'error': + if datashape[0] == 0 and failure_mode != "error": return data, np.array([]) data = data.reshape((-1, timepoints)) @@ -1103,8 +1191,7 @@ def cosine_filter(data, timestep, period_cut, remove_mean=True, axis=-1, return residuals.reshape(datashape), non_constant_regressors -def regress_poly(degree, data, remove_mean=True, axis=-1, - failure_mode='error'): +def regress_poly(degree, data, remove_mean=True, axis=-1, failure_mode="error"): """ Returns data with degree polynomial regressed out. 
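# For intuition, a minimal self-contained sketch of the detrending that
# regress_poly performs, assuming a (voxels x timepoints) array. nipype fits a
# Legendre polynomial basis over time and keeps the residuals; the helper name
# below is illustrative, not nipype's API, and details such as failure modes
# and axis handling are omitted.
import numpy as np
from numpy.polynomial import legendre

def detrend_legendre(data, degree=1, remove_mean=True):
    """Regress Legendre polynomials of order 0..degree out of each row."""
    n_tp = data.shape[-1]
    t = np.linspace(-1, 1, n_tp)
    # Design matrix: one column per Legendre polynomial order
    X = np.hstack(
        [legendre.Legendre.basis(k)(t)[:, None] for k in range(degree + 1)]
    )
    betas = np.linalg.pinv(X) @ data.T        # least-squares coefficients
    if not remove_mean:
        X, betas = X[:, 1:], betas[1:]        # leave the constant term in place
    return data - (X @ betas).T               # residuals = detrended data

# usage sketch: detrended = detrend_legendre(np.random.rand(10, 120), degree=2)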
@@ -1112,12 +1199,13 @@ def regress_poly(degree, data, remove_mean=True, axis=-1, :param int axis: numpy array axes along which regression is performed """ - IFLOGGER.debug('Performing polynomial regression on data of shape %s', - str(data.shape)) + IFLOGGER.debug( + "Performing polynomial regression on data of shape %s", str(data.shape) + ) datashape = data.shape timepoints = datashape[axis] - if datashape[0] == 0 and failure_mode != 'error': + if datashape[0] == 0 and failure_mode != "error": return data, np.array([]) # Rearrange all voxel-wise time-series in rows @@ -1166,21 +1254,28 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None): if len(mask_files) == 1: mask_index = 0 else: - raise ValueError(('When more than one mask file is provided, ' - 'one of merge_method or mask_index must be ' - 'set')) + raise ValueError( + ( + "When more than one mask file is provided, " + "one of merge_method or mask_index must be " + "set" + ) + ) if mask_index < len(mask_files): mask = nb.load(mask_files[mask_index], mmap=NUMPY_MMAP) return [mask] - raise ValueError(('mask_index {0} must be less than number of mask ' - 'files {1}').format(mask_index, len(mask_files))) + raise ValueError( + ("mask_index {0} must be less than number of mask " "files {1}").format( + mask_index, len(mask_files) + ) + ) masks = [] - if mask_method == 'none': + if mask_method == "none": for filename in mask_files: masks.append(nb.load(filename, mmap=NUMPY_MMAP)) return masks - if mask_method == 'union': + if mask_method == "union": mask = None for filename in mask_files: img = nb.load(filename, mmap=NUMPY_MMAP) @@ -1190,7 +1285,7 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None): img = nb.Nifti1Image(mask, img.affine, header=img.header) return [img] - if mask_method == 'intersect': + if mask_method == "intersect": mask = None for filename in mask_files: img = nb.load(filename, mmap=NUMPY_MMAP) @@ -1201,10 +1296,17 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None): return [img] -def compute_noise_components(imgseries, mask_images, components_criterion=0.5, - filter_type=False, degree=0, period_cut=128, - repetition_time=None, failure_mode='error', - mask_names=None): +def compute_noise_components( + imgseries, + mask_images, + components_criterion=0.5, + filter_type=False, + degree=0, + period_cut=128, + repetition_time=None, + failure_mode="error", + mask_names=None, +): """Compute the noise components from the imgseries for each mask Parameters @@ -1260,7 +1362,7 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, cumulative explained variances. 
""" basis = np.array([]) - if components_criterion == 'all': + if components_criterion == "all": components_criterion = -1 mask_names = mask_names or range(len(mask_images)) @@ -1275,9 +1377,11 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, mask = nb.squeeze_image(img).get_data().astype(np.bool) if imgseries.shape[:3] != mask.shape: raise ValueError( - 'Inputs for CompCor, timeseries and mask, do not have ' - 'matching spatial dimensions ({} and {}, respectively)'.format( - imgseries.shape[:3], mask.shape)) + "Inputs for CompCor, timeseries and mask, do not have " + "matching spatial dimensions ({} and {}, respectively)".format( + imgseries.shape[:3], mask.shape + ) + ) voxel_timecourses = imgseries[mask, :] @@ -1286,19 +1390,22 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, # Currently support Legendre-polynomial or cosine or detrending # With no filter, the mean is nonetheless removed (poly w/ degree 0) - if filter_type == 'cosine': + if filter_type == "cosine": if repetition_time is None: - raise ValueError( - 'Repetition time must be provided for cosine filter') + raise ValueError("Repetition time must be provided for cosine filter") voxel_timecourses, basis = cosine_filter( - voxel_timecourses, repetition_time, period_cut, - failure_mode=failure_mode) - elif filter_type in ('polynomial', False): + voxel_timecourses, + repetition_time, + period_cut, + failure_mode=failure_mode, + ) + elif filter_type in ("polynomial", False): # from paper: # "The constant and linear trends of the columns in the matrix M were # removed [prior to ...]" - voxel_timecourses, basis = regress_poly(degree, voxel_timecourses, - failure_mode=failure_mode) + voxel_timecourses, basis = regress_poly( + degree, voxel_timecourses, failure_mode=failure_mode + ) # "Voxel time series from the noise ROI (either anatomical or tSTD) were # placed in a matrix M of size Nxm, with time along the row dimension @@ -1306,19 +1413,20 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, M = voxel_timecourses.T # "[... were removed] prior to column-wise variance normalization." - M = M / _compute_tSTD(M, 1.) + M = M / _compute_tSTD(M, 1.0) # "The covariance matrix C = MMT was constructed and decomposed into its # principal components using a singular value decomposition." 
try: u, s, _ = fallback_svd(M, full_matrices=False) except (np.linalg.LinAlgError, ValueError): - if failure_mode == 'error': + if failure_mode == "error": raise s = np.full(M.shape[0], np.nan, dtype=np.float32) if components_criterion >= 1: - u = np.full((M.shape[0], components_criterion), - np.nan, dtype=np.float32) + u = np.full( + (M.shape[0], components_criterion), np.nan, dtype=np.float32 + ) else: u = np.full((M.shape[0], 1), np.nan, dtype=np.float32) @@ -1327,8 +1435,9 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, num_components = int(components_criterion) if 0 < components_criterion < 1: - num_components = np.searchsorted(cumulative_variance_explained, - components_criterion) + 1 + num_components = ( + np.searchsorted(cumulative_variance_explained, components_criterion) + 1 + ) elif components_criterion == -1: num_components = len(s) @@ -1346,18 +1455,19 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5, if len(components) > 0: components = np.hstack(components) else: - if failure_mode == 'error': - raise ValueError('No components found') - components = np.full((M.shape[0], num_components), - np.nan, dtype=np.float32) - - metadata = OrderedDict([ - ('mask', list(chain(*md_mask))), - ('singular_value', np.hstack(md_sv)), - ('variance_explained', np.hstack(md_var)), - ('cumulative_variance_explained', np.hstack(md_cumvar)), - ('retained', list(chain(*md_retained))) - ]) + if failure_mode == "error": + raise ValueError("No components found") + components = np.full((M.shape[0], num_components), np.nan, dtype=np.float32) + + metadata = OrderedDict( + [ + ("mask", list(chain(*md_mask))), + ("singular_value", np.hstack(md_sv)), + ("variance_explained", np.hstack(md_var)), + ("cumulative_variance_explained", np.hstack(md_cumvar)), + ("retained", list(chain(*md_retained))), + ] + ) return components, basis, metadata @@ -1395,7 +1505,7 @@ def _cosine_drift(period_cut, frametimes): """ len_tim = len(frametimes) n_times = np.arange(len_tim) - hfcut = 1. / period_cut # input parameter is the period + hfcut = 1.0 / period_cut # input parameter is the period # frametimes.max() should be (len_tim-1)*dt dt = frametimes[1] - frametimes[0] @@ -1406,10 +1516,9 @@ def _cosine_drift(period_cut, frametimes): nfct = np.sqrt(2.0 / len_tim) for k in range(1, order): - cdrift[:, k - 1] = nfct * np.cos( - (np.pi / len_tim) * (n_times + .5) * k) + cdrift[:, k - 1] = nfct * np.cos((np.pi / len_tim) * (n_times + 0.5) * k) - cdrift[:, order - 1] = 1. 
# or 1./sqrt(len_tim) to normalize + cdrift[:, order - 1] = 1.0 # or 1./sqrt(len_tim) to normalize return cdrift @@ -1433,7 +1542,7 @@ def _full_rank(X, cmax=1e15): c = smax / smin if c < cmax: return X, c - IFLOGGER.warning('Matrix is singular at working precision, regularizing...') + IFLOGGER.warning("Matrix is singular at working precision, regularizing...") lda = (smax - cmax * smin) / (cmax - 1) s = s + lda X = np.dot(U, np.dot(np.diag(s), V)) diff --git a/nipype/algorithms/icc.py b/nipype/algorithms/icc.py index ff57cc1fd4..a3ae57edf8 100644 --- a/nipype/algorithms/icc.py +++ b/nipype/algorithms/icc.py @@ -4,8 +4,13 @@ from numpy import ones, kron, mean, eye, hstack, dot, tile from numpy.linalg import pinv import nibabel as nb -from ..interfaces.base import BaseInterfaceInputSpec, TraitedSpec, \ - BaseInterface, traits, File +from ..interfaces.base import ( + BaseInterfaceInputSpec, + TraitedSpec, + BaseInterface, + traits, + File, +) from ..utils import NUMPY_MMAP @@ -13,7 +18,8 @@ class ICCInputSpec(BaseInterfaceInputSpec): subjects_sessions = traits.List( traits.List(File(exists=True)), desc="n subjects m sessions 3D stat files", - mandatory=True) + mandatory=True, + ) mask = File(exists=True, mandatory=True) @@ -24,27 +30,28 @@ class ICCOutputSpec(TraitedSpec): class ICC(BaseInterface): - ''' + """ Calculates Interclass Correlation Coefficient (3,1) as defined in P. E. Shrout & Joseph L. Fleiss (1979). "Intraclass Correlations: Uses in Assessing Rater Reliability". Psychological Bulletin 86 (2): 420-428. This particular implementation is aimed at relaibility (test-retest) studies. - ''' + """ + input_spec = ICCInputSpec output_spec = ICCOutputSpec def _run_interface(self, runtime): maskdata = nb.load(self.inputs.mask).get_data() - maskdata = np.logical_not( - np.logical_or(maskdata == 0, np.isnan(maskdata))) - - session_datas = [[ - nb.load(fname, mmap=NUMPY_MMAP).get_data()[maskdata].reshape( - -1, 1) for fname in sessions - ] for sessions in self.inputs.subjects_sessions] - list_of_sessions = [ - np.dstack(session_data) for session_data in session_datas + maskdata = np.logical_not(np.logical_or(maskdata == 0, np.isnan(maskdata))) + + session_datas = [ + [ + nb.load(fname, mmap=NUMPY_MMAP).get_data()[maskdata].reshape(-1, 1) + for fname in sessions + ] + for sessions in self.inputs.subjects_sessions ] + list_of_sessions = [np.dstack(session_data) for session_data in session_datas] all_data = np.hstack(list_of_sessions) icc = np.zeros(session_datas[0][0].shape) session_F = np.zeros(session_datas[0][0].shape) @@ -53,44 +60,45 @@ def _run_interface(self, runtime): for x in range(icc.shape[0]): Y = all_data[x, :, :] - icc[x], subject_var[x], session_var[x], session_F[ - x], _, _ = ICC_rep_anova(Y) + icc[x], subject_var[x], session_var[x], session_F[x], _, _ = ICC_rep_anova( + Y + ) nim = nb.load(self.inputs.subjects_sessions[0][0]) new_data = np.zeros(nim.shape) - new_data[maskdata] = icc.reshape(-1, ) + new_data[maskdata] = icc.reshape(-1,) new_img = nb.Nifti1Image(new_data, nim.affine, nim.header) - nb.save(new_img, 'icc_map.nii') + nb.save(new_img, "icc_map.nii") new_data = np.zeros(nim.shape) - new_data[maskdata] = session_var.reshape(-1, ) + new_data[maskdata] = session_var.reshape(-1,) new_img = nb.Nifti1Image(new_data, nim.affine, nim.header) - nb.save(new_img, 'session_var_map.nii') + nb.save(new_img, "session_var_map.nii") new_data = np.zeros(nim.shape) - new_data[maskdata] = subject_var.reshape(-1, ) + new_data[maskdata] = subject_var.reshape(-1,) new_img = 
nb.Nifti1Image(new_data, nim.affine, nim.header) - nb.save(new_img, 'subject_var_map.nii') + nb.save(new_img, "subject_var_map.nii") return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['icc_map'] = os.path.abspath('icc_map.nii') - outputs['session_var_map'] = os.path.abspath('session_var_map.nii') - outputs['subject_var_map'] = os.path.abspath('subject_var_map.nii') + outputs["icc_map"] = os.path.abspath("icc_map.nii") + outputs["session_var_map"] = os.path.abspath("session_var_map.nii") + outputs["subject_var_map"] = os.path.abspath("subject_var_map.nii") return outputs def ICC_rep_anova(Y): - ''' + """ the data Y are entered as a 'table' i.e. subjects are in rows and repeated measures in columns One Sample Repeated measure ANOVA Y = XB + E with X = [Factor / Subjects] - ''' + """ [nb_subjects, nb_conditions] = Y.shape dfc = nb_conditions - 1 @@ -102,7 +110,7 @@ def ICC_rep_anova(Y): # Sum Square Total mean_Y = mean(Y) - SST = ((Y - mean_Y)**2).sum() + SST = ((Y - mean_Y) ** 2).sum() # create the design matrix for the different levels x = kron(eye(nb_conditions), ones((nb_subjects, 1))) # sessions @@ -110,16 +118,16 @@ def ICC_rep_anova(Y): X = hstack([x, x0]) # Sum Square Error - predicted_Y = dot(dot(dot(X, pinv(dot(X.T, X))), X.T), Y.flatten('F')) - residuals = Y.flatten('F') - predicted_Y - SSE = (residuals**2).sum() + predicted_Y = dot(dot(dot(X, pinv(dot(X.T, X))), X.T), Y.flatten("F")) + residuals = Y.flatten("F") - predicted_Y + SSE = (residuals ** 2).sum() residuals.shape = Y.shape MSE = SSE / dfe # Sum square session effect - between columns/sessions - SSC = ((mean(Y, 0) - mean_Y)**2).sum() * nb_subjects + SSC = ((mean(Y, 0) - mean_Y) ** 2).sum() * nb_subjects MSC = SSC / dfc / nb_subjects session_effect_F = MSC / MSE
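Note (supplementary annotation, not part of the patch): the ANOVA bookkeeping in ICC_rep_anova implements the Shrout & Fleiss (1979) ICC(3,1) estimator, ICC(3,1) = (MSR - MSE) / (MSR + (k - 1) * MSE), with subjects in rows and the k repeated sessions in columns. A minimal self-contained sketch for sanity-checking the reformatted code; the helper name icc_3_1 and the 6x4 toy matrix (the ratings table from the Shrout & Fleiss paper) are illustrative, not nipype code:

import numpy as np

def icc_3_1(Y):
    """ICC(3,1): subjects in rows, k repeated measurements in columns."""
    n, k = Y.shape
    grand_mean = Y.mean()
    # partition the total sum of squares into subject, session, and error terms
    SST = ((Y - grand_mean) ** 2).sum()
    SSR = k * ((Y.mean(axis=1) - grand_mean) ** 2).sum()  # between subjects
    SSC = n * ((Y.mean(axis=0) - grand_mean) ** 2).sum()  # between sessions
    SSE = SST - SSR - SSC  # residual
    MSR = SSR / (n - 1)
    MSE = SSE / ((n - 1) * (k - 1))
    return (MSR - MSE) / (MSR + (k - 1) * MSE)

Y = np.array([[9, 2, 5, 8], [6, 1, 3, 2], [8, 4, 6, 8],
              [7, 1, 2, 6], [10, 5, 6, 9], [6, 2, 4, 7]], dtype=float)
print(round(icc_3_1(Y), 4))  # 0.7148, consistent with the value reported in the paper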
diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index 2d5c7d2d49..3732f88548 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -9,11 +9,17 @@ from numpy import linalg as nla from .. import logging -from ..interfaces.base import (BaseInterface, traits, TraitedSpec, File, - BaseInterfaceInputSpec) +from ..interfaces.base import ( + BaseInterface, + traits, + TraitedSpec, + File, + BaseInterfaceInputSpec, +) from ..interfaces.vtkbase import tvtk from ..interfaces import vtkbase as VTKInfo -IFLOGGER = logging.getLogger('nipype.interface') + +IFLOGGER = logging.getLogger("nipype.interface") class TVTKBaseInterface(BaseInterface): @@ -23,34 +29,34 @@ class TVTKBaseInterface(BaseInterface): def __init__(self, **inputs): if VTKInfo.no_tvtk(): - raise ImportError('This interface requires tvtk to run.') + raise ImportError("This interface requires tvtk to run.") super(TVTKBaseInterface, self).__init__(**inputs) class WarpPointsInputSpec(BaseInterfaceInputSpec): - points = File( - exists=True, mandatory=True, desc='file containing the point set') + points = File(exists=True, mandatory=True, desc="file containing the point set") warp = File( - exists=True, - mandatory=True, - desc='dense deformation field to be applied') + exists=True, mandatory=True, desc="dense deformation field to be applied" + ) interp = traits.Enum( - 'cubic', - 'nearest', - 'linear', + "cubic", + "nearest", + "linear", usedefault=True, mandatory=True, - desc='interpolation') + desc="interpolation", + ) out_points = File( - name_source='points', - name_template='%s_warped', - output_name='out_points', + name_source="points", + name_template="%s_warped", + output_name="out_points", keep_extension=True, - desc='the warped point set') + desc="the warped point set", + ) class WarpPointsOutputSpec(TraitedSpec): - out_points = File(desc='the warped point set') + out_points = File(desc="the warped point set") class WarpPoints(TVTKBaseInterface): @@ -70,22 +76,23 @@ class WarpPoints(TVTKBaseInterface): res = wp.run() """ + input_spec = WarpPointsInputSpec output_spec = WarpPointsOutputSpec - def _gen_fname(self, in_file, suffix='generated', ext=None): + def _gen_fname(self, in_file, suffix="generated", ext=None): fname, fext = op.splitext(op.basename(in_file)) - if fext == '.gz': + if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext if ext is None: ext = fext - if ext[0] == '.': + if ext[0] == ".": ext = ext[1:] - return op.abspath('%s_%s.%s' % (fname, suffix, ext)) + return op.abspath("%s_%s.%s" % (fname, suffix, ext)) def _run_interface(self, runtime): import nibabel as nb @@ -111,7 +118,7 @@ def _run_interface(self, runtime): warp = ndimage.map_coordinates(wdata, voxpoints.transpose()) else: - warp = np.zeros((points.shape[0], )) + warp = np.zeros((points.shape[0],)) warps.append(warp) @@ -120,15 +127,15 @@ def _run_interface(self, runtime): mesh.points = newpoints w = tvtk.PolyDataWriter() VTKInfo.configure_input_data(w, mesh) - w.file_name = self._gen_fname( - self.inputs.points, suffix='warped', ext='.vtk') + w.file_name = self._gen_fname(self.inputs.points, suffix="warped", ext=".vtk") w.write() return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_points'] = self._gen_fname( - self.inputs.points, suffix='warped', ext='.vtk') + outputs["out_points"] = self._gen_fname( + self.inputs.points, suffix="warped", ext=".vtk" + ) return outputs
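Note (supplementary annotation, not part of the patch): the core of WarpPoints._run_interface, reformatted above, is a three-step pattern: map the point set into voxel space through the inverse of the warp image affine, sample each component of the dense deformation field with scipy.ndimage.map_coordinates (whose default spline order, 3, corresponds to the interface's default 'cubic' interpolation), and add the interpolated displacements to the points. A standalone sketch of that pattern under assumed inputs; the function name and the (N, 3) points / 4D warp_img conventions are ours, not the interface's API:

import numpy as np
import nibabel as nb
from scipy import ndimage

def sample_displacements(points, warp_img):
    """Interpolate a dense displacement field at arbitrary mm coordinates."""
    # mm coordinates -> (fractional) voxel indices via the inverse affine
    voxpoints = nb.affines.apply_affine(np.linalg.inv(warp_img.affine), points)
    wdata = np.asanyarray(warp_img.dataobj)  # expected shape (i, j, k, 3)
    # sample each displacement component at the voxel coordinates
    warps = [ndimage.map_coordinates(wdata[..., ax], voxpoints.T)
             for ax in range(wdata.shape[-1])]
    return points + np.array(warps).T  # warped points, still in mm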
@@ -136,43 +143,46 @@ class ComputeMeshWarpInputSpec(BaseInterfaceInputSpec): surface1 = File( exists=True, mandatory=True, - desc=('Reference surface (vtk format) to which compute ' - 'distance.')) + desc=("Reference surface (vtk format) to which to compute " "distance."), + ) surface2 = File( exists=True, mandatory=True, - desc=('Test surface (vtk format) from which compute ' - 'distance.')) + desc=("Test surface (vtk format) from which to compute " "distance."), + ) metric = traits.Enum( - 'euclidean', - 'sqeuclidean', - usedefault=True, - desc='norm used to report distance') + "euclidean", "sqeuclidean", usedefault=True, desc="norm used to report distance" ) weighting = traits.Enum( - 'none', - 'area', + "none", + "area", usedefault=True, - desc=('"none": no weighting is performed, surface": edge distance is ' - 'weighted by the corresponding surface area')) + desc=( '"none": no weighting is performed, "area": edge distance is ' "weighted by the corresponding surface area" ), + ) out_warp = File( - 'surfwarp.vtk', + "surfwarp.vtk", usedefault=True, - desc='vtk file based on surface1 and warpings mapping it ' - 'to surface2') + desc="vtk file based on surface1 and warpings mapping it " "to surface2", + ) out_file = File( - 'distance.npy', + "distance.npy", usedefault=True, - desc='numpy file keeping computed distances and weights') + desc="numpy file keeping computed distances and weights", + ) class ComputeMeshWarpOutputSpec(TraitedSpec): distance = traits.Float(desc="computed distance") out_warp = File( exists=True, - desc=('vtk file with the vertex-wise ' - 'mapping of surface1 to surface2')) + desc=("vtk file with the vertex-wise " "mapping of surface1 to surface2"), + ) out_file = File( - exists=True, desc='numpy file keeping computed distances and weights') + exists=True, desc="numpy file keeping computed distances and weights" + ) class ComputeMeshWarp(TVTKBaseInterface): @@ -216,7 +226,7 @@ def _run_interface(self, runtime): vtk2 = VTKInfo.vtk_output(r2) r1.update() r2.update() - assert (len(vtk1.points) == len(vtk2.points)) + assert len(vtk1.points) == len(vtk2.points) points1 = np.array(vtk1.points) points2 = np.array(vtk2.points) @@ -229,10 +239,10 @@ def _run_interface(self, runtime): except TypeError: # numpy < 1.9 errvector = np.apply_along_axis(nla.norm, 1, diff) - if self.inputs.metric == 'sqeuclidean': + if self.inputs.metric == "sqeuclidean": errvector **= 2 - if self.inputs.weighting == 'area': + if self.inputs.weighting == "area": faces = vtk1.polys.to_array().reshape(-1, 4).astype(int)[:, 1:] for i, p1 in enumerate(points2): @@ -254,9 +264,8 @@ def _run_interface(self, runtime): out_mesh.points = vtk1.points out_mesh.polys = vtk1.polys out_mesh.point_data.vectors = diff - out_mesh.point_data.vectors.name = 'warpings' - writer = tvtk.PolyDataWriter( - file_name=op.abspath(self.inputs.out_warp)) + out_mesh.point_data.vectors.name = "warpings" + writer = tvtk.PolyDataWriter(file_name=op.abspath(self.inputs.out_warp)) VTKInfo.configure_input_data(writer, out_mesh) writer.write() @@ -265,9 +274,9 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) - outputs['out_warp'] = op.abspath(self.inputs.out_warp) - outputs['distance'] = self._distance + outputs["out_file"] = op.abspath(self.inputs.out_file) + outputs["out_warp"] = op.abspath(self.inputs.out_warp) + outputs["distance"] = self._distance return outputs @@ -275,11 +284,15 @@ class MeshWarpMathsInputSpec(BaseInterfaceInputSpec): in_surf = File( exists=True, mandatory=True, - desc=('Input surface in vtk format, with associated warp ' - 'field as point data (ie. from ComputeMeshWarp')) + desc=( + "Input surface in vtk format, with associated warp " + "field as point data (i.e. from ComputeMeshWarp)"
+ ), + ) float_trait = traits.Either( traits.Float(1.0), - traits.Tuple(traits.Float(1.0), traits.Float(1.0), traits.Float(1.0))) + traits.Tuple(traits.Float(1.0), traits.Float(1.0), traits.Float(1.0)), + ) operator = traits.Either( float_trait, @@ -287,31 +300,27 @@ class MeshWarpMathsInputSpec(BaseInterfaceInputSpec): default=1.0, usedefault=True, mandatory=True, - desc='image, float or tuple of floats to act as operator') + desc="image, float or tuple of floats to act as operator", + ) operation = traits.Enum( - 'sum', - 'sub', - 'mul', - 'div', - usedefault=True, - desc='operation to be performed') + "sum", "sub", "mul", "div", usedefault=True, desc="operation to be performed" ) out_warp = File( - 'warp_maths.vtk', + "warp_maths.vtk", usedefault=True, - desc='vtk file based on in_surf and warpings mapping it ' - 'to out_file') + desc="vtk file based on in_surf and warpings mapping it " "to out_file", + ) - out_file = File( - 'warped_surf.vtk', usedefault=True, desc='vtk with surface warped') + out_file = File("warped_surf.vtk", usedefault=True, desc="vtk with surface warped") class MeshWarpMathsOutputSpec(TraitedSpec): out_warp = File( exists=True, - desc=('vtk file with the vertex-wise ' - 'mapping of surface1 to surface2')) - out_file = File(exists=True, desc='vtk with surface warped') + desc=("vtk file with the vertex-wise " "mapping of surface1 to surface2"), + ) + out_file = File(exists=True, desc="vtk with surface warped") class MeshWarpMaths(TVTKBaseInterface): @@ -346,7 +355,7 @@ def _run_interface(self, runtime): points1 = np.array(vtk1.points) if vtk1.point_data.vectors is None: - raise RuntimeError('No warping field was found in in_surf') + raise RuntimeError("No warping field was found in in_surf") operator = self.inputs.operator opfield = np.ones_like(points1) @@ -355,7 +364,7 @@ def _run_interface(self, runtime): r2 = tvtk.PolyDataReader(file_name=self.inputs.surface2) vtk2 = VTKInfo.vtk_output(r2) r2.update() - assert (len(points1) == len(vtk2.points)) + assert len(points1) == len(vtk2.points) opfield = vtk2.point_data.vectors @@ -363,7 +372,7 @@ def _run_interface(self, runtime): opfield = vtk2.point_data.scalars if opfield is None: - raise RuntimeError('No operator values found in operator file') + raise RuntimeError("No operator values found in operator file") opfield = np.array(opfield) @@ -375,33 +384,31 @@ def _run_interface(self, runtime): warping = np.array(vtk1.point_data.vectors) - if self.inputs.operation == 'sum': + if self.inputs.operation == "sum": warping += opfield - elif self.inputs.operation == 'sub': + elif self.inputs.operation == "sub": warping -= opfield - elif self.inputs.operation == 'mul': + elif self.inputs.operation == "mul": warping *= opfield - elif self.inputs.operation == 'div': + elif self.inputs.operation == "div": warping /= opfield vtk1.point_data.vectors = warping - writer = tvtk.PolyDataWriter( - file_name=op.abspath(self.inputs.out_warp)) + writer = tvtk.PolyDataWriter(file_name=op.abspath(self.inputs.out_warp)) VTKInfo.configure_input_data(writer, vtk1) writer.write() vtk1.point_data.vectors = None vtk1.points = points1 + warping - writer = tvtk.PolyDataWriter( - file_name=op.abspath(self.inputs.out_file)) + writer = tvtk.PolyDataWriter(file_name=op.abspath(self.inputs.out_file)) VTKInfo.configure_input_data(writer, vtk1) writer.write() return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) - outputs['out_warp'] = op.abspath(self.inputs.out_warp) + outputs["out_file"] = op.abspath(self.inputs.out_file) + outputs["out_warp"] = op.abspath(self.inputs.out_warp) return outputs @@ -418,5 +425,7 @@ class P2PDistance(ComputeMeshWarp): def __init__(self, **inputs): super(P2PDistance, self).__init__(**inputs) - IFLOGGER.warning('This interface has been deprecated since 1.0, please ' - 'use ComputeMeshWarp') + IFLOGGER.warning( + "This interface has been deprecated since 1.0, please " + "use ComputeMeshWarp" + )
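Note (supplementary annotation, not part of the patch): with P2PDistance reduced to a deprecation shim, the intended pairing is ComputeMeshWarp followed by MeshWarpMaths; the former stores the vertex-wise 'warpings' vector field in out_warp, and the latter accepts that file as in_surf. A hypothetical usage sketch (the .vtk paths are placeholders, and running it requires tvtk plus meshes with matching vertex counts):

from nipype.algorithms.mesh import ComputeMeshWarp, MeshWarpMaths

# area-weighted distance between two meshes with corresponding vertices
dist = ComputeMeshWarp()
dist.inputs.surface1 = "ref_surface.vtk"   # placeholder path
dist.inputs.surface2 = "test_surface.vtk"  # placeholder path
dist.inputs.weighting = "area"
res = dist.run()
print(res.outputs.distance)

# halve the recovered warp field and apply it to the reference mesh
maths = MeshWarpMaths()
maths.inputs.in_surf = res.outputs.out_warp  # carries the 'warpings' point data
maths.inputs.operator = 0.5
maths.inputs.operation = "mul"
maths.run()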
diff --git a/nipype/algorithms/metrics.py b/nipype/algorithms/metrics.py index d1075ec04e..7ee1ac5bfd 100644 --- a/nipype/algorithms/metrics.py +++ b/nipype/algorithms/metrics.py @@ -1,10 +1,10 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -''' +""" Image assessment algorithms. Typical overlap and error computation measures to evaluate results from other processing units. -''' +""" import os import os.path as op @@ -14,23 +14,27 @@ from .. import config, logging from ..interfaces.base import ( - SimpleInterface, BaseInterface, traits, TraitedSpec, File, - InputMultiPath, BaseInterfaceInputSpec, - isdefined) + SimpleInterface, + BaseInterface, + traits, + TraitedSpec, + File, + InputMultiPath, + BaseInterfaceInputSpec, + isdefined, +) from ..interfaces.nipy.base import NipyBaseInterface -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") class DistanceInputSpec(BaseInterfaceInputSpec): volume1 = File( - exists=True, - mandatory=True, - desc="Has to have the same dimensions as volume2.") + exists=True, mandatory=True, desc="Has to have the same dimensions as volume2." + ) volume2 = File( - exists=True, - mandatory=True, - desc="Has to have the same dimensions as volume1.") + exists=True, mandatory=True, desc="Has to have the same dimensions as volume1." + ) method = traits.Enum( "eucl_min", "eucl_cog", @@ -46,21 +50,22 @@ class DistanceInputSpec(BaseInterfaceInputSpec): to volume1 weighted by their values\ "eucl_max": maximum over minimum Euclidean distances of all volume2\ voxels to volume1 (also known as the Hausdorff distance)', - usedefault=True) - mask_volume = File( - exists=True, desc="calculate overlap only within this mask.") + usedefault=True, + ) + mask_volume = File(exists=True, desc="calculate overlap only within this mask.") class DistanceOutputSpec(TraitedSpec): distance = traits.Float() - point1 = traits.Array(shape=(3, )) - point2 = traits.Array(shape=(3, )) + point1 = traits.Array(shape=(3,)) + point2 = traits.Array(shape=(3,)) histogram = File() class Distance(BaseInterface): """Calculates distance between two volumes.
""" + input_spec = DistanceInputSpec output_spec = DistanceOutputSpec @@ -68,6 +73,7 @@ class Distance(BaseInterface): def _find_border(self, data): from scipy.ndimage.morphology import binary_erosion + eroded = binary_erosion(data) border = np.logical_and(data, np.logical_not(eroded)) return border @@ -82,6 +88,7 @@ def _get_coordinates(self, data, affine): def _eucl_min(self, nii1, nii2): from scipy.spatial.distance import cdist, euclidean + origdata1 = nii1.get_data().astype(np.bool) border1 = self._find_border(origdata1) @@ -93,30 +100,33 @@ def _eucl_min(self, nii1, nii2): set2_coordinates = self._get_coordinates(border2, nii2.affine) dist_matrix = cdist(set1_coordinates.T, set2_coordinates.T) - (point1, point2) = np.unravel_index( - np.argmin(dist_matrix), dist_matrix.shape) - return (euclidean(set1_coordinates.T[point1, :], - set2_coordinates.T[point2, :]), - set1_coordinates.T[point1, :], set2_coordinates.T[point2, :]) + (point1, point2) = np.unravel_index(np.argmin(dist_matrix), dist_matrix.shape) + return ( + euclidean(set1_coordinates.T[point1, :], set2_coordinates.T[point2, :]), + set1_coordinates.T[point1, :], + set2_coordinates.T[point2, :], + ) def _eucl_cog(self, nii1, nii2): from scipy.spatial.distance import cdist from scipy.ndimage.measurements import center_of_mass, label - origdata1 = np.logical_and(nii1.get_data() != 0, - np.logical_not(np.isnan(nii1.get_data()))) + + origdata1 = np.logical_and( + nii1.get_data() != 0, np.logical_not(np.isnan(nii1.get_data())) + ) cog_t = np.array(center_of_mass(origdata1.copy())).reshape(-1, 1) cog_t = np.vstack((cog_t, np.array([1]))) cog_t_coor = np.dot(nii1.affine, cog_t)[:3, :] - origdata2 = np.logical_and(nii2.get_data() != 0, - np.logical_not(np.isnan(nii2.get_data()))) + origdata2 = np.logical_and( + nii2.get_data() != 0, np.logical_not(np.isnan(nii2.get_data())) + ) (labeled_data, n_labels) = label(origdata2) cogs = np.ones((4, n_labels)) for i in range(n_labels): - cogs[:3, i] = np.array( - center_of_mass(origdata2, labeled_data, i + 1)) + cogs[:3, i] = np.array(center_of_mass(origdata2, labeled_data, i + 1)) cogs_coor = np.dot(nii2.affine, cogs)[:3, :] @@ -126,6 +136,7 @@ def _eucl_cog(self, nii1, nii2): def _eucl_mean(self, nii1, nii2, weighted=False): from scipy.spatial.distance import cdist + origdata1 = nii1.get_data().astype(np.bool) border1 = self._find_border(origdata1) @@ -137,33 +148,32 @@ def _eucl_mean(self, nii1, nii2, weighted=False): dist_matrix = cdist(set1_coordinates.T, set2_coordinates.T) min_dist_matrix = np.amin(dist_matrix, axis=0) import matplotlib - matplotlib.use(config.get('execution', 'matplotlib_backend')) + + matplotlib.use(config.get("execution", "matplotlib_backend")) import matplotlib.pyplot as plt + plt.figure() - plt.hist(min_dist_matrix, 50, normed=1, facecolor='green') + plt.hist(min_dist_matrix, 50, normed=1, facecolor="green") plt.savefig(self._hist_filename) plt.clf() plt.close() if weighted: - return np.average( - min_dist_matrix, weights=nii2.get_data()[origdata2].flat) + return np.average(min_dist_matrix, weights=nii2.get_data()[origdata2].flat) else: return np.mean(min_dist_matrix) def _eucl_max(self, nii1, nii2): from scipy.spatial.distance import cdist + origdata1 = nii1.get_data() - origdata1 = np.logical_not( - np.logical_or(origdata1 == 0, np.isnan(origdata1))) + origdata1 = np.logical_not(np.logical_or(origdata1 == 0, np.isnan(origdata1))) origdata2 = nii2.get_data() - origdata2 = np.logical_not( - np.logical_or(origdata2 == 0, np.isnan(origdata2))) + origdata2 = 
np.logical_not(np.logical_or(origdata2 == 0, np.isnan(origdata2))) if isdefined(self.inputs.mask_volume): maskdata = nb.load(self.inputs.mask_volume).get_data() - maskdata = np.logical_not( - np.logical_or(maskdata == 0, np.isnan(maskdata))) + maskdata = np.logical_not(np.logical_or(maskdata == 0, np.isnan(maskdata))) origdata1 = np.logical_and(maskdata, origdata1) origdata2 = np.logical_and(maskdata, origdata2) @@ -176,8 +186,7 @@ def _eucl_max(self, nii1, nii2): set1_coordinates = self._get_coordinates(border1, nii1.affine) set2_coordinates = self._get_coordinates(border2, nii2.affine) distances = cdist(set1_coordinates.T, set2_coordinates.T) - mins = np.concatenate((np.amin(distances, axis=0), - np.amin(distances, axis=1))) + mins = np.concatenate((np.amin(distances, axis=0), np.amin(distances, axis=1))) return np.max(mins) @@ -187,8 +196,7 @@ def _run_interface(self, runtime): nii2 = nb.load(self.inputs.volume2, mmap=False) if self.inputs.method == "eucl_min": - self._distance, self._point1, self._point2 = self._eucl_min( - nii1, nii2) + self._distance, self._point1, self._point2 = self._eucl_min(nii1, nii2) elif self.inputs.method == "eucl_cog": self._distance = self._eucl_cog(nii1, nii2) @@ -205,62 +213,55 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['distance'] = self._distance + outputs["distance"] = self._distance if self.inputs.method == "eucl_min": - outputs['point1'] = self._point1 - outputs['point2'] = self._point2 + outputs["point1"] = self._point1 + outputs["point2"] = self._point2 elif self.inputs.method in ["eucl_mean", "eucl_wmean"]: - outputs['histogram'] = os.path.abspath(self._hist_filename) + outputs["histogram"] = os.path.abspath(self._hist_filename) return outputs class OverlapInputSpec(BaseInterfaceInputSpec): volume1 = File( - exists=True, - mandatory=True, - desc='Has to have the same dimensions as volume2.') + exists=True, mandatory=True, desc="Has to have the same dimensions as volume2." + ) volume2 = File( - exists=True, - mandatory=True, - desc='Has to have the same dimensions as volume1.') - mask_volume = File( - exists=True, desc='calculate overlap only within this mask.') + exists=True, mandatory=True, desc="Has to have the same dimensions as volume1." + ) + mask_volume = File(exists=True, desc="calculate overlap only within this mask.") bg_overlap = traits.Bool( - False, - usedefault=True, - mandatory=True, - desc='consider zeros as a label') - out_file = File('diff.nii', usedefault=True) + False, usedefault=True, mandatory=True, desc="consider zeros as a label" + ) + out_file = File("diff.nii", usedefault=True) weighting = traits.Enum( - 'none', - 'volume', - 'squared_vol', + "none", + "volume", + "squared_vol", usedefault=True, - desc=('\'none\': no class-overlap weighting is ' - 'performed. \'volume\': computed class-' - 'overlaps are weighted by class volume ' - '\'squared_vol\': computed class-overlaps ' - 'are weighted by the squared volume of ' - 'the class')) + desc=( + "'none': no class-overlap weighting is " + "performed. 
'volume': computed class-" + "overlaps are weighted by class volume " + "'squared_vol': computed class-overlaps " + "are weighted by the squared volume of " + "the class" + ), + ) vol_units = traits.Enum( - 'voxel', - 'mm', - mandatory=True, - usedefault=True, - desc='units for volumes') + "voxel", "mm", mandatory=True, usedefault=True, desc="units for volumes" + ) class OverlapOutputSpec(TraitedSpec): - jaccard = traits.Float(desc='averaged jaccard index') - dice = traits.Float(desc='averaged dice index') - roi_ji = traits.List( - traits.Float(), desc=('the Jaccard index (JI) per ROI')) - roi_di = traits.List(traits.Float(), desc=('the Dice index (DI) per ROI')) - volume_difference = traits.Float(desc=('averaged volume difference')) - roi_voldiff = traits.List( - traits.Float(), desc=('volume differences of ROIs')) - labels = traits.List(traits.Int(), desc=('detected labels')) - diff_file = File(exists=True, desc='error map of differences') + jaccard = traits.Float(desc="averaged jaccard index") + dice = traits.Float(desc="averaged dice index") + roi_ji = traits.List(traits.Float(), desc=("the Jaccard index (JI) per ROI")) + roi_di = traits.List(traits.Float(), desc=("the Dice index (DI) per ROI")) + volume_difference = traits.Float(desc=("averaged volume difference")) + roi_voldiff = traits.List(traits.Float(), desc=("volume differences of ROIs")) + labels = traits.List(traits.Int(), desc=("detected labels")) + diff_file = File(exists=True, desc="error map of differences") class Overlap(BaseInterface): @@ -282,12 +283,14 @@ class Overlap(BaseInterface): >>> res = overlap.run() # doctest: +SKIP """ + input_spec = OverlapInputSpec output_spec = OverlapOutputSpec def _bool_vec_dissimilarity(self, booldata1, booldata2, method): from scipy.spatial.distance import dice, jaccard - methods = {'dice': dice, 'jaccard': jaccard} + + methods = {"dice": dice, "jaccard": jaccard} if not (np.any(booldata1) or np.any(booldata2)): return 0 return 1 - methods[method](booldata1.flat, booldata2.flat) @@ -298,7 +301,7 @@ def _run_interface(self, runtime): scale = 1.0 - if self.inputs.vol_units == 'mm': + if self.inputs.vol_units == "mm": voxvol = nii1.header.get_zooms() for i in range(nii1.get_data().ndim - 1): scale = scale * voxvol[i] @@ -326,51 +329,50 @@ def _run_interface(self, runtime): for l in labels: res.append( - self._bool_vec_dissimilarity( - data1 == l, data2 == l, method='jaccard')) + self._bool_vec_dissimilarity(data1 == l, data2 == l, method="jaccard") + ) volumes1.append(scale * len(data1[data1 == l])) volumes2.append(scale * len(data2[data2 == l])) results = dict(jaccard=[], dice=[]) - results['jaccard'] = np.array(res) - results['dice'] = 2.0 * results['jaccard'] / (results['jaccard'] + 1.0) + results["jaccard"] = np.array(res) + results["dice"] = 2.0 * results["jaccard"] / (results["jaccard"] + 1.0) - weights = np.ones((len(volumes1), ), dtype=np.float32) - if self.inputs.weighting != 'none': + weights = np.ones((len(volumes1),), dtype=np.float32) + if self.inputs.weighting != "none": weights = weights / np.array(volumes1) - if self.inputs.weighting == 'squared_vol': - weights = weights**2 + if self.inputs.weighting == "squared_vol": + weights = weights ** 2 weights = weights / np.sum(weights) both_data = np.zeros(data1.shape) both_data[(data1 - data2) != 0] = 1 nb.save( - nb.Nifti1Image(both_data, nii1.affine, nii1.header), - self.inputs.out_file) + nb.Nifti1Image(both_data, nii1.affine, nii1.header), self.inputs.out_file + ) self._labels = labels self._ove_rois = results - 
self._vol_rois = ( - np.array(volumes1) - np.array(volumes2)) / np.array(volumes1) + self._vol_rois = (np.array(volumes1) - np.array(volumes2)) / np.array(volumes1) - self._dice = round(np.sum(weights * results['dice']), 5) - self._jaccard = round(np.sum(weights * results['jaccard']), 5) + self._dice = round(np.sum(weights * results["dice"]), 5) + self._jaccard = round(np.sum(weights * results["jaccard"]), 5) self._volume = np.sum(weights * self._vol_rois) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['labels'] = self._labels - outputs['jaccard'] = self._jaccard - outputs['dice'] = self._dice - outputs['volume_difference'] = self._volume - - outputs['roi_ji'] = self._ove_rois['jaccard'].tolist() - outputs['roi_di'] = self._ove_rois['dice'].tolist() - outputs['roi_voldiff'] = self._vol_rois.tolist() - outputs['diff_file'] = os.path.abspath(self.inputs.out_file) + outputs["labels"] = self._labels + outputs["jaccard"] = self._jaccard + outputs["dice"] = self._dice + outputs["volume_difference"] = self._volume + + outputs["roi_ji"] = self._ove_rois["jaccard"].tolist() + outputs["roi_di"] = self._ove_rois["dice"].tolist() + outputs["roi_voldiff"] = self._vol_rois.tolist() + outputs["diff_file"] = os.path.abspath(self.inputs.out_file) return outputs @@ -378,38 +380,44 @@ class FuzzyOverlapInputSpec(BaseInterfaceInputSpec): in_ref = InputMultiPath( File(exists=True), mandatory=True, - desc='Reference image. Requires the same dimensions as in_tst.') + desc="Reference image. Requires the same dimensions as in_tst.", + ) in_tst = InputMultiPath( File(exists=True), mandatory=True, - desc='Test image. Requires the same dimensions as in_ref.') - in_mask = File(exists=True, desc='calculate overlap only within mask') + desc="Test image. Requires the same dimensions as in_ref.", + ) + in_mask = File(exists=True, desc="calculate overlap only within mask") weighting = traits.Enum( - 'none', - 'volume', - 'squared_vol', + "none", + "volume", + "squared_vol", usedefault=True, - desc=('\'none\': no class-overlap weighting is ' - 'performed. \'volume\': computed class-' - 'overlaps are weighted by class volume ' - '\'squared_vol\': computed class-overlaps ' - 'are weighted by the squared volume of ' - 'the class')) + desc=( + "'none': no class-overlap weighting is " + "performed. 
'volume': computed class-" + "overlaps are weighted by class volume " + "'squared_vol': computed class-overlaps " + "are weighted by the squared volume of " + "the class" + ), + ) out_file = File( - 'diff.nii', - desc='alternative name for resulting difference-map', - usedefault=True) + "diff.nii", + desc="alternative name for resulting difference-map", + usedefault=True, + ) class FuzzyOverlapOutputSpec(TraitedSpec): - jaccard = traits.Float(desc='Fuzzy Jaccard Index (fJI), all the classes') - dice = traits.Float(desc='Fuzzy Dice Index (fDI), all the classes') + jaccard = traits.Float(desc="Fuzzy Jaccard Index (fJI), all the classes") + dice = traits.Float(desc="Fuzzy Dice Index (fDI), all the classes") class_fji = traits.List( - traits.Float(), - desc='Array containing the fJIs of each computed class') + traits.Float(), desc="Array containing the fJIs of each computed class" + ) class_fdi = traits.List( - traits.Float(), - desc='Array containing the fDIs of each computed class') + traits.Float(), desc="Array containing the fDIs of each computed class" + ) class FuzzyOverlap(SimpleInterface): @@ -443,8 +451,9 @@ def _run_interface(self, runtime): # Data must have same shape if not refdata.shape == tstdata.shape: raise RuntimeError( - 'Size of "in_tst" %s must match that of "in_ref" %s.' % - (tstdata.shape, refdata.shape)) + 'Size of "in_tst" %s must match that of "in_ref" %s.' + % (tstdata.shape, refdata.shape) + ) ncomp = refdata.shape[-1] @@ -461,29 +470,33 @@ def _run_interface(self, runtime): tstdata = tstdata[mask] if np.any(refdata < 0.0): - iflogger.warning('Negative values encountered in "in_ref" input, ' - 'taking absolute values.') + iflogger.warning( + 'Negative values encountered in "in_ref" input, ' + "taking absolute values." + ) refdata = np.abs(refdata) if np.any(tstdata < 0.0): - iflogger.warning('Negative values encountered in "in_tst" input, ' - 'taking absolute values.') + iflogger.warning( + 'Negative values encountered in "in_tst" input, ' + "taking absolute values." + ) tstdata = np.abs(tstdata) if np.any(refdata > 1.0): - iflogger.warning('Values greater than 1.0 found in "in_ref" input, ' - 'scaling values.') + iflogger.warning( + 'Values greater than 1.0 found in "in_ref" input, ' "scaling values." + ) refdata /= refdata.max() if np.any(tstdata > 1.0): - iflogger.warning('Values greater than 1.0 found in "in_tst" input, ' - 'scaling values.') + iflogger.warning( + 'Values greater than 1.0 found in "in_tst" input, ' "scaling values." 
+ ) tstdata /= tstdata.max() - numerators = np.atleast_2d( - np.minimum(refdata, tstdata).reshape((-1, ncomp))) - denominators = np.atleast_2d( - np.maximum(refdata, tstdata).reshape((-1, ncomp))) + numerators = np.atleast_2d(np.minimum(refdata, tstdata).reshape((-1, ncomp))) + denominators = np.atleast_2d(np.maximum(refdata, tstdata).reshape((-1, ncomp))) jaccards = numerators.sum(axis=0) / denominators.sum(axis=0) @@ -493,16 +506,16 @@ def _run_interface(self, runtime): volumes = np.sum((refdata + tstdata) > 0, axis=1).reshape((-1, ncomp)) weights = 1.0 / volumes if self.inputs.weighting == "squared_vol": - weights = weights**2 + weights = weights ** 2 weights = weights / np.sum(weights) dices = 2.0 * jaccards / (jaccards + 1.0) # Fill-in the results object - self._results['jaccard'] = float(weights.dot(jaccards)) - self._results['dice'] = float(weights.dot(dices)) - self._results['class_fji'] = [float(v) for v in jaccards] - self._results['class_fdi'] = [float(v) for v in dices] + self._results["jaccard"] = float(weights.dot(jaccards)) + self._results["dice"] = float(weights.dot(dices)) + self._results["class_fji"] = [float(v) for v in jaccards] + self._results["class_fdi"] = [float(v) for v in dices] return runtime @@ -510,18 +523,21 @@ class ErrorMapInputSpec(BaseInterfaceInputSpec): in_ref = File( exists=True, mandatory=True, - desc="Reference image. Requires the same dimensions as in_tst.") + desc="Reference image. Requires the same dimensions as in_tst.", + ) in_tst = File( exists=True, mandatory=True, - desc="Test image. Requires the same dimensions as in_ref.") + desc="Test image. Requires the same dimensions as in_ref.", + ) mask = File(exists=True, desc="calculate overlap only within this mask.") metric = traits.Enum( "sqeuclidean", "euclidean", - desc='error map metric (as implemented in scipy cdist)', + desc="error map metric (as implemented in scipy cdist)", usedefault=True, - mandatory=True) + mandatory=True, + ) out_map = File(desc="Name for the output file") @@ -541,31 +557,34 @@ class ErrorMap(BaseInterface): >>> errormap.inputs.in_tst = 'cont2.nii' >>> res = errormap.run() # doctest: +SKIP """ + input_spec = ErrorMapInputSpec output_spec = ErrorMapOutputSpec - _out_file = '' + _out_file = "" def _run_interface(self, runtime): # Get two numpy data matrices nii_ref = nb.load(self.inputs.in_ref) ref_data = np.squeeze(nii_ref.get_data()) tst_data = np.squeeze(nb.load(self.inputs.in_tst).get_data()) - assert (ref_data.ndim == tst_data.ndim) + assert ref_data.ndim == tst_data.ndim # Load mask comps = 1 mapshape = ref_data.shape - if (ref_data.ndim == 4): + if ref_data.ndim == 4: comps = ref_data.shape[-1] mapshape = ref_data.shape[:-1] if isdefined(self.inputs.mask): msk = nb.load(self.inputs.mask).get_data() - if (mapshape != msk.shape): - raise RuntimeError("Mask should match volume shape, \ - mask is %s and volumes are %s" % - (list(msk.shape), list(mapshape))) + if mapshape != msk.shape: + raise RuntimeError( + "Mask should match volume shape, \ + mask is %s and volumes are %s" + % (list(msk.shape), list(mapshape)) + ) else: msk = np.ones(shape=mapshape) @@ -574,51 +593,52 @@ def _run_interface(self, runtime): msk_idxs = np.where(mskvector == 1) refvector = ref_data.reshape(-1, comps)[msk_idxs].astype(np.float32) tstvector = tst_data.reshape(-1, comps)[msk_idxs].astype(np.float32) - diffvector = (refvector - tstvector) + diffvector = refvector - tstvector # Scale the difference - if self.inputs.metric == 'sqeuclidean': - errvector = diffvector**2 - if (comps > 1): + if 
self.inputs.metric == "sqeuclidean": + errvector = diffvector ** 2 + if comps > 1: errvector = np.sum(errvector, axis=1) else: errvector = np.squeeze(errvector) - elif self.inputs.metric == 'euclidean': + elif self.inputs.metric == "euclidean": errvector = np.linalg.norm(diffvector, axis=1) errvectorexp = np.zeros_like( - mskvector, dtype=np.float32) # The default type is uint8 + mskvector, dtype=np.float32 + ) # The default type is uint8 errvectorexp[msk_idxs] = errvector # Get averaged error - self._distance = np.average( - errvector) # Only average the masked voxels + self._distance = np.average(errvector) # Only average the masked voxels errmap = errvectorexp.reshape(mapshape) hdr = nii_ref.header.copy() hdr.set_data_dtype(np.float32) - hdr['data_type'] = 16 + hdr["data_type"] = 16 hdr.set_data_shape(mapshape) if not isdefined(self.inputs.out_map): fname, ext = op.splitext(op.basename(self.inputs.in_tst)) - if ext == '.gz': + if ext == ".gz": fname, ext2 = op.splitext(fname) ext = ext2 + ext self._out_file = op.abspath(fname + "_errmap" + ext) else: self._out_file = self.inputs.out_map - nb.Nifti1Image(errmap.astype(np.float32), nii_ref.affine, - hdr).to_filename(self._out_file) + nb.Nifti1Image(errmap.astype(np.float32), nii_ref.affine, hdr).to_filename( + self._out_file + ) return runtime def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_map'] = self._out_file - outputs['distance'] = self._distance + outputs["out_map"] = self._out_file + outputs["distance"] = self._distance return outputs @@ -628,7 +648,7 @@ class SimilarityInputSpec(BaseInterfaceInputSpec): mask1 = File(exists=True, desc="3D volume") mask2 = File(exists=True, desc="3D volume") metric = traits.Either( - traits.Enum('cc', 'cr', 'crl1', 'mi', 'nmi', 'slr'), + traits.Enum("cc", "cr", "crl1", "mi", "nmi", "slr"), traits.Callable(), desc="""str or callable Cost-function for assessing image similarity. If a string, @@ -638,12 +658,14 @@ class SimilarityInputSpec(BaseInterfaceInputSpec): supervised log-likelihood ratio. 
If a callable, it should take a two-dimensional array representing the image joint histogram as an input and return a float.""", - usedefault=True) + usedefault=True, + ) class SimilarityOutputSpec(TraitedSpec): similarity = traits.List( - traits.Float(desc="Similarity between volume 1 and 2, frame by frame")) + traits.Float(desc="Similarity between volume 1 and 2, frame by frame") + ) class Similarity(NipyBaseInterface): @@ -671,7 +693,9 @@ class Similarity(NipyBaseInterface): output_spec = SimilarityOutputSpec def _run_interface(self, runtime): - from nipy.algorithms.registration.histogram_registration import HistogramRegistration + from nipy.algorithms.registration.histogram_registration import ( + HistogramRegistration, + ) from nipy.algorithms.registration.affine import Affine vol1_nii = nb.load(self.inputs.volume1) @@ -688,7 +712,8 @@ def _run_interface(self, runtime): if dims < 2 or dims > 4: raise RuntimeError( - 'Image dimensions not supported (detected %dD file)' % dims) + "Image dimensions not supported (detected %dD file)" % dims + ) if isdefined(self.inputs.mask1): mask1 = nb.load(self.inputs.mask1).get_data() == 1 @@ -708,12 +733,13 @@ def _run_interface(self, runtime): to_img=ts2, similarity=self.inputs.metric, from_mask=mask1, - to_mask=mask2) + to_mask=mask2, + ) self._similarity.append(histreg.eval(Affine())) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['similarity'] = self._similarity + outputs["similarity"] = self._similarity return outputs diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py index fc44e8738f..723b4d83d4 100644 --- a/nipype/algorithms/misc.py +++ b/nipype/algorithms/misc.py @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -''' +""" Miscellaneous algorithms -''' +""" import os import os.path as op @@ -16,36 +16,49 @@ from .. import logging from . import metrics as nam from ..interfaces.base import ( - BaseInterface, traits, TraitedSpec, File, InputMultiPath, OutputMultiPath, - BaseInterfaceInputSpec, isdefined, DynamicTraitedSpec, Undefined) + BaseInterface, + traits, + TraitedSpec, + File, + InputMultiPath, + OutputMultiPath, + BaseInterfaceInputSpec, + isdefined, + DynamicTraitedSpec, + Undefined, +) from ..utils.filemanip import fname_presuffix, split_filename, ensure_list from ..utils import NUMPY_MMAP from . import confounds -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") class PickAtlasInputSpec(BaseInterfaceInputSpec): atlas = File( - exists=True, - desc="Location of the atlas that will be used.", - mandatory=True) + exists=True, desc="Location of the atlas that will be used.", mandatory=True + ) labels = traits.Either( traits.Int, traits.List(traits.Int), - desc=("Labels of regions that will be included in the mask. Must be\ - compatible with the atlas used."), - mandatory=True) + desc=( + "Labels of regions that will be included in the mask. Must be\ + compatible with the atlas used." 
+ ), + mandatory=True, + ) hemi = traits.Enum( - 'both', - 'left', - 'right', + "both", + "left", + "right", desc="Restrict the mask to only one hemisphere: left or right", - usedefault=True) + usedefault=True, + ) dilation_size = traits.Int( usedefault=True, - desc="Defines how much the mask will be dilated (expanded in 3D).") + desc="Defines how much the mask will be dilated (expanded in 3D).", + ) output_file = File(desc="Where to store the output mask.") @@ -73,7 +86,8 @@ def _gen_output_filename(self): fname=self.inputs.atlas, suffix="_mask", newpath=os.getcwd(), - use_ext=True) + use_ext=True, + ) else: output = os.path.realpath(self.inputs.output_file) return output @@ -89,43 +103,50 @@ def _get_brodmann_area(self): labels = self.inputs.labels for lab in labels: newdata[origdata == lab] = 1 - if self.inputs.hemi == 'right': - newdata[int(floor(float(origdata.shape[0]) / 2)):, :, :] = 0 - elif self.inputs.hemi == 'left': - newdata[:int(ceil(float(origdata.shape[0]) / 2)), :, :] = 0 + if self.inputs.hemi == "right": + newdata[int(floor(float(origdata.shape[0]) / 2)) :, :, :] = 0 + elif self.inputs.hemi == "left": + newdata[: int(ceil(float(origdata.shape[0]) / 2)), :, :] = 0 if self.inputs.dilation_size != 0: from scipy.ndimage.morphology import grey_dilation - newdata = grey_dilation(newdata, - (2 * self.inputs.dilation_size + 1, - 2 * self.inputs.dilation_size + 1, - 2 * self.inputs.dilation_size + 1)) + + newdata = grey_dilation( + newdata, + ( + 2 * self.inputs.dilation_size + 1, + 2 * self.inputs.dilation_size + 1, + 2 * self.inputs.dilation_size + 1, + ), + ) return nb.Nifti1Image(newdata, nii.affine, nii.header) def _list_outputs(self): outputs = self._outputs().get() - outputs['mask_file'] = self._gen_output_filename() + outputs["mask_file"] = self._gen_output_filename() return outputs class SimpleThresholdInputSpec(BaseInterfaceInputSpec): volumes = InputMultiPath( - File(exists=True), desc='volumes to be thresholded', mandatory=True) + File(exists=True), desc="volumes to be thresholded", mandatory=True + ) threshold = traits.Float( - desc='volumes to be thresholdedeverything below this value will be set\ - to zero', - mandatory=True) + desc="everything below this value will be set\ to zero", + mandatory=True, + ) class SimpleThresholdOutputSpec(TraitedSpec): - thresholded_volumes = OutputMultiPath( - File(exists=True), desc="thresholded volumes") + thresholded_volumes = OutputMultiPath(File(exists=True), desc="thresholded volumes") class SimpleThreshold(BaseInterface): """Applies a threshold to input volumes """ + input_spec = SimpleThresholdInputSpec output_spec = SimpleThresholdOutputSpec @@ -141,7 +162,7 @@ def _run_interface(self, runtime): new_img = nb.Nifti1Image(thresholded_map, img.affine, img.header) _, base, _ = split_filename(fname) - nb.save(new_img, base + '_thresholded.nii') + nb.save(new_img, base + "_thresholded.nii") return runtime @@ -151,21 +172,24 @@ def _list_outputs(self): for fname in self.inputs.volumes: _, base, _ = split_filename(fname) outputs["thresholded_volumes"].append( - os.path.abspath(base + '_thresholded.nii')) + os.path.abspath(base + "_thresholded.nii") + ) return outputs class ModifyAffineInputSpec(BaseInterfaceInputSpec): volumes = InputMultiPath( File(exists=True), - desc='volumes which affine matrices will be modified', - mandatory=True) + desc="volumes whose affine matrices will be modified", + mandatory=True, + ) transformation_matrix = traits.Array( value=np.eye(4), shape=(4, 4), desc="transformation
matrix that will left-multiply the\ affine matrix", - usedefault=True) + usedefault=True, + ) class ModifyAffineOutputSpec(TraitedSpec): @@ -176,6 +200,7 @@ class ModifyAffine(BaseInterface): """Left multiplies the affine matrix with a specified matrix. Saves the volume as a nifti file. """ + input_spec = ModifyAffineInputSpec output_spec = ModifyAffineOutputSpec @@ -192,23 +217,24 @@ def _run_interface(self, runtime): nb.save( nb.Nifti1Image(img.get_data(), affine, img.header), - self._gen_output_filename(fname)) + self._gen_output_filename(fname), + ) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['transformed_volumes'] = [] + outputs["transformed_volumes"] = [] for fname in self.inputs.volumes: - outputs['transformed_volumes'].append( - self._gen_output_filename(fname)) + outputs["transformed_volumes"].append(self._gen_output_filename(fname)) return outputs class CreateNiftiInputSpec(BaseInterfaceInputSpec): data_file = File(exists=True, mandatory=True, desc="ANALYZE img file") header_file = File( - exists=True, mandatory=True, desc="corresponding ANALYZE hdr file") + exists=True, mandatory=True, desc="corresponding ANALYZE hdr file" + ) affine = traits.Array(desc="affine transformation array") @@ -219,6 +245,7 @@ class CreateNiftiOutputSpec(TraitedSpec): class CreateNifti(BaseInterface): """Creates a nifti volume """ + input_spec = CreateNiftiInputSpec output_spec = CreateNiftiOutputSpec @@ -227,7 +254,7 @@ def _gen_output_file_name(self): return os.path.abspath(base + ".nii") def _run_interface(self, runtime): - with open(self.inputs.header_file, 'rb') as hdr_file: + with open(self.inputs.header_file, "rb") as hdr_file: hdr = nb.AnalyzeHeader.from_fileobj(hdr_file) if isdefined(self.inputs.affine): @@ -235,7 +262,7 @@ def _run_interface(self, runtime): else: affine = None - with open(self.inputs.data_file, 'rb') as data_file: + with open(self.inputs.data_file, "rb") as data_file: data = hdr.data_from_fileobj(data_file) img = nb.Nifti1Image(data, affine, hdr) @@ -245,7 +272,7 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['nifti_file'] = self._gen_output_file_name() + outputs["nifti_file"] = self._gen_output_file_name() return outputs @@ -270,6 +297,7 @@ class Gunzip(BaseInterface): >>> os.unlink('tpms_msk.nii') """ + input_spec = GunzipInputSpec output_spec = GunzipOutputSpec @@ -282,14 +310,15 @@ def _gen_output_file_name(self): def _run_interface(self, runtime): import gzip import shutil - with gzip.open(self.inputs.in_file, 'rb') as in_file: - with open(self._gen_output_file_name(), 'wb') as out_file: + + with gzip.open(self.inputs.in_file, "rb") as in_file: + with open(self._gen_output_file_name(), "wb") as out_file: shutil.copyfileobj(in_file, out_file) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = self._gen_output_file_name() + outputs["out_file"] = self._gen_output_file_name() return outputs @@ -307,27 +336,31 @@ def matlab2csv(in_array, name, reshape): if reshape: if len(np.shape(output_array)) > 1: output_array = np.reshape( - output_array, - (np.shape(output_array)[0] * np.shape(output_array)[1], 1)) + output_array, (np.shape(output_array)[0] * np.shape(output_array)[1], 1) + ) iflogger.info(np.shape(output_array)) - output_name = op.abspath(name + '.csv') - np.savetxt(output_name, output_array, delimiter=',') + output_name = op.abspath(name + ".csv") + np.savetxt(output_name, output_array, delimiter=",") return output_name
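Note (supplementary annotation, not part of the patch): when reshape is requested, matlab2csv flattens a 2-D array row-major into a (rows * cols, 1) column before writing, so each value becomes one CSV row. A small illustration with a made-up array; demo.csv is a placeholder name:

import numpy as np

arr = np.array([[1, 2, 3], [4, 5, 6]])
# same reshape the helper performs for a 2-D input
flat = np.reshape(arr, (arr.shape[0] * arr.shape[1], 1))
print(flat.ravel().tolist())  # [1, 2, 3, 4, 5, 6] -> one value per CSV row
np.savetxt("demo.csv", flat, delimiter=",")  # same writer call as the helper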
class Matlab2CSVInputSpec(TraitedSpec): - in_file = File(exists=True, mandatory=True, desc='Input MATLAB .mat file') + in_file = File(exists=True, mandatory=True, desc="Input MATLAB .mat file") reshape_matrix = traits.Bool( True, usedefault=True, - desc='The output of this interface is meant for R, so matrices will be\ - reshaped to vectors by default.') + desc="The output of this interface is meant for R, so matrices will be\ + reshaped to vectors by default.", + ) class Matlab2CSVOutputSpec(TraitedSpec): csv_files = OutputMultiPath( - File(desc='Output CSV files for each variable saved in the input .mat\ - file')) + File( + desc="Output CSV files for each variable saved in the input .mat\ + file" + ) + ) class Matlab2CSV(BaseInterface): @@ -345,11 +378,13 @@ class Matlab2CSV(BaseInterface): >>> mat2csv.inputs.in_file = 'cmatrix.mat' >>> mat2csv.run() # doctest: +SKIP """ + input_spec = Matlab2CSVInputSpec output_spec = Matlab2CSVOutputSpec def _run_interface(self, runtime): import scipy.io as sio + in_dict = sio.loadmat(op.abspath(self.inputs.in_file)) # Check if the file has multiple variables in it. If it does, loop @@ -359,87 +394,100 @@ def _run_interface(self, runtime): saved_variables = list() for key in list(in_dict.keys()): - if not key.startswith('__'): + if not key.startswith("__"): if isinstance(in_dict[key][0], np.ndarray): saved_variables.append(key) else: - iflogger.info('One of the keys in the input file, %s, is ' - 'not a Numpy array', key) + iflogger.info( + "One of the keys in the input file, %s, is " + "not a Numpy array", + key, + ) if len(saved_variables) > 1: - iflogger.info('%i variables found:', len(saved_variables)) + iflogger.info("%i variables found:", len(saved_variables)) iflogger.info(saved_variables) for variable in saved_variables: - iflogger.info('...Converting %s - type %s - to CSV', variable, - type(in_dict[variable])) - matlab2csv(in_dict[variable], variable, - self.inputs.reshape_matrix) + iflogger.info( + "...Converting %s - type %s - to CSV", + variable, + type(in_dict[variable]), + ) + matlab2csv(in_dict[variable], variable, self.inputs.reshape_matrix) elif len(saved_variables) == 1: _, name, _ = split_filename(self.inputs.in_file) variable = saved_variables[0] - iflogger.info('Single variable found %s, type %s:', variable, - type(in_dict[variable])) - iflogger.info('...Converting %s to CSV from %s', variable, - self.inputs.in_file) + iflogger.info( + "Single variable found %s, type %s:", variable, type(in_dict[variable]) + ) + iflogger.info( + "...Converting %s to CSV from %s", variable, self.inputs.in_file + ) matlab2csv(in_dict[variable], name, self.inputs.reshape_matrix) else: - iflogger.error('No values in the MATLAB file?!') + iflogger.error("No values in the MATLAB file?!") return runtime def _list_outputs(self): import scipy.io as sio + outputs = self.output_spec().get() in_dict = sio.loadmat(op.abspath(self.inputs.in_file)) saved_variables = list() for key in list(in_dict.keys()): - if not key.startswith('__'): + if not key.startswith("__"): if isinstance(in_dict[key][0], np.ndarray): saved_variables.append(key) else: - iflogger.error('One of the keys in the input file, %s, is ' - 'not a Numpy array', key) + iflogger.error( + "One of the keys in the input file, %s, is " + "not a Numpy array", + key, + ) if len(saved_variables) > 1: - outputs['csv_files'] = replaceext(saved_variables, '.csv') + outputs["csv_files"] = replaceext(saved_variables, ".csv") elif len(saved_variables) == 1: _, name, ext = split_filename(self.inputs.in_file) - 
outputs['csv_files'] = op.abspath(name + '.csv') + outputs["csv_files"] = op.abspath(name + ".csv") else: - iflogger.error('No values in the MATLAB file?!') + iflogger.error("No values in the MATLAB file?!") return outputs def merge_csvs(in_list): for idx, in_file in enumerate(in_list): try: - in_array = np.loadtxt(in_file, delimiter=',') + in_array = np.loadtxt(in_file, delimiter=",") except ValueError: try: - in_array = np.loadtxt(in_file, delimiter=',', skiprows=1) + in_array = np.loadtxt(in_file, delimiter=",", skiprows=1) except ValueError: - with open(in_file, 'r') as first: + with open(in_file, "r") as first: header_line = first.readline() - header_list = header_line.split(',') + header_list = header_line.split(",") n_cols = len(header_list) try: in_array = np.loadtxt( in_file, - delimiter=',', + delimiter=",", skiprows=1, - usecols=list(range(1, n_cols))) + usecols=list(range(1, n_cols)), + ) except ValueError: in_array = np.loadtxt( in_file, - delimiter=',', + delimiter=",", skiprows=1, - usecols=list(range(1, n_cols - 1))) + usecols=list(range(1, n_cols - 1)), + ) if idx == 0: out_array = in_array else: out_array = np.dstack((out_array, in_array)) out_array = np.squeeze(out_array) - iflogger.info('Final output array shape:') + iflogger.info("Final output array shape:") iflogger.info(np.shape(out_array)) return out_array @@ -447,16 +495,17 @@ def merge_csvs(in_list): def remove_identical_paths(in_files): import os.path as op from ..utils.filemanip import split_filename + if len(in_files) > 1: out_names = list() commonprefix = op.commonprefix(in_files) - lastslash = commonprefix.rfind('/') - commonpath = commonprefix[0:(lastslash + 1)] + lastslash = commonprefix.rfind("/") + commonpath = commonprefix[0 : (lastslash + 1)] for fileidx, in_file in enumerate(in_files): path, name, ext = split_filename(in_file) in_file = op.join(path, name) - name = in_file.replace(commonpath, '') - name = name.replace('_subject_id_', '') + name = in_file.replace(commonpath, "") + name = name.replace("_subject_id_", "") out_names.append(name) else: path, name, ext = split_filename(in_files[0]) @@ -467,7 +516,7 @@ def remove_identical_paths(in_files): def maketypelist(rowheadings, shape, extraheadingBool, extraheading): typelist = [] if rowheadings: - typelist.append(('heading', 'a40')) + typelist.append(("heading", "a40")) if len(shape) > 1: for idx in range(1, (min(shape) + 1)): typelist.append((str(idx), float)) @@ -475,29 +524,28 @@ def maketypelist(rowheadings, shape, extraheadingBool, extraheading): for idx in range(1, (shape[0] + 1)): typelist.append((str(idx), float)) if extraheadingBool: - typelist.append((extraheading, 'a40')) + typelist.append((extraheading, "a40")) iflogger.info(typelist) return typelist -def makefmtlist(output_array, typelist, rowheadingsBool, shape, - extraheadingBool): +def makefmtlist(output_array, typelist, rowheadingsBool, shape, extraheadingBool): fmtlist = [] if rowheadingsBool: - fmtlist.append('%s') + fmtlist.append("%s") if len(shape) > 1: output = np.zeros(max(shape), typelist) for idx in range(1, min(shape) + 1): output[str(idx)] = output_array[:, idx - 1] - fmtlist.append('%f') + fmtlist.append("%f") else: output = np.zeros(1, typelist) for idx in range(1, len(output_array) + 1): output[str(idx)] = output_array[idx - 1] - fmtlist.append('%f') + fmtlist.append("%f") if extraheadingBool: - fmtlist.append('%s') - fmt = ','.join(fmtlist) + fmtlist.append("%s") + fmt = ",".join(fmtlist) return fmt, output @@ -505,34 +553,37 @@ class 
MergeCSVFilesInputSpec(TraitedSpec): in_files = InputMultiPath( File(exists=True), mandatory=True, - desc='Input comma-separated value (CSV) files') + desc="Input comma-separated value (CSV) files", + ) out_file = File( - 'merged.csv', - usedefault=True, - desc='Output filename for merged CSV file') + "merged.csv", usedefault=True, desc="Output filename for merged CSV file" + ) column_headings = traits.List( traits.Str, - desc='List of column headings to save in merged CSV file\ + desc="List of column headings to save in merged CSV file\ (must be equal to number of input files). If left undefined, these\ - will be pulled from the input filenames.') + will be pulled from the input filenames.", + ) row_headings = traits.List( traits.Str, - desc='List of row headings to save in merged CSV file\ - (must be equal to number of rows in the input files).') + desc="List of row headings to save in merged CSV file\ (must be equal to number of rows in the input files).", + ) row_heading_title = traits.Str( - 'label', + "label", usedefault=True, - desc='Column heading for the row headings\ - added') - extra_column_heading = traits.Str( - desc='New heading to add for the added field.') + desc="Column heading for the row headings\ added", + ) + extra_column_heading = traits.Str(desc="New heading to add for the added field.") extra_field = traits.Str( - desc='New field to add to each row. This is useful for saving the\ - group or subject ID in the file.') + desc="New field to add to each row. This is useful for saving the\ group or subject ID in the file." + ) class MergeCSVFilesOutputSpec(TraitedSpec): - csv_file = File(desc='Output CSV file containing columns ') + csv_file = File(desc="Output CSV file containing columns") class MergeCSVFiles(BaseInterface): @@ -553,51 +604,49 @@ class MergeCSVFiles(BaseInterface): >>> mat2csv.inputs.column_headings = ['degree','clustering'] >>> mat2csv.run() # doctest: +SKIP """ + input_spec = MergeCSVFilesInputSpec output_spec = MergeCSVFilesOutputSpec def _run_interface(self, runtime): extraheadingBool = False - extraheading = '' + extraheading = "" rowheadingsBool = False """ This block defines the column headings. """ if isdefined(self.inputs.column_headings): - iflogger.info('Column headings have been provided:') + iflogger.info("Column headings have been provided:") headings = self.inputs.column_headings else: - iflogger.info( - 'Column headings not provided! Pulled from input filenames:') + iflogger.info("Column headings not provided! Pulled from input filenames:") headings = remove_identical_paths(self.inputs.in_files) if isdefined(self.inputs.extra_field): if isdefined(self.inputs.extra_column_heading): extraheading = self.inputs.extra_column_heading - iflogger.info('Extra column heading provided: %s', - extraheading) + iflogger.info("Extra column heading provided: %s", extraheading) else: - extraheading = 'type' - iflogger.info( - 'Extra column heading was not defined. Using "type"') + extraheading = "type" + iflogger.info('Extra column heading was not defined. Using "type"') headings.append(extraheading) extraheadingBool = True if len(self.inputs.in_files) == 1: - iflogger.warning('Only one file input!') + iflogger.warning("Only one file input!") if isdefined(self.inputs.row_headings): - iflogger.info('Row headings have been provided. Adding "labels"' - 'column header.') + iflogger.info( + 'Row headings have been provided. Adding "labels"' " column header."
+ ) prefix = '"{p}","'.format(p=self.inputs.row_heading_title) - csv_headings = prefix + '","'.join( - itertools.chain(headings)) + '"\n' + csv_headings = prefix + '","'.join(itertools.chain(headings)) + '"\n' rowheadingsBool = True else: - iflogger.info('Row headings have not been provided.') + iflogger.info("Row headings have not been provided.") csv_headings = '"' + '","'.join(itertools.chain(headings)) + '"\n' - iflogger.info('Final Headings:') + iflogger.info("Final Headings:") iflogger.info(csv_headings) """ Next we merge the arrays and define the output text file @@ -605,18 +654,18 @@ def _run_interface(self, runtime): output_array = merge_csvs(self.inputs.in_files) _, name, ext = split_filename(self.inputs.out_file) - if not ext == '.csv': - ext = '.csv' + if not ext == ".csv": + ext = ".csv" out_file = op.abspath(name + ext) - with open(out_file, 'w') as file_handle: + with open(out_file, "w") as file_handle: file_handle.write(csv_headings) shape = np.shape(output_array) - typelist = maketypelist(rowheadingsBool, shape, extraheadingBool, - extraheading) - fmt, output = makefmtlist(output_array, typelist, rowheadingsBool, - shape, extraheadingBool) + typelist = maketypelist(rowheadingsBool, shape, extraheadingBool, extraheading) + fmt, output = makefmtlist( + output_array, typelist, rowheadingsBool, shape, extraheadingBool + ) if rowheadingsBool: row_heading_list = self.inputs.row_headings @@ -624,8 +673,8 @@ def _run_interface(self, runtime): for row_heading in row_heading_list: row_heading_with_quotes = '"' + row_heading + '"' row_heading_list_with_quotes.append(row_heading_with_quotes) - row_headings = np.array(row_heading_list_with_quotes, dtype='|S40') - output['heading'] = row_headings + row_headings = np.array(row_heading_list_with_quotes, dtype="|S40") + output["heading"] = row_headings if isdefined(self.inputs.extra_field): extrafieldlist = [] @@ -639,39 +688,37 @@ def _run_interface(self, runtime): output[extraheading] = extrafieldlist iflogger.info(output) iflogger.info(fmt) - with open(out_file, 'a') as file_handle: - np.savetxt(file_handle, output, fmt, delimiter=',') + with open(out_file, "a") as file_handle: + np.savetxt(file_handle, output, fmt, delimiter=",") return runtime def _list_outputs(self): outputs = self.output_spec().get() _, name, ext = split_filename(self.inputs.out_file) - if not ext == '.csv': - ext = '.csv' + if not ext == ".csv": + ext = ".csv" out_file = op.abspath(name + ext) - outputs['csv_file'] = out_file + outputs["csv_file"] = out_file return outputs class AddCSVColumnInputSpec(TraitedSpec): in_file = File( - exists=True, - mandatory=True, - desc='Input comma-separated value (CSV) files') + exists=True, mandatory=True, desc="Input comma-separated value (CSV) files" + ) out_file = File( - 'extra_heading.csv', - usedefault=True, - desc='Output filename for merged CSV file') - extra_column_heading = traits.Str( - desc='New heading to add for the added field.') + "extra_heading.csv", usedefault=True, desc="Output filename for merged CSV file" + ) + extra_column_heading = traits.Str(desc="New heading to add for the added field.") extra_field = traits.Str( - desc='New field to add to each row. This is useful for saving the\ - group or subject ID in the file.') + desc="New field to add to each row. This is useful for saving the\ + group or subject ID in the file." 
+ ) class AddCSVColumnOutputSpec(TraitedSpec): - csv_file = File(desc='Output CSV file containing columns ') + csv_file = File(desc="Output CSV file containing columns ") class AddCSVColumn(BaseInterface): @@ -687,25 +734,25 @@ class AddCSVColumn(BaseInterface): >>> addcol.inputs.extra_field = 'male' >>> addcol.run() # doctest: +SKIP """ + input_spec = AddCSVColumnInputSpec output_spec = AddCSVColumnOutputSpec def _run_interface(self, runtime): - in_file = open(self.inputs.in_file, 'r') + in_file = open(self.inputs.in_file, "r") _, name, ext = split_filename(self.inputs.out_file) - if not ext == '.csv': - ext = '.csv' + if not ext == ".csv": + ext = ".csv" out_file = op.abspath(name + ext) - out_file = open(out_file, 'w') + out_file = open(out_file, "w") firstline = in_file.readline() - firstline = firstline.replace('\n', '') - new_firstline = firstline + ',"' + \ - self.inputs.extra_column_heading + '"\n' + firstline = firstline.replace("\n", "") + new_firstline = firstline + ',"' + self.inputs.extra_column_heading + '"\n' out_file.write(new_firstline) for line in in_file: - new_line = line.replace('\n', '') - new_line = new_line + ',' + self.inputs.extra_field + '\n' + new_line = line.replace("\n", "") + new_line = new_line + "," + self.inputs.extra_field + "\n" out_file.write(new_line) in_file.close() out_file.close() @@ -714,16 +761,15 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self.output_spec().get() _, name, ext = split_filename(self.inputs.out_file) - if not ext == '.csv': - ext = '.csv' + if not ext == ".csv": + ext = ".csv" out_file = op.abspath(name + ext) - outputs['csv_file'] = out_file + outputs["csv_file"] = out_file return outputs class AddCSVRowInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): - in_file = File( - mandatory=True, desc='Input comma-separated value (CSV) files') + in_file = File(mandatory=True, desc="Input comma-separated value (CSV) files") _outputs = traits.Dict(traits.Any, value={}, usedefault=True) def __setattr__(self, key, value): @@ -738,7 +784,7 @@ def __setattr__(self, key, value): class AddCSVRowOutputSpec(TraitedSpec): - csv_file = File(desc='Output CSV file containing rows ') + csv_file = File(desc="Output CSV file containing rows ") class AddCSVRow(BaseInterface): @@ -765,6 +811,7 @@ class AddCSVRow(BaseInterface): >>> addrow.inputs.list_of_values = [ 0.4, 0.7, 0.3 ] >>> addrow.run() # doctest: +SKIP """ + input_spec = AddCSVRowInputSpec output_spec = AddCSVRowOutputSpec @@ -789,34 +836,40 @@ def _run_interface(self, runtime): try: import pandas as pd except ImportError as e: - raise ImportError('This interface requires pandas ' - '(http://pandas.pydata.org/) to run.') from e + raise ImportError( + "This interface requires pandas " "(http://pandas.pydata.org/) to run." 
+            ) from e

         try:
             from filelock import SoftFileLock
+
             self._have_lock = True
         except ImportError:
             from warnings import warn
-            warn(('Python module filelock was not found: AddCSVRow will not be'
-                  ' thread-safe in multi-processor execution'))
+
+            warn(
+                (
+                    "Python module filelock was not found: AddCSVRow will not be"
+                    " thread-safe in multi-processor execution"
+                )
+            )

         input_dict = {}
         for key, val in list(self.inputs._outputs.items()):
             # expand lists to several columns
-            if key == 'trait_added' and val in self.inputs.copyable_trait_names(
-            ):
+            if key == "trait_added" and val in self.inputs.copyable_trait_names():
                 continue

             if isinstance(val, list):
                 for i, v in enumerate(val):
-                    input_dict['%s_%d' % (key, i)] = v
+                    input_dict["%s_%d" % (key, i)] = v
             else:
                 input_dict[key] = val

         df = pd.DataFrame([input_dict])

         if self._have_lock:
-            self._lock = SoftFileLock('%s.lock' % self.inputs.in_file)
+            self._lock = SoftFileLock("%s.lock" % self.inputs.in_file)

             # Acquire lock
             self._lock.acquire()
@@ -825,7 +878,7 @@ def _run_interface(self, runtime):
             formerdf = pd.read_csv(self.inputs.in_file, index_col=0)
             df = pd.concat([formerdf, df], ignore_index=True)

-        with open(self.inputs.in_file, 'w') as f:
+        with open(self.inputs.in_file, "w") as f:
             df.to_csv(f)

         if self._have_lock:
@@ -835,7 +888,7 @@ def _run_interface(self, runtime):

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['csv_file'] = self.inputs.in_file
+        outputs["csv_file"] = self.inputs.in_file
         return outputs

     def _outputs(self):
@@ -849,16 +902,18 @@ class CalculateNormalizedMomentsInputSpec(TraitedSpec):
     timeseries_file = File(
         exists=True,
         mandatory=True,
-        desc='Text file with timeseries in columns and timepoints in rows,\
-        whitespace separated')
+        desc="Text file with timeseries in columns and timepoints in rows,\
+        whitespace separated",
+    )
     moment = traits.Int(
         mandatory=True,
         desc="Define which moment should be calculated, 3 for skewness, 4 for\
-        kurtosis.")
+        kurtosis.",
+    )


 class CalculateNormalizedMomentsOutputSpec(TraitedSpec):
-    moments = traits.List(traits.Float(), desc='Moments')
+    moments = traits.List(traits.Float(), desc="Moments")


 class CalculateNormalizedMoments(BaseInterface):
@@ -873,18 +928,18 @@ class CalculateNormalizedMoments(BaseInterface):
     >>> skew.inputs.timeseries_file = 'timeseries.txt'
     >>> skew.run() # doctest: +SKIP
     """
+
     input_spec = CalculateNormalizedMomentsInputSpec
     output_spec = CalculateNormalizedMomentsOutputSpec

     def _run_interface(self, runtime):
-        self._moments = calc_moments(self.inputs.timeseries_file,
-                                     self.inputs.moment)
+        self._moments = calc_moments(self.inputs.timeseries_file, self.inputs.moment)
         return runtime

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['skewness'] = self._moments
+        outputs["moments"] = self._moments  # output spec defines "moments", not "skewness"
         return outputs


@@ -897,42 +952,45 @@ def calc_moments(timeseries_file, moment):

     """
     import scipy.stats as stats
+
     timeseries = np.genfromtxt(timeseries_file)

     m2 = stats.moment(timeseries, 2, axis=0)
     m3 = stats.moment(timeseries, moment, axis=0)
-    zero = (m2 == 0)
-    return np.where(zero, 0, m3 / m2**(moment / 2.0))
+    zero = m2 == 0
+    return np.where(zero, 0, m3 / m2 ** (moment / 2.0))


 class AddNoiseInputSpec(TraitedSpec):
     in_file = File(
         exists=True,
         mandatory=True,
-        desc='input image that will be corrupted with noise')
+        desc="input image that will be corrupted with noise",
+    )
     in_mask = File(
         exists=True,
-        desc=('input mask, voxels outside this mask '
-              'will be considered background'))
-    snr = traits.Float(10.0, desc='desired
output SNR in dB', usedefault=True) + desc=("input mask, voxels outside this mask " "will be considered background"), + ) + snr = traits.Float(10.0, desc="desired output SNR in dB", usedefault=True) dist = traits.Enum( - 'normal', - 'rician', + "normal", + "rician", usedefault=True, mandatory=True, - desc=('desired noise distribution')) + desc=("desired noise distribution"), + ) bg_dist = traits.Enum( - 'normal', - 'rayleigh', + "normal", + "rayleigh", usedefault=True, mandatory=True, - desc=('desired noise distribution, currently ' - 'only normal is implemented')) - out_file = File(desc='desired output filename') + desc=("desired noise distribution, currently " "only normal is implemented"), + ) + out_file = File(desc="desired output filename") class AddNoiseOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='corrupted image') + out_file = File(exists=True, desc="corrupted image") class AddNoise(BaseInterface): @@ -950,6 +1008,7 @@ class AddNoise(BaseInterface): >>> noise.run() # doctest: +SKIP """ + input_spec = AddNoiseInputSpec output_spec = AddNoiseOutputSpec @@ -968,7 +1027,8 @@ def _run_interface(self, runtime): mask=in_mask, snr_db=snr, dist=self.inputs.dist, - bg_dist=self.inputs.bg_dist) + bg_dist=self.inputs.bg_dist, + ) res_im = nb.Nifti1Image(result, in_image.affine, in_image.header) res_im.to_filename(self._gen_output_filename()) return runtime @@ -976,8 +1036,7 @@ def _run_interface(self, runtime): def _gen_output_filename(self): if not isdefined(self.inputs.out_file): _, base, ext = split_filename(self.inputs.in_file) - out_file = os.path.abspath('%s_SNR%03.2f%s' % - (base, self.inputs.snr, ext)) + out_file = os.path.abspath("%s_SNR%03.2f%s" % (base, self.inputs.snr, ext)) else: out_file = self.inputs.out_file @@ -985,20 +1044,16 @@ def _gen_output_filename(self): def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self._gen_output_filename() + outputs["out_file"] = self._gen_output_filename() return outputs - def gen_noise(self, - image, - mask=None, - snr_db=10.0, - dist='normal', - bg_dist='normal'): + def gen_noise(self, image, mask=None, snr_db=10.0, dist="normal", bg_dist="normal"): """ Generates a copy of an image with a certain amount of added gaussian noise (rayleigh for background in mask) """ from math import sqrt + snr = sqrt(np.power(10.0, snr_db / 10.0)) if mask is None: @@ -1012,36 +1067,37 @@ def gen_noise(self, signal = image[mask > 0].reshape(-1) - if dist == 'normal': + if dist == "normal": signal = signal - signal.mean() sigma_n = sqrt(signal.var() / snr) noise = np.random.normal(size=image.shape, scale=sigma_n) - if (np.any(mask == 0)) and (bg_dist == 'rayleigh'): + if (np.any(mask == 0)) and (bg_dist == "rayleigh"): bg_noise = np.random.rayleigh(size=image.shape, scale=sigma_n) noise[mask == 0] = bg_noise[mask == 0] im_noise = image + noise - elif dist == 'rician': + elif dist == "rician": sigma_n = signal.mean() / snr n_1 = np.random.normal(size=image.shape, scale=sigma_n) n_2 = np.random.normal(size=image.shape, scale=sigma_n) stde_1 = n_1 / sqrt(2.0) stde_2 = n_2 / sqrt(2.0) - im_noise = np.sqrt((image + stde_1)**2 + (stde_2)**2) + im_noise = np.sqrt((image + stde_1) ** 2 + (stde_2) ** 2) else: - raise NotImplementedError(('Only normal and rician distributions ' - 'are supported')) + raise NotImplementedError( + ("Only normal and rician distributions " "are supported") + ) return im_noise class NormalizeProbabilityMapSetInputSpec(TraitedSpec): in_files = InputMultiPath( - File(exists=True, mandatory=True, 
desc='The tpms to be normalized'))
-    in_mask = File(
-        exists=True, desc='Masked voxels must sum up 1.0, 0.0 otherwise.')
+        File(exists=True, mandatory=True, desc="The tpms to be normalized")
+    )
+    in_mask = File(exists=True, desc="Masked voxels must sum up to 1.0, 0.0 otherwise.")


 class NormalizeProbabilityMapSetOutputSpec(TraitedSpec):
@@ -1065,6 +1121,7 @@ class NormalizeProbabilityMapSet(BaseInterface):
     >>> normalize.inputs.in_mask = 'tpms_msk.nii.gz'
     >>> normalize.run() # doctest: +SKIP
     """
+
     input_spec = NormalizeProbabilityMapSetInputSpec
     output_spec = NormalizeProbabilityMapSetOutputSpec

@@ -1079,23 +1136,24 @@ def _run_interface(self, runtime):

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['out_files'] = self._out_filenames
+        outputs["out_files"] = self._out_filenames
         return outputs


 class SplitROIsInputSpec(TraitedSpec):
-    in_file = File(exists=True, mandatory=True, desc='file to be splitted')
-    in_mask = File(exists=True, desc='only process files inside mask')
-    roi_size = traits.Tuple(
-        traits.Int, traits.Int, traits.Int, desc='desired ROI size')
+    in_file = File(exists=True, mandatory=True, desc="file to be split")
+    in_mask = File(exists=True, desc="only process files inside mask")
+    roi_size = traits.Tuple(traits.Int, traits.Int, traits.Int, desc="desired ROI size")


 class SplitROIsOutputSpec(TraitedSpec):
-    out_files = OutputMultiPath(File(exists=True), desc='the resulting ROIs')
+    out_files = OutputMultiPath(File(exists=True), desc="the resulting ROIs")
     out_masks = OutputMultiPath(
-        File(exists=True), desc='a mask indicating valid values')
+        File(exists=True), desc="a mask indicating valid values"
+    )
     out_index = OutputMultiPath(
-        File(exists=True), desc='arrays keeping original locations')
+        File(exists=True), desc="arrays keeping original locations"
+    )


 class SplitROIs(BaseInterface):
@@ -1113,6 +1171,7 @@ class SplitROIs(BaseInterface):
     >>> rois.run() # doctest: +SKIP

     """
+
     input_spec = SplitROIsInputSpec
     output_spec = SplitROIsOutputSpec

@@ -1127,9 +1186,9 @@ def _run_interface(self, runtime):
             roisize = self.inputs.roi_size

         res = split_rois(self.inputs.in_file, mask, roisize)
-        self._outnames['out_files'] = res[0]
-        self._outnames['out_masks'] = res[1]
-        self._outnames['out_index'] = res[2]
+        self._outnames["out_files"] = res[0]
+        self._outnames["out_masks"] = res[1]
+        self._outnames["out_index"] = res[2]
         return runtime

     def _list_outputs(self):
@@ -1141,15 +1200,16 @@ def _list_outputs(self):


 class MergeROIsInputSpec(TraitedSpec):
     in_files = InputMultiPath(
-        File(exists=True, mandatory=True, desc='files to be re-merged'))
+        File(exists=True, mandatory=True, desc="files to be re-merged")
+    )
     in_index = InputMultiPath(
-        File(exists=True, mandatory=True),
-        desc='array keeping original locations')
-    in_reference = File(exists=True, desc='reference file')
+        File(exists=True, mandatory=True), desc="array keeping original locations"
+    )
+    in_reference = File(exists=True, desc="reference file")


 class MergeROIsOutputSpec(TraitedSpec):
-    merged_file = File(exists=True, desc='the recomposed file')
+    merged_file = File(exists=True, desc="the recomposed file")


 class MergeROIs(BaseInterface):
@@ -1168,18 +1228,20 @@ class MergeROIs(BaseInterface):
     >>> rois.run() # doctest: +SKIP

     """
+
     input_spec = MergeROIsInputSpec
     output_spec = MergeROIsOutputSpec

     def _run_interface(self, runtime):
-        res = merge_rois(self.inputs.in_files, self.inputs.in_index,
-                         self.inputs.in_reference)
+        res = merge_rois(
+            self.inputs.in_files, self.inputs.in_index,
self.inputs.in_reference + ) self._merged = res return runtime def _list_outputs(self): outputs = self.output_spec().get() - outputs['merged_file'] = self._merged + outputs["merged_file"] = self._merged return outputs @@ -1200,11 +1262,11 @@ def normalize_tpms(in_files, in_mask=None, out_files=None): if len(out_files) != len(in_files): for i, finname in enumerate(in_files): fname, fext = op.splitext(op.basename(finname)) - if fext == '.gz': + if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext - out_file = op.abspath('%s_norm_%02d%s' % (fname, i, fext)) + out_file = op.abspath("%s_norm_%02d%s" % (fname, i, fext)) out_files += [out_file] imgs = [nb.load(fim, mmap=NUMPY_MMAP) for fim in in_files] @@ -1213,11 +1275,12 @@ def normalize_tpms(in_files, in_mask=None, out_files=None): img_data = imgs[0].get_data() img_data[img_data > 0.0] = 1.0 hdr = imgs[0].header.copy() - hdr['data_type'] = 16 + hdr["data_type"] = 16 hdr.set_data_dtype(np.float32) nb.save( nb.Nifti1Image(img_data.astype(np.float32), imgs[0].affine, hdr), - out_files[0]) + out_files[0], + ) return out_files[0] img_data = np.array([im.get_data() for im in imgs]).astype(np.float32) @@ -1239,11 +1302,11 @@ def normalize_tpms(in_files, in_mask=None, out_files=None): data = np.ma.masked_equal(img_data[i], 0) probmap = data / weights hdr = imgs[i].header.copy() - hdr['data_type'] = 16 - hdr.set_data_dtype('float32') + hdr["data_type"] = 16 + hdr.set_data_dtype("float32") nb.save( - nb.Nifti1Image(probmap.astype(np.float32), imgs[i].affine, hdr), - out_file) + nb.Nifti1Image(probmap.astype(np.float32), imgs[i].affine, hdr), out_file + ) return out_files @@ -1283,9 +1346,10 @@ def split_rois(in_file, mask=None, roishape=None): data = np.squeeze(data.take(nzels, axis=0)) nvols = data.shape[-1] - roidefname = op.abspath('onesmask.nii.gz') - nb.Nifti1Image(np.ones(roishape, dtype=np.uint8), None, - None).to_filename(roidefname) + roidefname = op.abspath("onesmask.nii.gz") + nb.Nifti1Image(np.ones(roishape, dtype=np.uint8), None, None).to_filename( + roidefname + ) out_files = [] out_mask = [] @@ -1301,24 +1365,25 @@ def split_rois(in_file, mask=None, roishape=None): last = els droi = data[first:last, ...] 
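# --- Editor's aside (not part of the diff): the hunks around here reformat
# split_rois()/merge_rois(), whose core bookkeeping is easy to miss in diff
# form: flatten the non-zero voxels, cut them into fixed-size chunks,
# zero-pad the last chunk, and keep the original indices so the results can
# be scattered back. A minimal, self-contained sketch of that scheme for a
# flattened (voxels x volumes) array; names are illustrative, not nipype API:
import numpy as np


def split_flat(data, chunk_size):
    """Cut rows into fixed-size chunks, zero-padding the last one."""
    chunks, indices = [], []
    for first in range(0, data.shape[0], chunk_size):
        last = min(first + chunk_size, data.shape[0])
        chunk = data[first:last]
        fill = chunk_size - (last - first)
        if fill > 0:
            # pad so every chunk (ROI) has an identical shape, as split_rois does
            chunk = np.vstack([chunk, np.zeros((fill, data.shape[1]), chunk.dtype)])
        chunks.append(chunk)
        indices.append(np.arange(first, last))  # remember original row positions
    return chunks, indices


def merge_flat(chunks, indices, n_rows, n_cols):
    """Inverse of split_flat: scatter chunk rows back, dropping the padding."""
    out = np.zeros((n_rows, n_cols), dtype=chunks[0].dtype)
    for chunk, idx in zip(chunks, indices):
        out[idx] = chunk[: len(idx)]
    return out


# round-trip check
_data = np.random.rand(10, 3).astype(np.float32)
_chunks, _idxs = split_flat(_data, 4)
assert np.allclose(merge_flat(_chunks, _idxs, 10, 3), _data)
# --- end aside; the diff continues below.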
-        iname = op.abspath('roi%010d_idx' % i)
-        out_idxs.append(iname + '.npz')
-        np.savez(iname, (nzels[0][first:last], ))
+        iname = op.abspath("roi%010d_idx" % i)
+        out_idxs.append(iname + ".npz")
+        np.savez(iname, (nzels[0][first:last],))

         if fill > 0:
-            droi = np.vstack((droi,
-                              np.zeros(
-                                  (int(fill), int(nvols)), dtype=np.float32)))
-            partialmsk = np.ones((roisize, ), dtype=np.uint8)
-            partialmsk[-int(fill):] = 0
-            partname = op.abspath('partialmask.nii.gz')
-            nb.Nifti1Image(partialmsk.reshape(roishape), None,
-                           None).to_filename(partname)
+            droi = np.vstack(
+                (droi, np.zeros((int(fill), int(nvols)), dtype=np.float32))
+            )
+            partialmsk = np.ones((roisize,), dtype=np.uint8)
+            partialmsk[-int(fill) :] = 0
+            partname = op.abspath("partialmask.nii.gz")
+            nb.Nifti1Image(partialmsk.reshape(roishape), None, None).to_filename(
+                partname
+            )
             out_mask.append(partname)
         else:
             out_mask.append(roidefname)

-        fname = op.abspath('roi%010d.nii.gz' % i)
+        fname = op.abspath("roi%010d.nii.gz" % i)
         nb.Nifti1Image(droi.reshape(droishape), None, None).to_filename(fname)
         out_files.append(fname)
     return out_files, out_mask, out_idxs

@@ -1334,17 +1399,17 @@ def merge_rois(in_files, in_idxs, in_ref, dtype=None, out_file=None):
     import subprocess as sp

     if out_file is None:
-        out_file = op.abspath('merged.nii.gz')
+        out_file = op.abspath("merged.nii.gz")

     if dtype is None:
         dtype = np.float32

     # if file is compressed, uncompress using os
     # to avoid memory errors
-    if op.splitext(in_ref)[1] == '.gz':
+    if op.splitext(in_ref)[1] == ".gz":
         try:
-            iflogger.info('uncompress %i', in_ref)
-            sp.check_call(['gunzip', in_ref], stdout=sp.PIPE, shell=True)
+            iflogger.info("uncompress %s", in_ref)
+            sp.check_call(["gunzip", in_ref], stdout=sp.PIPE)
             in_ref = op.splitext(in_ref)[0]
         except:
             pass
@@ -1363,50 +1428,54 @@ def merge_rois(in_files, in_idxs, in_ref, dtype=None, out_file=None):
         ndirs = 1
     newshape = (rsh[0], rsh[1], rsh[2], ndirs)
     hdr.set_data_dtype(dtype)
-    hdr.set_xyzt_units('mm', 'sec')
+    hdr.set_xyzt_units("mm", "sec")

     if ndirs < 300:
         data = np.zeros((npix, ndirs))
         for cname, iname in zip(in_files, in_idxs):
             f = np.load(iname)
-            idxs = np.squeeze(f['arr_0'])
-            cdata = nb.load(
-                cname, mmap=NUMPY_MMAP).get_data().reshape(-1, ndirs)
+            idxs = np.squeeze(f["arr_0"])
+            cdata = nb.load(cname, mmap=NUMPY_MMAP).get_data().reshape(-1, ndirs)
             nels = len(idxs)
-            idata = (idxs, )
+            idata = (idxs,)
             try:
                 data[idata, ...] = cdata[0:nels, ...]
except: - print(('Consistency between indexes and chunks was ' - 'lost: data=%s, chunk=%s') % (str(data.shape), - str(cdata.shape))) + print( + ( + "Consistency between indexes and chunks was " + "lost: data=%s, chunk=%s" + ) + % (str(data.shape), str(cdata.shape)) + ) raise hdr.set_data_shape(newshape) - nb.Nifti1Image(data.reshape(newshape).astype(dtype), aff, - hdr).to_filename(out_file) + nb.Nifti1Image(data.reshape(newshape).astype(dtype), aff, hdr).to_filename( + out_file + ) else: hdr.set_data_shape(rsh[:3]) nii = [] for d in range(ndirs): - fname = op.abspath('vol%06d.nii' % d) + fname = op.abspath("vol%06d.nii" % d) nb.Nifti1Image(np.zeros(rsh[:3]), aff, hdr).to_filename(fname) nii.append(fname) for cname, iname in zip(in_files, in_idxs): f = np.load(iname) - idxs = np.squeeze(f['arr_0']) + idxs = np.squeeze(f["arr_0"]) for d, fname in enumerate(nii): data = nb.load(fname, mmap=NUMPY_MMAP).get_data().reshape(-1) - cdata = nb.load( - cname, mmap=NUMPY_MMAP).get_data().reshape(-1, ndirs)[:, d] + cdata = ( + nb.load(cname, mmap=NUMPY_MMAP).get_data().reshape(-1, ndirs)[:, d] + ) nels = len(idxs) - idata = (idxs, ) + idata = (idxs,) data[idata] = cdata[0:nels] - nb.Nifti1Image(data.reshape(rsh[:3]), aff, - hdr).to_filename(fname) + nb.Nifti1Image(data.reshape(rsh[:3]), aff, hdr).to_filename(fname) imgs = [nb.load(im, mmap=NUMPY_MMAP) for im in nii] allim = nb.concat_images(imgs) @@ -1420,15 +1489,17 @@ class CalculateMedianInputSpec(BaseInterfaceInputSpec): File( exists=True, mandatory=True, - desc="One or more realigned Nifti 4D timeseries")) + desc="One or more realigned Nifti 4D timeseries", + ) + ) median_file = traits.Str(desc="Filename prefix to store median images") median_per_file = traits.Bool( - False, usedefault=True, desc="Calculate a median file for each Nifti") + False, usedefault=True, desc="Calculate a median file for each Nifti" + ) class CalculateMedianOutputSpec(TraitedSpec): - median_files = OutputMultiPath( - File(exists=True), desc="One or more median images") + median_files = OutputMultiPath(File(exists=True), desc="One or more median images") class CalculateMedian(BaseInterface): @@ -1444,6 +1515,7 @@ class CalculateMedian(BaseInterface): >>> mean.run() # doctest: +SKIP """ + input_spec = CalculateMedianInputSpec output_spec = CalculateMedianOutputSpec @@ -1460,20 +1532,20 @@ def _gen_fname(self, suffix, idx=None, ext=None): else: in_file = self.inputs.in_files fname, in_ext = op.splitext(op.basename(in_file)) - if in_ext == '.gz': + if in_ext == ".gz": fname, in_ext2 = op.splitext(fname) in_ext = in_ext2 + in_ext if ext is None: ext = in_ext - if ext.startswith('.'): + if ext.startswith("."): ext = ext[1:] if self.inputs.median_file: outname = self.inputs.median_file else: - outname = '{}_{}'.format(fname, suffix) + outname = "{}_{}".format(fname, suffix) if idx: outname += str(idx) - return op.abspath('{}.{}'.format(outname, ext)) + return op.abspath("{}.{}".format(outname, ext)) def _run_interface(self, runtime): total = None @@ -1494,16 +1566,15 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['median_files'] = self._median_files + outputs["median_files"] = self._median_files return outputs - def _write_nifti(self, img, data, idx, suffix='median'): + def _write_nifti(self, img, data, idx, suffix="median"): if self.inputs.median_per_file: median_img = nb.Nifti1Image(data, img.affine, img.header) filename = self._gen_fname(suffix, idx=idx) else: - median_img = nb.Nifti1Image(data / (idx + 1), img.affine, 
- img.header) + median_img = nb.Nifti1Image(data / (idx + 1), img.affine, img.header) filename = self._gen_fname(suffix) median_img.to_filename(filename) return filename @@ -1521,9 +1592,13 @@ class Distance(nam.Distance): def __init__(self, **inputs): super(nam.Distance, self).__init__(**inputs) - warnings.warn(("This interface has been deprecated since 0.10.0," - " please use nipype.algorithms.metrics.Distance"), - DeprecationWarning) + warnings.warn( + ( + "This interface has been deprecated since 0.10.0," + " please use nipype.algorithms.metrics.Distance" + ), + DeprecationWarning, + ) class Overlap(nam.Overlap): @@ -1535,9 +1610,13 @@ class Overlap(nam.Overlap): def __init__(self, **inputs): super(nam.Overlap, self).__init__(**inputs) - warnings.warn(("This interface has been deprecated since 0.10.0," - " please use nipype.algorithms.metrics.Overlap"), - DeprecationWarning) + warnings.warn( + ( + "This interface has been deprecated since 0.10.0," + " please use nipype.algorithms.metrics.Overlap" + ), + DeprecationWarning, + ) class FuzzyOverlap(nam.FuzzyOverlap): @@ -1550,9 +1629,13 @@ class FuzzyOverlap(nam.FuzzyOverlap): def __init__(self, **inputs): super(nam.FuzzyOverlap, self).__init__(**inputs) - warnings.warn(("This interface has been deprecated since 0.10.0," - " please use nipype.algorithms.metrics.FuzzyOverlap"), - DeprecationWarning) + warnings.warn( + ( + "This interface has been deprecated since 0.10.0," + " please use nipype.algorithms.metrics.FuzzyOverlap" + ), + DeprecationWarning, + ) class TSNR(confounds.TSNR): @@ -1563,6 +1646,10 @@ class TSNR(confounds.TSNR): def __init__(self, **inputs): super(confounds.TSNR, self).__init__(**inputs) - warnings.warn(("This interface has been moved since 0.12.0," - " please use nipype.algorithms.confounds.TSNR"), - UserWarning) + warnings.warn( + ( + "This interface has been moved since 0.12.0," + " please use nipype.algorithms.confounds.TSNR" + ), + UserWarning, + ) diff --git a/nipype/algorithms/modelgen.py b/nipype/algorithms/modelgen.py index 9beb0f031d..2457fe8d2f 100644 --- a/nipype/algorithms/modelgen.py +++ b/nipype/algorithms/modelgen.py @@ -18,13 +18,21 @@ import numpy as np from ..utils import NUMPY_MMAP -from ..interfaces.base import (BaseInterface, TraitedSpec, InputMultiPath, - traits, File, Bunch, BaseInterfaceInputSpec, - isdefined) +from ..interfaces.base import ( + BaseInterface, + TraitedSpec, + InputMultiPath, + traits, + File, + Bunch, + BaseInterfaceInputSpec, + isdefined, +) from ..utils.filemanip import ensure_list from ..utils.misc import normalize_mc_params from .. 
import config, logging -iflogger = logging.getLogger('nipype.interface') + +iflogger = logging.getLogger("nipype.interface") def gcd(a, b): @@ -80,17 +88,22 @@ def spm_hrf(RT, P=None, fMRI_T=16): """ from scipy.special import gammaln + p = np.array([6, 16, 1, 1, 6, 0, 32], dtype=float) if P is not None: - p[0:len(P)] = P + p[0 : len(P)] = P - _spm_Gpdf = lambda x, h, l: np.exp(h * np.log(l) + (h - 1) * np.log(x) - (l * x) - gammaln(h)) + _spm_Gpdf = lambda x, h, l: np.exp( + h * np.log(l) + (h - 1) * np.log(x) - (l * x) - gammaln(h) + ) # modelled hemodynamic response function - {mixture of Gammas} dt = RT / float(fMRI_T) u = np.arange(0, int(p[6] / dt + 1)) - p[5] / dt - with np.errstate(divide='ignore'): # Known division-by-zero - hrf = _spm_Gpdf(u, p[0] / p[2], dt / p[2]) - _spm_Gpdf( - u, p[1] / p[3], dt / p[3]) / p[4] + with np.errstate(divide="ignore"): # Known division-by-zero + hrf = ( + _spm_Gpdf(u, p[0] / p[2], dt / p[2]) + - _spm_Gpdf(u, p[1] / p[3], dt / p[3]) / p[4] + ) idx = np.arange(0, int((p[6] / RT) + 1)) * fMRI_T hrf = hrf[idx] hrf = hrf / np.sum(hrf) @@ -130,21 +143,20 @@ def scale_timings(timelist, input_units, output_units, time_repetition): """ if input_units == output_units: - _scalefactor = 1. + _scalefactor = 1.0 - if (input_units == 'scans') and (output_units == 'secs'): + if (input_units == "scans") and (output_units == "secs"): _scalefactor = time_repetition - if (input_units == 'secs') and (output_units == 'scans'): - _scalefactor = 1. / time_repetition - timelist = [np.max([0., _scalefactor * t]) for t in timelist] + if (input_units == "secs") and (output_units == "scans"): + _scalefactor = 1.0 / time_repetition + timelist = [np.max([0.0, _scalefactor * t]) for t in timelist] return timelist -def bids_gen_info(bids_event_files, - condition_column='', - amplitude_column=None, - time_repetition=False, - ): + +def bids_gen_info( + bids_event_files, condition_column="", amplitude_column=None, time_repetition=False, +): """Generate subject_info structure from a list of BIDS .tsv event files. 
Parameters @@ -169,22 +181,22 @@ def bids_gen_info(bids_event_files, info = [] for bids_event_file in bids_event_files: with open(bids_event_file) as f: - f_events = csv.DictReader(f, skipinitialspace=True, delimiter='\t') + f_events = csv.DictReader(f, skipinitialspace=True, delimiter="\t") events = [{k: v for k, v in row.items()} for row in f_events] if not condition_column: - condition_column = '_trial_type' + condition_column = "_trial_type" for i in events: - i.update({condition_column: 'ev0'}) + i.update({condition_column: "ev0"}) conditions = sorted(set([i[condition_column] for i in events])) runinfo = Bunch(conditions=[], onsets=[], durations=[], amplitudes=[]) for condition in conditions: - selected_events = [i for i in events if i[condition_column]==condition] - onsets = [float(i['onset']) for i in selected_events] - durations = [float(i['duration']) for i in selected_events] + selected_events = [i for i in events if i[condition_column] == condition] + onsets = [float(i["onset"]) for i in selected_events] + durations = [float(i["duration"]) for i in selected_events] if time_repetition: decimals = math.ceil(-math.log10(time_repetition)) onsets = [np.round(i, decimals) for i in onsets] - durations = [np.round(i ,decimals) for i in durations] + durations = [np.round(i, decimals) for i in durations] runinfo.conditions.append(condition) runinfo.onsets.append(onsets) runinfo.durations.append(durations) @@ -205,10 +217,10 @@ def gen_info(run_event_files): runinfo = Bunch(conditions=[], onsets=[], durations=[], amplitudes=[]) for event_file in event_files: _, name = os.path.split(event_file) - if '.run' in name: - name, _ = name.split('.run%03d' % (i + 1)) - elif '.txt' in name: - name, _ = name.split('.txt') + if ".run" in name: + name, _ = name.split(".run%03d" % (i + 1)) + elif ".txt" in name: + name, _ = name.split(".txt") runinfo.conditions.append(name) event_info = np.atleast_2d(np.loadtxt(event_file)) @@ -221,7 +233,7 @@ def gen_info(run_event_files): if event_info.shape[1] > 2: runinfo.amplitudes.append(event_info[:, 2].tolist()) else: - delattr(runinfo, 'amplitudes') + delattr(runinfo, "amplitudes") info.append(runinfo) return info @@ -230,38 +242,43 @@ class SpecifyModelInputSpec(BaseInterfaceInputSpec): subject_info = InputMultiPath( Bunch, mandatory=True, - xor=['subject_info', 'event_files', 'bids_event_file'], - desc='Bunch or List(Bunch) subject-specific ' - 'condition information. see ' - ':ref:`SpecifyModel` or ' - 'SpecifyModel.__doc__ for details') + xor=["subject_info", "event_files", "bids_event_file"], + desc="Bunch or List(Bunch) subject-specific " + "condition information. 
see " + ":ref:`SpecifyModel` or " + "SpecifyModel.__doc__ for details", + ) event_files = InputMultiPath( traits.List(File(exists=True)), mandatory=True, - xor=['subject_info', 'event_files', 'bids_event_file'], - desc='List of event description files 1, 2 or 3 ' - 'column format corresponding to onsets, ' - 'durations and amplitudes') + xor=["subject_info", "event_files", "bids_event_file"], + desc="List of event description files 1, 2 or 3 " + "column format corresponding to onsets, " + "durations and amplitudes", + ) bids_event_file = InputMultiPath( File(exists=True), mandatory=True, - xor=['subject_info', 'event_files', 'bids_event_file'], - desc='TSV event file containing common BIDS fields: `onset`,' - '`duration`, and categorization and amplitude columns') + xor=["subject_info", "event_files", "bids_event_file"], + desc="TSV event file containing common BIDS fields: `onset`," + "`duration`, and categorization and amplitude columns", + ) bids_condition_column = traits.Str( - default_value='trial_type', + default_value="trial_type", usedefault=True, - desc='Column of the file passed to `bids_event_file` to the ' - 'unique values of which events will be assigned' - 'to regressors') + desc="Column of the file passed to `bids_event_file` to the " + "unique values of which events will be assigned" + "to regressors", + ) bids_amplitude_column = traits.Str( - desc='Column of the file passed to `bids_event_file` ' - 'according to which to assign amplitudes to events') + desc="Column of the file passed to `bids_event_file` " + "according to which to assign amplitudes to events" + ) realignment_parameters = InputMultiPath( File(exists=True), - desc='Realignment parameters returned ' - 'by motion correction algorithm', - copyfile=False) + desc="Realignment parameters returned " "by motion correction algorithm", + copyfile=False, + ) parameter_source = traits.Enum( "SPM", "FSL", @@ -269,38 +286,43 @@ class SpecifyModelInputSpec(BaseInterfaceInputSpec): "FSFAST", "NIPY", usedefault=True, - desc="Source of motion parameters") + desc="Source of motion parameters", + ) outlier_files = InputMultiPath( File(exists=True), - desc='Files containing scan outlier indices ' - 'that should be tossed', - copyfile=False) + desc="Files containing scan outlier indices " "that should be tossed", + copyfile=False, + ) functional_runs = InputMultiPath( traits.Either(traits.List(File(exists=True)), File(exists=True)), mandatory=True, - desc='Data files for model. List of 4D ' - 'files or list of list of 3D ' - 'files per session', - copyfile=False) + desc="Data files for model. List of 4D " + "files or list of list of 3D " + "files per session", + copyfile=False, + ) input_units = traits.Enum( - 'secs', - 'scans', + "secs", + "scans", mandatory=True, - desc='Units of event onsets and durations (secs ' - 'or scans). Output units are always in secs') + desc="Units of event onsets and durations (secs " + "or scans). 
Output units are always in secs", + ) high_pass_filter_cutoff = traits.Float( - mandatory=True, desc='High-pass filter cutoff in secs') + mandatory=True, desc="High-pass filter cutoff in secs" + ) time_repetition = traits.Float( mandatory=True, - desc='Time between the start of one volume ' - 'to the start of the next image volume.') + desc="Time between the start of one volume " + "to the start of the next image volume.", + ) # Not implemented yet # polynomial_order = traits.Range(0, low=0, # desc ='Number of polynomial functions to model high pass filter.') class SpecifyModelOutputSpec(TraitedSpec): - session_info = traits.Any(desc='Session info for level1designs') + session_info = traits.Any(desc="Session info for level1designs") class SpecifyModel(BaseInterface): @@ -368,107 +390,109 @@ class SpecifyModel(BaseInterface): >>> s.inputs.subject_info = [evs_run2, evs_run3] """ + input_spec = SpecifyModelInputSpec output_spec = SpecifyModelOutputSpec - def _generate_standard_design(self, - infolist, - functional_runs=None, - realignment_parameters=None, - outliers=None): + def _generate_standard_design( + self, infolist, functional_runs=None, realignment_parameters=None, outliers=None + ): """ Generates a standard design matrix paradigm given information about each run """ sessinfo = [] - output_units = 'secs' - if 'output_units' in self.inputs.traits(): + output_units = "secs" + if "output_units" in self.inputs.traits(): output_units = self.inputs.output_units for i, info in enumerate(infolist): sessinfo.insert(i, dict(cond=[])) if isdefined(self.inputs.high_pass_filter_cutoff): - sessinfo[i]['hpf'] = \ - np.float(self.inputs.high_pass_filter_cutoff) + sessinfo[i]["hpf"] = np.float(self.inputs.high_pass_filter_cutoff) - if hasattr(info, 'conditions') and info.conditions is not None: + if hasattr(info, "conditions") and info.conditions is not None: for cid, cond in enumerate(info.conditions): - sessinfo[i]['cond'].insert(cid, dict()) - sessinfo[i]['cond'][cid]['name'] = info.conditions[cid] + sessinfo[i]["cond"].insert(cid, dict()) + sessinfo[i]["cond"][cid]["name"] = info.conditions[cid] scaled_onset = scale_timings( - info.onsets[cid], self.inputs.input_units, - output_units, self.inputs.time_repetition) - sessinfo[i]['cond'][cid]['onset'] = scaled_onset + info.onsets[cid], + self.inputs.input_units, + output_units, + self.inputs.time_repetition, + ) + sessinfo[i]["cond"][cid]["onset"] = scaled_onset scaled_duration = scale_timings( - info.durations[cid], self.inputs.input_units, - output_units, self.inputs.time_repetition) - sessinfo[i]['cond'][cid]['duration'] = scaled_duration - if hasattr(info, 'amplitudes') and info.amplitudes: - sessinfo[i]['cond'][cid]['amplitudes'] = \ - info.amplitudes[cid] - - if hasattr(info, 'tmod') and info.tmod and \ - len(info.tmod) > cid: - sessinfo[i]['cond'][cid]['tmod'] = info.tmod[cid] - - if hasattr(info, 'pmod') and info.pmod and \ - len(info.pmod) > cid: + info.durations[cid], + self.inputs.input_units, + output_units, + self.inputs.time_repetition, + ) + sessinfo[i]["cond"][cid]["duration"] = scaled_duration + if hasattr(info, "amplitudes") and info.amplitudes: + sessinfo[i]["cond"][cid]["amplitudes"] = info.amplitudes[cid] + + if hasattr(info, "tmod") and info.tmod and len(info.tmod) > cid: + sessinfo[i]["cond"][cid]["tmod"] = info.tmod[cid] + + if hasattr(info, "pmod") and info.pmod and len(info.pmod) > cid: if info.pmod[cid]: - sessinfo[i]['cond'][cid]['pmod'] = [] + sessinfo[i]["cond"][cid]["pmod"] = [] for j, name in 
enumerate(info.pmod[cid].name): - sessinfo[i]['cond'][cid]['pmod'].insert(j, {}) - sessinfo[i]['cond'][cid]['pmod'][j]['name'] = \ - name - sessinfo[i]['cond'][cid]['pmod'][j]['poly'] = \ - info.pmod[cid].poly[j] - sessinfo[i]['cond'][cid]['pmod'][j]['param'] = \ - info.pmod[cid].param[j] - - sessinfo[i]['regress'] = [] - if hasattr(info, 'regressors') and info.regressors is not None: + sessinfo[i]["cond"][cid]["pmod"].insert(j, {}) + sessinfo[i]["cond"][cid]["pmod"][j]["name"] = name + sessinfo[i]["cond"][cid]["pmod"][j]["poly"] = info.pmod[ + cid + ].poly[j] + sessinfo[i]["cond"][cid]["pmod"][j][ + "param" + ] = info.pmod[cid].param[j] + + sessinfo[i]["regress"] = [] + if hasattr(info, "regressors") and info.regressors is not None: for j, r in enumerate(info.regressors): - sessinfo[i]['regress'].insert(j, dict(name='', val=[])) - if hasattr(info, 'regressor_names') and \ - info.regressor_names is not None: - sessinfo[i]['regress'][j]['name'] = \ - info.regressor_names[j] + sessinfo[i]["regress"].insert(j, dict(name="", val=[])) + if ( + hasattr(info, "regressor_names") + and info.regressor_names is not None + ): + sessinfo[i]["regress"][j]["name"] = info.regressor_names[j] else: - sessinfo[i]['regress'][j]['name'] = 'UR%d' % (j + 1) - sessinfo[i]['regress'][j]['val'] = info.regressors[j] - sessinfo[i]['scans'] = functional_runs[i] + sessinfo[i]["regress"][j]["name"] = "UR%d" % (j + 1) + sessinfo[i]["regress"][j]["val"] = info.regressors[j] + sessinfo[i]["scans"] = functional_runs[i] if realignment_parameters is not None: for i, rp in enumerate(realignment_parameters): mc = realignment_parameters[i] for col in range(mc.shape[1]): - colidx = len(sessinfo[i]['regress']) - sessinfo[i]['regress'].insert(colidx, dict( - name='', val=[])) - sessinfo[i]['regress'][colidx]['name'] = 'Realign%d' % ( - col + 1) - sessinfo[i]['regress'][colidx]['val'] = mc[:, col].tolist() + colidx = len(sessinfo[i]["regress"]) + sessinfo[i]["regress"].insert(colidx, dict(name="", val=[])) + sessinfo[i]["regress"][colidx]["name"] = "Realign%d" % (col + 1) + sessinfo[i]["regress"][colidx]["val"] = mc[:, col].tolist() if outliers is not None: for i, out in enumerate(outliers): numscans = 0 - for f in ensure_list(sessinfo[i]['scans']): + for f in ensure_list(sessinfo[i]["scans"]): shape = load(f, mmap=NUMPY_MMAP).shape if len(shape) == 3 or shape[3] == 1: - iflogger.warning('You are using 3D instead of 4D ' - 'files. Are you sure this was ' - 'intended?') + iflogger.warning( + "You are using 3D instead of 4D " + "files. Are you sure this was " + "intended?" 
+ ) numscans += 1 else: numscans += shape[3] for j, scanno in enumerate(out): - colidx = len(sessinfo[i]['regress']) - sessinfo[i]['regress'].insert(colidx, dict( - name='', val=[])) - sessinfo[i]['regress'][colidx]['name'] = 'Outlier%d' % ( - j + 1) - sessinfo[i]['regress'][colidx]['val'] = \ - np.zeros((1, numscans))[0].tolist() - sessinfo[i]['regress'][colidx]['val'][int(scanno)] = 1 + colidx = len(sessinfo[i]["regress"]) + sessinfo[i]["regress"].insert(colidx, dict(name="", val=[])) + sessinfo[i]["regress"][colidx]["name"] = "Outlier%d" % (j + 1) + sessinfo[i]["regress"][colidx]["val"] = np.zeros((1, numscans))[ + 0 + ].tolist() + sessinfo[i]["regress"][colidx]["val"][int(scanno)] = 1 return sessinfo def _generate_design(self, infolist=None): @@ -482,7 +506,9 @@ def _generate_design(self, infolist=None): func1d=normalize_mc_params, axis=1, arr=np.loadtxt(parfile), - source=self.inputs.parameter_source)) + source=self.inputs.parameter_source, + ) + ) outliers = [] if isdefined(self.inputs.outlier_files): for filename in self.inputs.outlier_files: @@ -507,12 +533,13 @@ def _generate_design(self, infolist=None): self.inputs.bids_condition_column, self.inputs.bids_amplitude_column, self.inputs.time_repetition, - ) + ) self._sessinfo = self._generate_standard_design( infolist, functional_runs=self.inputs.functional_runs, realignment_parameters=realignment_parameters, - outliers=outliers) + outliers=outliers, + ) def _run_interface(self, runtime): """ @@ -523,9 +550,9 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - if not hasattr(self, '_sessinfo'): + if not hasattr(self, "_sessinfo"): self._generate_design() - outputs['session_info'] = self._sessinfo + outputs["session_info"] = self._sessinfo return outputs @@ -534,14 +561,14 @@ class SpecifySPMModelInputSpec(SpecifyModelInputSpec): concatenate_runs = traits.Bool( False, usedefault=True, - desc='Concatenate all runs to look like a ' - 'single session.') + desc="Concatenate all runs to look like a " "single session.", + ) output_units = traits.Enum( - 'secs', - 'scans', + "secs", + "scans", usedefault=True, - desc='Units of design event onsets and durations ' - '(secs or scans)') + desc="Units of design event onsets and durations " "(secs or scans)", + ) class SpecifySPMModel(SpecifyModel): @@ -580,7 +607,7 @@ def _concatenate_info(self, infolist): img = load(f, mmap=NUMPY_MMAP) numscans = img.shape[3] else: - raise Exception('Functional input not specified correctly') + raise Exception("Functional input not specified correctly") nscans.insert(i, numscans) # now combine all fields into 1 @@ -589,63 +616,68 @@ def _concatenate_info(self, infolist): infoout = infolist[0] for j, val in enumerate(infolist[0].durations): if len(infolist[0].onsets[j]) > 1 and len(val) == 1: - infoout.durations[j] = ( - infolist[0].durations[j] * len(infolist[0].onsets[j])) + infoout.durations[j] = infolist[0].durations[j] * len( + infolist[0].onsets[j] + ) for i, info in enumerate(infolist[1:]): # info.[conditions, tmod] remain the same if info.onsets: for j, val in enumerate(info.onsets): - if self.inputs.input_units == 'secs': - onsets = np.array(info.onsets[j]) +\ - self.inputs.time_repetition * \ - sum(nscans[0:(i + 1)]) + if self.inputs.input_units == "secs": + onsets = np.array( + info.onsets[j] + ) + self.inputs.time_repetition * sum(nscans[0 : (i + 1)]) infoout.onsets[j].extend(onsets.tolist()) else: - onsets = np.array(info.onsets[j]) + \ - sum(nscans[0:(i + 1)]) + onsets = np.array(info.onsets[j]) + 
sum(nscans[0 : (i + 1)]) infoout.onsets[j].extend(onsets.tolist()) for j, val in enumerate(info.durations): if len(info.onsets[j]) > 1 and len(val) == 1: infoout.durations[j].extend( - info.durations[j] * len(info.onsets[j])) + info.durations[j] * len(info.onsets[j]) + ) elif len(info.onsets[j]) == len(val): infoout.durations[j].extend(info.durations[j]) else: - raise ValueError('Mismatch in number of onsets and \ + raise ValueError( + "Mismatch in number of onsets and \ durations for run {0}, condition \ - {1}'.format(i + 2, j + 1)) + {1}".format( + i + 2, j + 1 + ) + ) - if hasattr(info, 'amplitudes') and info.amplitudes: + if hasattr(info, "amplitudes") and info.amplitudes: for j, val in enumerate(info.amplitudes): infoout.amplitudes[j].extend(info.amplitudes[j]) - if hasattr(info, 'pmod') and info.pmod: + if hasattr(info, "pmod") and info.pmod: for j, val in enumerate(info.pmod): if val: for key, data in enumerate(val.param): infoout.pmod[j].param[key].extend(data) - if hasattr(info, 'regressors') and info.regressors: + if hasattr(info, "regressors") and info.regressors: # assumes same ordering of regressors across different # runs and the same names for the regressors for j, v in enumerate(info.regressors): infoout.regressors[j].extend(info.regressors[j]) # insert session regressors - if not hasattr(infoout, 'regressors') or not infoout.regressors: + if not hasattr(infoout, "regressors") or not infoout.regressors: infoout.regressors = [] onelist = np.zeros((1, sum(nscans))) - onelist[0, sum(nscans[0:i]):sum(nscans[0:(i + 1)])] = 1 - infoout.regressors.insert( - len(infoout.regressors), - onelist.tolist()[0]) + onelist[0, sum(nscans[0:i]) : sum(nscans[0 : (i + 1)])] = 1 + infoout.regressors.insert(len(infoout.regressors), onelist.tolist()[0]) return [infoout], nscans def _generate_design(self, infolist=None): - if not isdefined(self.inputs.concatenate_runs) or \ - not self.inputs.concatenate_runs: + if ( + not isdefined(self.inputs.concatenate_runs) + or not self.inputs.concatenate_runs + ): super(SpecifySPMModel, self)._generate_design(infolist=infolist) return @@ -663,12 +695,14 @@ def _generate_design(self, infolist=None): func1d=normalize_mc_params, axis=1, arr=np.loadtxt(parfile), - source=self.inputs.parameter_source) + source=self.inputs.parameter_source, + ) if not realignment_parameters: realignment_parameters.insert(0, mc) else: - realignment_parameters[0] = \ - np.concatenate((realignment_parameters[0], mc)) + realignment_parameters[0] = np.concatenate( + (realignment_parameters[0], mc) + ) outliers = [] if isdefined(self.inputs.outlier_files): outliers = [[]] @@ -676,12 +710,13 @@ def _generate_design(self, infolist=None): try: out = np.loadtxt(filename) except IOError: - iflogger.warning('Error reading outliers file %s', filename) + iflogger.warning("Error reading outliers file %s", filename) out = np.array([]) if out.size > 0: - iflogger.debug('fname=%s, out=%s, nscans=%d', filename, - out, sum(nscans[0:i])) + iflogger.debug( + "fname=%s, out=%s, nscans=%d", filename, out, sum(nscans[0:i]) + ) sumscans = out.astype(int) + sum(nscans[0:i]) if out.size == 1: @@ -693,38 +728,39 @@ def _generate_design(self, infolist=None): concatlist, functional_runs=functional_runs, realignment_parameters=realignment_parameters, - outliers=outliers) + outliers=outliers, + ) class SpecifySparseModelInputSpec(SpecifyModelInputSpec): time_acquisition = traits.Float( - 0, - mandatory=True, - desc='Time in seconds to acquire a single ' - 'image volume') + 0, mandatory=True, desc="Time in seconds 
to acquire a single " "image volume" + ) volumes_in_cluster = traits.Range( - 1, usedefault=True, desc='Number of scan volumes in a cluster') - model_hrf = traits.Bool(desc='Model sparse events with hrf') + 1, usedefault=True, desc="Number of scan volumes in a cluster" + ) + model_hrf = traits.Bool(desc="Model sparse events with hrf") stimuli_as_impulses = traits.Bool( - True, desc='Treat each stimulus to be impulse-like', usedefault=True) + True, desc="Treat each stimulus to be impulse-like", usedefault=True + ) use_temporal_deriv = traits.Bool( - requires=['model_hrf'], - desc='Create a temporal derivative in ' - 'addition to regular regressor') + requires=["model_hrf"], + desc="Create a temporal derivative in " "addition to regular regressor", + ) scale_regressors = traits.Bool( - True, desc='Scale regressors by the peak', usedefault=True) + True, desc="Scale regressors by the peak", usedefault=True + ) scan_onset = traits.Float( - 0.0, - desc='Start of scanning relative to onset of run in secs', - usedefault=True) + 0.0, desc="Start of scanning relative to onset of run in secs", usedefault=True + ) save_plot = traits.Bool( - desc=('Save plot of sparse design calculation ' - '(requires matplotlib)')) + desc=("Save plot of sparse design calculation " "(requires matplotlib)") + ) class SpecifySparseModelOutputSpec(SpecifyModelOutputSpec): - sparse_png_file = File(desc='PNG file showing sparse design') - sparse_svg_file = File(desc='SVG file showing sparse design') + sparse_png_file = File(desc="PNG file showing sparse design") + sparse_svg_file = File(desc="SVG file showing sparse design") class SpecifySparseModel(SpecifyModel): @@ -756,6 +792,7 @@ class SpecifySparseModel(SpecifyModel): >>> s.inputs.subject_info = [evs_run2, evs_run3] """ + input_spec = SpecifySparseModelInputSpec output_spec = SpecifySparseModelOutputSpec @@ -766,7 +803,8 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): if isdefined(self.inputs.save_plot) and self.inputs.save_plot: bplot = True import matplotlib - matplotlib.use(config.get('execution', 'matplotlib_backend')) + + matplotlib.use(config.get("execution", "matplotlib_backend")) import matplotlib.pyplot as plt TR = np.round(self.inputs.time_repetition * 1000) # in ms @@ -789,8 +827,8 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): dt = float(gcd(dttemp, dt)) if dt < 1: - raise Exception('Time multiple less than 1 ms') - iflogger.info('Setting dt = %d ms\n', dt) + raise Exception("Time multiple less than 1 ms") + iflogger.info("Setting dt = %d ms\n", dt) npts = int(np.ceil(total_time / dt)) times = np.arange(0, total_time, dt) * 1e-3 timeline = np.zeros((npts)) @@ -804,14 +842,15 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): boxcar[int(1.0 * 1e3 / dt)] = 1.0 reg_scale = float(TA / dt) else: - boxcar[int(1.0 * 1e3 / dt):int(2.0 * 1e3 / dt)] = 1.0 + boxcar[int(1.0 * 1e3 / dt) : int(2.0 * 1e3 / dt)] = 1.0 if isdefined(self.inputs.model_hrf) and self.inputs.model_hrf: response = np.convolve(boxcar, hrf) reg_scale = 1.0 / response.max() - iflogger.info('response sum: %.4f max: %.4f', response.sum(), - response.max()) - iflogger.info('reg_scale: %.4f', reg_scale) + iflogger.info( + "response sum: %.4f max: %.4f", response.sum(), response.max() + ) + iflogger.info("reg_scale: %.4f", reg_scale) for i, t in enumerate(onsets): idx = int(np.round(t / dt)) @@ -831,7 +870,7 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): if durations[i] == 0: durations[i] = TA * nvol stimdur = 
np.ones((int(durations[i] / dt)))
-                timeline2 = np.convolve(timeline2, stimdur)[0:len(timeline2)]
+                timeline2 = np.convolve(timeline2, stimdur)[0 : len(timeline2)]
             timeline += timeline2
             timeline2[:] = 0
@@ -840,17 +879,21 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans):
                 plt.plot(times, timeline)

         if isdefined(self.inputs.model_hrf) and self.inputs.model_hrf:
-            timeline = np.convolve(timeline, hrf)[0:len(timeline)]
-            if isdefined(self.inputs.use_temporal_deriv) and \
-                    self.inputs.use_temporal_deriv:
+            timeline = np.convolve(timeline, hrf)[0 : len(timeline)]
+            if (
+                isdefined(self.inputs.use_temporal_deriv)
+                and self.inputs.use_temporal_deriv
+            ):
                 # create temporal deriv
                 timederiv = np.concatenate(([0], np.diff(timeline)))

         if bplot:
             plt.subplot(4, 1, 3)
             plt.plot(times, timeline)
-            if isdefined(self.inputs.use_temporal_deriv) and \
-                    self.inputs.use_temporal_deriv:
+            if (
+                isdefined(self.inputs.use_temporal_deriv)
+                and self.inputs.use_temporal_deriv
+            ):
                 plt.plot(times, timederiv)
         # sample timeline
         timeline2 = np.zeros((npts))
@@ -861,13 +904,14 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans):
             scanidx = scanstart + np.arange(int(TA / dt))
             timeline2[scanidx] = np.max(timeline)
             reg.insert(i, np.mean(timeline[scanidx]) * reg_scale)
-            if isdefined(self.inputs.use_temporal_deriv) and \
-                    self.inputs.use_temporal_deriv:
+            if (
+                isdefined(self.inputs.use_temporal_deriv)
+                and self.inputs.use_temporal_deriv
+            ):
                 regderiv.insert(i, np.mean(timederiv[scanidx]) * reg_scale)

-        if isdefined(self.inputs.use_temporal_deriv) and \
-                self.inputs.use_temporal_deriv:
-            iflogger.info('orthoganlizing derivative w.r.t. main regressor')
+        if isdefined(self.inputs.use_temporal_deriv) and self.inputs.use_temporal_deriv:
+            iflogger.info("orthogonalizing derivative w.r.t. 
main regressor") regderiv = orth(reg, regderiv) if bplot: @@ -875,8 +919,8 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): plt.plot(times, timeline2) plt.subplot(4, 1, 4) plt.bar(np.arange(len(reg)), reg, width=0.5) - plt.savefig('sparse.png') - plt.savefig('sparse.svg') + plt.savefig("sparse.png") + plt.savefig("sparse.svg") if regderiv: return [reg, regderiv] @@ -889,23 +933,32 @@ def _cond_to_regress(self, info, nscans): reg = [] regnames = [] for i, cond in enumerate(info.conditions): - if hasattr(info, 'amplitudes') and info.amplitudes: + if hasattr(info, "amplitudes") and info.amplitudes: amplitudes = info.amplitudes[i] else: amplitudes = None regnames.insert(len(regnames), cond) - scaled_onsets = scale_timings(info.onsets[i], - self.inputs.input_units, 'secs', - self.inputs.time_repetition) - scaled_durations = scale_timings(info.durations[i], - self.inputs.input_units, 'secs', - self.inputs.time_repetition) - regressor = self._gen_regress(scaled_onsets, scaled_durations, - amplitudes, nscans) - if isdefined(self.inputs.use_temporal_deriv) and \ - self.inputs.use_temporal_deriv: + scaled_onsets = scale_timings( + info.onsets[i], + self.inputs.input_units, + "secs", + self.inputs.time_repetition, + ) + scaled_durations = scale_timings( + info.durations[i], + self.inputs.input_units, + "secs", + self.inputs.time_repetition, + ) + regressor = self._gen_regress( + scaled_onsets, scaled_durations, amplitudes, nscans + ) + if ( + isdefined(self.inputs.use_temporal_deriv) + and self.inputs.use_temporal_deriv + ): reg.insert(len(reg), regressor[0]) - regnames.insert(len(regnames), cond + '_D') + regnames.insert(len(regnames), cond + "_D") reg.insert(len(reg), regressor[1]) else: reg.insert(len(reg), regressor) @@ -917,7 +970,7 @@ def _cond_to_regress(self, info, nscans): treg = np.zeros((nscans / nvol, nvol)) treg[:, i] = 1 reg.insert(len(reg), treg.ravel().tolist()) - regnames.insert(len(regnames), 'T1effect_%d' % i) + regnames.insert(len(regnames), "T1effect_%d" % i) return reg, regnames def _generate_clustered_design(self, infolist): @@ -934,10 +987,11 @@ def _generate_clustered_design(self, infolist): img = load(self.inputs.functional_runs[i], mmap=NUMPY_MMAP) nscans = img.shape[3] reg, regnames = self._cond_to_regress(info, nscans) - if hasattr(infoout[i], 'regressors') and infoout[i].regressors: + if hasattr(infoout[i], "regressors") and infoout[i].regressors: if not infoout[i].regressor_names: - infoout[i].regressor_names = \ - ['R%d' % j for j in range(len(infoout[i].regressors))] + infoout[i].regressor_names = [ + "R%d" % j for j in range(len(infoout[i].regressors)) + ] else: infoout[i].regressors = [] infoout[i].regressor_names = [] @@ -958,13 +1012,11 @@ def _generate_design(self, infolist=None): def _list_outputs(self): outputs = self._outputs().get() - if not hasattr(self, '_sessinfo'): + if not hasattr(self, "_sessinfo"): self._generate_design() - outputs['session_info'] = self._sessinfo + outputs["session_info"] = self._sessinfo if isdefined(self.inputs.save_plot) and self.inputs.save_plot: - outputs['sparse_png_file'] = os.path.join(os.getcwd(), - 'sparse.png') - outputs['sparse_svg_file'] = os.path.join(os.getcwd(), - 'sparse.svg') + outputs["sparse_png_file"] = os.path.join(os.getcwd(), "sparse.png") + outputs["sparse_svg_file"] = os.path.join(os.getcwd(), "sparse.svg") return outputs diff --git a/nipype/algorithms/rapidart.py b/nipype/algorithms/rapidart.py index 6371458ca3..3f02ca8d29 100644 --- a/nipype/algorithms/rapidart.py +++ 
b/nipype/algorithms/rapidart.py @@ -19,13 +19,21 @@ import numpy as np from ..utils import NUMPY_MMAP -from ..interfaces.base import (BaseInterface, traits, InputMultiPath, - OutputMultiPath, TraitedSpec, File, - BaseInterfaceInputSpec, isdefined) +from ..interfaces.base import ( + BaseInterface, + traits, + InputMultiPath, + OutputMultiPath, + TraitedSpec, + File, + BaseInterfaceInputSpec, + isdefined, +) from ..utils.filemanip import ensure_list, save_json, split_filename from ..utils.misc import find_indices, normalize_mc_params from .. import logging, config -iflogger = logging.getLogger('nipype.interface') + +iflogger = logging.getLogger("nipype.interface") def _get_affine_matrix(params, source): @@ -35,19 +43,19 @@ def _get_affine_matrix(params, source): source : the package that generated the parameters supports SPM, AFNI, FSFAST, FSL, NIPY """ - if source == 'NIPY': + if source == "NIPY": # nipy does not store typical euler angles, use nipy to convert from nipy.algorithms.registration import to_matrix44 + return to_matrix44(params) params = normalize_mc_params(params, source) # process for FSL, SPM, AFNI and FSFAST - rotfunc = lambda x: np.array([[np.cos(x), np.sin(x)], - [-np.sin(x), np.cos(x)]]) + rotfunc = lambda x: np.array([[np.cos(x), np.sin(x)], [-np.sin(x), np.cos(x)]]) q = np.array([0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0]) if len(params) < 12: - params = np.hstack((params, q[len(params):])) - params.shape = (len(params), ) + params = np.hstack((params, q[len(params) :])) + params.shape = (len(params),) # Translation T = np.eye(4) T[0:3, -1] = params[0:3] @@ -64,7 +72,7 @@ def _get_affine_matrix(params, source): # Shear Sh = np.eye(4) Sh[(0, 0, 1), (1, 2, 2)] = params[9:12] - if source in ('AFNI', 'FSFAST'): + if source in ("AFNI", "FSFAST"): return np.dot(T, np.dot(Ry, np.dot(Rx, np.dot(Rz, np.dot(S, Sh))))) return np.dot(T, np.dot(Rx, np.dot(Ry, np.dot(Rz, np.dot(S, Sh))))) @@ -88,9 +96,7 @@ def _calc_norm(mc, use_differences, source, brain_pts=None): """ - affines = [ - _get_affine_matrix(mc[i, :], source) for i in range(mc.shape[0]) - ] + affines = [_get_affine_matrix(mc[i, :], source) for i in range(mc.shape[0])] return _calc_norm_affine(affines, use_differences, brain_pts) @@ -129,24 +135,34 @@ def _calc_norm_affine(affines, use_differences, brain_pts=None): displacement[i, :] = np.sqrt( np.sum( np.power( - np.reshape(newpos[i, :], - (3, all_pts.shape[1])) - all_pts[0:3, :], - 2), - axis=0)) + np.reshape(newpos[i, :], (3, all_pts.shape[1])) + - all_pts[0:3, :], + 2, + ), + axis=0, + ) + ) # np.savez('displacement.npz', newpos=newpos, pts=all_pts) normdata = np.zeros(len(affines)) if use_differences: newpos = np.concatenate( - (np.zeros((1, n_pts)), np.diff(newpos, n=1, axis=0)), axis=0) + (np.zeros((1, n_pts)), np.diff(newpos, n=1, axis=0)), axis=0 + ) for i in range(newpos.shape[0]): - normdata[i] = \ - np.max(np.sqrt(np.sum( - np.reshape(np.power(np.abs(newpos[i, :]), 2), - (3, all_pts.shape[1])), - axis=0))) + normdata[i] = np.max( + np.sqrt( + np.sum( + np.reshape( + np.power(np.abs(newpos[i, :]), 2), (3, all_pts.shape[1]) + ), + axis=0, + ) + ) + ) else: from scipy.signal import detrend - newpos = np.abs(detrend(newpos, axis=0, type='constant')) + + newpos = np.abs(detrend(newpos, axis=0, type="constant")) normdata = np.sqrt(np.mean(np.power(newpos, 2), axis=1)) return normdata, displacement @@ -154,15 +170,18 @@ def _calc_norm_affine(affines, use_differences, brain_pts=None): class ArtifactDetectInputSpec(BaseInterfaceInputSpec): realigned_files = InputMultiPath( 
File(exists=True), - desc=("Names of realigned functional data " - "files"), - mandatory=True) + desc=("Names of realigned functional data " "files"), + mandatory=True, + ) realignment_parameters = InputMultiPath( File(exists=True), mandatory=True, - desc=("Names of realignment " - "parameters corresponding to " - "the functional data files")) + desc=( + "Names of realignment " + "parameters corresponding to " + "the functional data files" + ), + ) parameter_source = traits.Enum( "SPM", "FSL", @@ -170,131 +189,167 @@ class ArtifactDetectInputSpec(BaseInterfaceInputSpec): "NiPy", "FSFAST", desc="Source of movement parameters", - mandatory=True) + mandatory=True, + ) use_differences = traits.ListBool( [True, False], minlen=2, maxlen=2, usedefault=True, - desc=("Use differences between successive" - " motion (first element) and " - "intensity parameter (second " - "element) estimates in order to " - "determine outliers. " - "(default is [True, False])")) + desc=( + "Use differences between successive" + " motion (first element) and " + "intensity parameter (second " + "element) estimates in order to " + "determine outliers. " + "(default is [True, False])" + ), + ) use_norm = traits.Bool( True, usedefault=True, - requires=['norm_threshold'], - desc=("Uses a composite of the motion parameters in " - "order to determine outliers.")) + requires=["norm_threshold"], + desc=( + "Uses a composite of the motion parameters in " + "order to determine outliers." + ), + ) norm_threshold = traits.Float( - xor=['rotation_threshold', 'translation_threshold'], + xor=["rotation_threshold", "translation_threshold"], mandatory=True, - desc=("Threshold to use to detect motion-rela" - "ted outliers when composite motion is " - "being used")) + desc=( + "Threshold to use to detect motion-related " + "outliers when composite motion is " + "being used" + ), + ) rotation_threshold = traits.Float( mandatory=True, - xor=['norm_threshold'], - desc=("Threshold (in radians) to use to " - "detect rotation-related outliers")) + xor=["norm_threshold"], + desc=("Threshold (in radians) to use to " "detect rotation-related outliers"), + ) translation_threshold = traits.Float( mandatory=True, - xor=['norm_threshold'], - desc=("Threshold (in mm) to use to " - "detect translation-related " - "outliers")) + xor=["norm_threshold"], + desc=("Threshold (in mm) to use to " "detect translation-related " "outliers"), + ) zintensity_threshold = traits.Float( mandatory=True, - desc=("Intensity Z-threshold use to " - "detection images that deviate " - "from the mean")) + desc=( + "Intensity Z-threshold used to " + "detect images that deviate " + "from the mean" + ), + ) mask_type = traits.Enum( - 'spm_global', - 'file', - 'thresh', + "spm_global", + "file", + "thresh", mandatory=True, - desc=("Type of mask that should be used to mask the" - " functional data. *spm_global* uses an " - "spm_global like calculation to determine the" - " brain mask. *file* specifies a brain mask " - "file (should be an image file consisting of " - "0s and 1s). *thresh* specifies a threshold " - "to use. By default all voxels are used," - "unless one of these mask types are defined")) - mask_file = File( - exists=True, desc="Mask file to be used if mask_type is 'file'.") + desc=( + "Type of mask that should be used to mask the" + " functional data. *spm_global* uses an " + "spm_global like calculation to determine the" + " brain mask. *file* specifies a brain mask " + "file (should be an image file consisting of " + "0s and 1s). *thresh* specifies a threshold " + "to use. By default all voxels are used, " + "unless one of these mask types is defined" + ), + )
+ mask_file = File(exists=True, desc="Mask file to be used if mask_type is 'file'.") mask_threshold = traits.Float( - desc=("Mask threshold to be used if mask_type" - " is 'thresh'.")) + desc=("Mask threshold to be used if mask_type" " is 'thresh'.") + ) intersect_mask = traits.Bool( - True, usedefault=True, - desc=("Intersect the masks when computed from " - "spm_global.")) + True, + usedefault=True, + desc=("Intersect the masks when computed from " "spm_global."), + ) save_plot = traits.Bool( - True, desc="save plots containing outliers", usedefault=True) + True, desc="save plots containing outliers", usedefault=True + ) plot_type = traits.Enum( - 'png', - 'svg', - 'eps', - 'pdf', + "png", + "svg", + "eps", + "pdf", desc="file type of the outlier plot", - usedefault=True) + usedefault=True, + ) bound_by_brainmask = traits.Bool( False, - desc=("use the brain mask to " - "determine bounding box" - "for composite norm (works" - "for SPM and Nipy - currently" - "inaccurate for FSL, AFNI"), - usedefault=True) + desc=( + "use the brain mask to " + "determine bounding box " + "for composite norm (works " + "for SPM and Nipy - currently " + "inaccurate for FSL, AFNI)" + ), + usedefault=True, + ) global_threshold = traits.Float( 8.0, - desc=("use this threshold when mask " - "type equal's spm_global"), - usedefault=True) + desc=("use this threshold when mask " "type equals spm_global"), + usedefault=True, + ) class ArtifactDetectOutputSpec(TraitedSpec): outlier_files = OutputMultiPath( File(exists=True), - desc=("One file for each functional run " - "containing a list of 0-based indices" - " corresponding to outlier volumes")) + desc=( + "One file for each functional run " + "containing a list of 0-based indices" + " corresponding to outlier volumes" + ), + ) intensity_files = OutputMultiPath( File(exists=True), - desc=("One file for each functional run " - "containing the global intensity " - "values determined from the " - "brainmask")) + desc=( + "One file for each functional run " + "containing the global intensity " + "values determined from the " + "brainmask" + ), + ) norm_files = OutputMultiPath( - File, - desc=("One file for each functional run " - "containing the composite norm")) + File, desc=("One file for each functional run " "containing the composite norm") + ) statistic_files = OutputMultiPath( File(exists=True), - desc=("One file for each functional run " - "containing information about the " - "different types of artifacts and " - "if design info is provided then " - "details of stimulus correlated " - "motion and a listing or artifacts " - "by event type.")) + desc=( + "One file for each functional run " + "containing information about the " + "different types of artifacts and " + "if design info is provided then " + "details of stimulus correlated " + "motion and a listing of artifacts " + "by event type."
+ ), + ) plot_files = OutputMultiPath( File, - desc=("One image file for each functional run " - "containing the detected outliers")) + desc=( + "One image file for each functional run " "containing the detected outliers" + ), + ) mask_files = OutputMultiPath( File, - desc=("One image file for each functional run " - "containing the mask used for global " - "signal calculation")) + desc=( + "One image file for each functional run " + "containing the mask used for global " + "signal calculation" + ), + ) displacement_files = OutputMultiPath( File, - desc=("One image file for each " - "functional run containing the " - "voxel displacement timeseries")) + desc=( + "One image file for each " + "functional run containing the " + "voxel displacement timeseries" + ), + ) class ArtifactDetect(BaseInterface): @@ -344,62 +399,79 @@ def _get_output_filenames(self, motionfile, output_dir): else: raise Exception("Unknown type of file") _, filename, ext = split_filename(infile) - artifactfile = os.path.join(output_dir, ''.join(('art.', filename, - '_outliers.txt'))) - intensityfile = os.path.join(output_dir, ''.join(('global_intensity.', - filename, '.txt'))) - statsfile = os.path.join(output_dir, ''.join(('stats.', filename, - '.txt'))) - normfile = os.path.join(output_dir, ''.join(('norm.', filename, - '.txt'))) - plotfile = os.path.join(output_dir, ''.join(('plot.', filename, '.', - self.inputs.plot_type))) - displacementfile = os.path.join(output_dir, ''.join(('disp.', filename, - ext))) - maskfile = os.path.join(output_dir, ''.join(('mask.', filename, ext))) - return (artifactfile, intensityfile, statsfile, normfile, plotfile, - displacementfile, maskfile) + artifactfile = os.path.join( + output_dir, "".join(("art.", filename, "_outliers.txt")) + ) + intensityfile = os.path.join( + output_dir, "".join(("global_intensity.", filename, ".txt")) + ) + statsfile = os.path.join(output_dir, "".join(("stats.", filename, ".txt"))) + normfile = os.path.join(output_dir, "".join(("norm.", filename, ".txt"))) + plotfile = os.path.join( + output_dir, "".join(("plot.", filename, ".", self.inputs.plot_type)) + ) + displacementfile = os.path.join(output_dir, "".join(("disp.", filename, ext))) + maskfile = os.path.join(output_dir, "".join(("mask.", filename, ext))) + return ( + artifactfile, + intensityfile, + statsfile, + normfile, + plotfile, + displacementfile, + maskfile, + ) def _list_outputs(self): outputs = self._outputs().get() - outputs['outlier_files'] = [] - outputs['intensity_files'] = [] - outputs['statistic_files'] = [] - outputs['mask_files'] = [] + outputs["outlier_files"] = [] + outputs["intensity_files"] = [] + outputs["statistic_files"] = [] + outputs["mask_files"] = [] if isdefined(self.inputs.use_norm) and self.inputs.use_norm: - outputs['norm_files'] = [] + outputs["norm_files"] = [] if self.inputs.bound_by_brainmask: - outputs['displacement_files'] = [] + outputs["displacement_files"] = [] if isdefined(self.inputs.save_plot) and self.inputs.save_plot: - outputs['plot_files'] = [] + outputs["plot_files"] = [] for i, f in enumerate(ensure_list(self.inputs.realigned_files)): - (outlierfile, intensityfile, statsfile, normfile, plotfile, - displacementfile, maskfile) = \ - self._get_output_filenames(f, os.getcwd()) - outputs['outlier_files'].insert(i, outlierfile) - outputs['intensity_files'].insert(i, intensityfile) - outputs['statistic_files'].insert(i, statsfile) - outputs['mask_files'].insert(i, maskfile) + ( + outlierfile, + intensityfile, + statsfile, + normfile, + plotfile, + 
displacementfile, + maskfile, + ) = self._get_output_filenames(f, os.getcwd()) + outputs["outlier_files"].insert(i, outlierfile) + outputs["intensity_files"].insert(i, intensityfile) + outputs["statistic_files"].insert(i, statsfile) + outputs["mask_files"].insert(i, maskfile) if isdefined(self.inputs.use_norm) and self.inputs.use_norm: - outputs['norm_files'].insert(i, normfile) + outputs["norm_files"].insert(i, normfile) if self.inputs.bound_by_brainmask: - outputs['displacement_files'].insert(i, displacementfile) + outputs["displacement_files"].insert(i, displacementfile) if isdefined(self.inputs.save_plot) and self.inputs.save_plot: - outputs['plot_files'].insert(i, plotfile) + outputs["plot_files"].insert(i, plotfile) return outputs def _plot_outliers_with_wave(self, wave, outliers, name): import matplotlib + matplotlib.use(config.get("execution", "matplotlib_backend")) import matplotlib.pyplot as plt + plt.plot(wave) plt.ylim([wave.min(), wave.max()]) plt.xlim([0, len(wave) - 1]) if len(outliers): plt.plot( np.tile(outliers[:, None], (1, 2)).T, - np.tile([wave.min(), wave.max()], (len(outliers), 1)).T, 'r') - plt.xlabel('Scans - 0-based') + np.tile([wave.min(), wave.max()], (len(outliers), 1)).T, + "r", + ) + plt.xlabel("Scans - 0-based") plt.ylabel(name) def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): @@ -407,6 +479,7 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): Core routine for detecting outliers """ from scipy import signal + if not cwd: cwd = os.getcwd() @@ -427,16 +500,15 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): affine = nim.affine g = np.zeros((timepoints, 1)) masktype = self.inputs.mask_type - if masktype == 'spm_global': # spm_global like calculation - iflogger.debug('art: using spm global') + if masktype == "spm_global": # spm_global like calculation + iflogger.debug("art: using spm global") intersect_mask = self.inputs.intersect_mask if intersect_mask: mask = np.ones((x, y, z), dtype=bool) for t0 in range(timepoints): vol = data[:, :, :, t0] # Use an SPM like approach - mask_tmp = vol > \ - (np.nanmean(vol) / self.inputs.global_threshold) + mask_tmp = vol > (np.nanmean(vol) / self.inputs.global_threshold) mask = mask * mask_tmp for t0 in range(timepoints): vol = data[:, :, :, t0] @@ -445,15 +517,14 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): intersect_mask = False g = np.zeros((timepoints, 1)) if not intersect_mask: - iflogger.info('not intersect_mask is True') + iflogger.info("not intersect_mask is True") mask = np.zeros((x, y, z, timepoints)) for t0 in range(timepoints): vol = data[:, :, :, t0] - mask_tmp = vol > \ - (np.nanmean(vol) / self.inputs.global_threshold) + mask_tmp = vol > (np.nanmean(vol) / self.inputs.global_threshold) mask[:, :, :, t0] = mask_tmp g[t0] = np.nansum(vol * mask_tmp) / np.nansum(mask_tmp) - elif masktype == 'file': # uses a mask image to determine intensity + elif masktype == "file": # uses a mask image to determine intensity maskimg = load(self.inputs.mask_file, mmap=NUMPY_MMAP) mask = maskimg.get_data() affine = maskimg.affine @@ -461,7 +532,7 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): for t0 in range(timepoints): vol = data[:, :, :, t0] g[t0] = np.nanmean(vol[mask]) - elif masktype == 'thresh': # uses a fixed signal threshold + elif masktype == "thresh": # uses a fixed signal threshold for t0 in range(timepoints): vol = data[:, :, :, t0] mask = vol > self.inputs.mask_threshold @@ -473,8 +544,7 
@@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): # compute normalized intensity values gz = signal.detrend(g, axis=0) # detrend the signal if self.inputs.use_differences[1]: - gz = np.concatenate( - (np.zeros((1, 1)), np.diff(gz, n=1, axis=0)), axis=0) + gz = np.concatenate((np.zeros((1, 1)), np.diff(gz, n=1, axis=0)), axis=0) gz = (gz - np.mean(gz)) / np.std(gz) # normalize the detrended signal iidx = find_indices(abs(gz) > self.inputs.zintensity_threshold) @@ -482,9 +552,15 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): mc_in = np.loadtxt(motionfile) mc = deepcopy(mc_in) - (artifactfile, intensityfile, statsfile, normfile, plotfile, - displacementfile, maskfile) = self._get_output_filenames( - imgfile, cwd) + ( + artifactfile, + intensityfile, + statsfile, + normfile, + plotfile, + displacementfile, + maskfile, + ) = self._get_output_filenames(imgfile, cwd) mask_img = Nifti1Image(mask.astype(np.uint8), affine) mask_img.to_filename(maskfile) @@ -492,122 +568,122 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): brain_pts = None if self.inputs.bound_by_brainmask: voxel_coords = np.nonzero(mask) - coords = np.vstack((voxel_coords[0], - np.vstack((voxel_coords[1], - voxel_coords[2])))).T - brain_pts = np.dot(affine, - np.hstack((coords, - np.ones((coords.shape[0], - 1)))).T) + coords = np.vstack( + (voxel_coords[0], np.vstack((voxel_coords[1], voxel_coords[2]))) + ).T + brain_pts = np.dot( + affine, np.hstack((coords, np.ones((coords.shape[0], 1)))).T + ) # calculate the norm of the motion parameters normval, displacement = _calc_norm( mc, self.inputs.use_differences[0], self.inputs.parameter_source, - brain_pts=brain_pts) + brain_pts=brain_pts, + ) tidx = find_indices(normval > self.inputs.norm_threshold) ridx = find_indices(normval < 0) if displacement is not None: dmap = np.zeros((x, y, z, timepoints), dtype=np.float) for i in range(timepoints): - dmap[voxel_coords[0], voxel_coords[1], voxel_coords[2], - i] = displacement[i, :] + dmap[ + voxel_coords[0], voxel_coords[1], voxel_coords[2], i + ] = displacement[i, :] dimg = Nifti1Image(dmap, affine) dimg.to_filename(displacementfile) else: if self.inputs.use_differences[0]: mc = np.concatenate( - (np.zeros((1, 6)), np.diff(mc_in, n=1, axis=0)), axis=0) + (np.zeros((1, 6)), np.diff(mc_in, n=1, axis=0)), axis=0 + ) traval = mc[:, 0:3] # translation parameters (mm) rotval = mc[:, 3:6] # rotation parameters (rad) tidx = find_indices( - np.sum(abs(traval) > self.inputs.translation_threshold, 1) > 0) + np.sum(abs(traval) > self.inputs.translation_threshold, 1) > 0 + ) ridx = find_indices( - np.sum(abs(rotval) > self.inputs.rotation_threshold, 1) > 0) + np.sum(abs(rotval) > self.inputs.rotation_threshold, 1) > 0 + ) outliers = np.unique(np.union1d(iidx, np.union1d(tidx, ridx))) # write output to outputfile - np.savetxt(artifactfile, outliers, fmt=b'%d', delimiter=' ') - np.savetxt(intensityfile, g, fmt=b'%.2f', delimiter=' ') + np.savetxt(artifactfile, outliers, fmt=b"%d", delimiter=" ") + np.savetxt(intensityfile, g, fmt=b"%.2f", delimiter=" ") if self.inputs.use_norm: - np.savetxt(normfile, normval, fmt=b'%.4f', delimiter=' ') + np.savetxt(normfile, normval, fmt=b"%.4f", delimiter=" ") if isdefined(self.inputs.save_plot) and self.inputs.save_plot: import matplotlib + matplotlib.use(config.get("execution", "matplotlib_backend")) import matplotlib.pyplot as plt + fig = plt.figure() if isdefined(self.inputs.use_norm) and self.inputs.use_norm: plt.subplot(211) else: 
plt.subplot(311) - self._plot_outliers_with_wave(gz, iidx, 'Intensity') + self._plot_outliers_with_wave(gz, iidx, "Intensity") if isdefined(self.inputs.use_norm) and self.inputs.use_norm: plt.subplot(212) - self._plot_outliers_with_wave(normval, np.union1d(tidx, ridx), - 'Norm (mm)') + self._plot_outliers_with_wave( + normval, np.union1d(tidx, ridx), "Norm (mm)" + ) else: - diff = '' + diff = "" if self.inputs.use_differences[0]: - diff = 'diff' + diff = "diff" plt.subplot(312) - self._plot_outliers_with_wave(traval, tidx, - 'Translation (mm)' + diff) + self._plot_outliers_with_wave(traval, tidx, "Translation (mm)" + diff) plt.subplot(313) - self._plot_outliers_with_wave(rotval, ridx, - 'Rotation (rad)' + diff) + self._plot_outliers_with_wave(rotval, ridx, "Rotation (rad)" + diff) plt.savefig(plotfile) plt.close(fig) motion_outliers = np.union1d(tidx, ridx) stats = [ + {"motion_file": motionfile, "functional_file": imgfile}, { - 'motion_file': motionfile, - 'functional_file': imgfile + "common_outliers": len(np.intersect1d(iidx, motion_outliers)), + "intensity_outliers": len(np.setdiff1d(iidx, motion_outliers)), + "motion_outliers": len(np.setdiff1d(motion_outliers, iidx)), }, { - 'common_outliers': len(np.intersect1d(iidx, motion_outliers)), - 'intensity_outliers': len(np.setdiff1d(iidx, motion_outliers)), - 'motion_outliers': len(np.setdiff1d(motion_outliers, iidx)), - }, - { - 'motion': [ - { - 'using differences': self.inputs.use_differences[0] - }, + "motion": [ + {"using differences": self.inputs.use_differences[0]}, { - 'mean': np.mean(mc_in, axis=0).tolist(), - 'min': np.min(mc_in, axis=0).tolist(), - 'max': np.max(mc_in, axis=0).tolist(), - 'std': np.std(mc_in, axis=0).tolist() + "mean": np.mean(mc_in, axis=0).tolist(), + "min": np.min(mc_in, axis=0).tolist(), + "max": np.max(mc_in, axis=0).tolist(), + "std": np.std(mc_in, axis=0).tolist(), }, ] }, { - 'intensity': [ - { - 'using differences': self.inputs.use_differences[1] - }, + "intensity": [ + {"using differences": self.inputs.use_differences[1]}, { - 'mean': np.mean(gz, axis=0).tolist(), - 'min': np.min(gz, axis=0).tolist(), - 'max': np.max(gz, axis=0).tolist(), - 'std': np.std(gz, axis=0).tolist() + "mean": np.mean(gz, axis=0).tolist(), + "min": np.min(gz, axis=0).tolist(), + "max": np.max(gz, axis=0).tolist(), + "std": np.std(gz, axis=0).tolist(), }, ] }, ] if self.inputs.use_norm: stats.insert( - 3, { - 'motion_norm': { - 'mean': np.mean(normval, axis=0).tolist(), - 'min': np.min(normval, axis=0).tolist(), - 'max': np.max(normval, axis=0).tolist(), - 'std': np.std(normval, axis=0).tolist(), + 3, + { + "motion_norm": { + "mean": np.mean(normval, axis=0).tolist(), + "min": np.min(normval, axis=0).tolist(), + "max": np.max(normval, axis=0).tolist(), + "std": np.std(normval, axis=0).tolist(), } - }) + }, + ) save_json(statsfile, stats) def _run_interface(self, runtime): @@ -616,8 +692,7 @@ def _run_interface(self, runtime): funcfilelist = ensure_list(self.inputs.realigned_files) motparamlist = ensure_list(self.inputs.realignment_parameters) for i, imgf in enumerate(funcfilelist): - self._detect_outliers_core( - imgf, motparamlist[i], i, cwd=os.getcwd()) + self._detect_outliers_core(imgf, motparamlist[i], i, cwd=os.getcwd()) return runtime @@ -625,29 +700,30 @@ class StimCorrInputSpec(BaseInterfaceInputSpec): realignment_parameters = InputMultiPath( File(exists=True), mandatory=True, - desc=("Names of realignment " - "parameters corresponding to " - "the functional data files")) + desc=( + "Names of realignment " + "parameters 
corresponding to " + "the functional data files" + ), + ) intensity_values = InputMultiPath( File(exists=True), mandatory=True, - desc=("Name of file containing intensity " - "values")) + desc=("Name of file containing intensity " "values"), + ) spm_mat_file = File( - exists=True, - mandatory=True, - desc="SPM mat file (use pre-estimate SPM.mat file)") + exists=True, mandatory=True, desc="SPM mat file (use pre-estimate SPM.mat file)" + ) concatenated_design = traits.Bool( mandatory=True, - desc=("state if the design matrix " - "contains concatenated sessions")) + desc=("state if the design matrix " "contains concatenated sessions"), + ) class StimCorrOutputSpec(TraitedSpec): stimcorr_files = OutputMultiPath( - File(exists=True), - desc=("List of files containing " - "correlation values")) + File(exists=True), desc=("List of files containing " "correlation values") + ) class StimulusCorrelation(BaseInterface): @@ -688,12 +764,10 @@ def _get_output_filenames(self, motionfile, output_dir): """ (_, filename) = os.path.split(motionfile) (filename, _) = os.path.splitext(filename) - corrfile = os.path.join(output_dir, ''.join(('qa.', filename, - '_stimcorr.txt'))) + corrfile = os.path.join(output_dir, "".join(("qa.", filename, "_stimcorr.txt"))) return corrfile - def _stimcorr_core(self, motionfile, intensityfile, designmatrix, - cwd=None): + def _stimcorr_core(self, motionfile, intensityfile, designmatrix, cwd=None): """ Core routine for determining stimulus correlation @@ -710,14 +784,14 @@ def _stimcorr_core(self, motionfile, intensityfile, designmatrix, cm = np.corrcoef(concat_matrix, rowvar=0) corrfile = self._get_output_filenames(motionfile, cwd) # write output to outputfile - file = open(corrfile, 'w') + file = open(corrfile, "w") file.write("Stats for:\n") file.write("Stimulus correlated motion:\n%s\n" % motionfile) for i in range(dcol): file.write("SCM.%d:" % i) for v in cm[i, dcol + np.arange(mccol)]: file.write(" %.2f" % v) - file.write('\n') + file.write("\n") file.write("Stimulus correlated intensity:\n%s\n" % intensityfile) for i in range(dcol): file.write("SCI.%d: %.2f\n" % (i, cm[i, -1])) @@ -732,21 +806,19 @@ def _get_spm_submatrix(self, spmmat, sessidx, rows=None): sessidx: int index to session that needs to be extracted. """ - designmatrix = spmmat['SPM'][0][0].xX[0][0].X - U = spmmat['SPM'][0][0].Sess[0][sessidx].U[0] + designmatrix = spmmat["SPM"][0][0].xX[0][0].X + U = spmmat["SPM"][0][0].Sess[0][sessidx].U[0] if rows is None: - rows = spmmat['SPM'][0][0].Sess[0][sessidx].row[0] - 1 - cols = (spmmat['SPM'][0][0].Sess[0][sessidx].col[0][list( - range(len(U)))] - 1) - outmatrix = designmatrix.take( - rows.tolist(), axis=0).take( - cols.tolist(), axis=1) + rows = spmmat["SPM"][0][0].Sess[0][sessidx].row[0] - 1 + cols = spmmat["SPM"][0][0].Sess[0][sessidx].col[0][list(range(len(U)))] - 1 + outmatrix = designmatrix.take(rows.tolist(), axis=0).take(cols.tolist(), axis=1) return outmatrix def _run_interface(self, runtime): """Execute this module. 
""" import scipy.io as sio + motparamlist = self.inputs.realignment_parameters intensityfiles = self.inputs.intensity_values spmmat = sio.loadmat(self.inputs.spm_mat_file, struct_as_record=False) @@ -760,8 +832,7 @@ def _run_interface(self, runtime): rows = np.sum(nrows) + np.arange(mc_in.shape[0]) nrows.append(mc_in.shape[0]) matrix = self._get_spm_submatrix(spmmat, sessidx, rows) - self._stimcorr_core(motparamlist[i], intensityfiles[i], matrix, - os.getcwd()) + self._stimcorr_core(motparamlist[i], intensityfiles[i], matrix, os.getcwd()) return runtime def _list_outputs(self): @@ -770,5 +841,5 @@ def _list_outputs(self): for i, f in enumerate(self.inputs.realignment_parameters): files.insert(i, self._get_output_filenames(f, os.getcwd())) if files: - outputs['stimcorr_files'] = files + outputs["stimcorr_files"] = files return outputs diff --git a/nipype/algorithms/stats.py b/nipype/algorithms/stats.py index 29a45f5844..2a8b00f614 100644 --- a/nipype/algorithms/stats.py +++ b/nipype/algorithms/stats.py @@ -9,24 +9,33 @@ import numpy as np from ..interfaces.base import ( - BaseInterfaceInputSpec, TraitedSpec, SimpleInterface, - traits, InputMultiPath, File + BaseInterfaceInputSpec, + TraitedSpec, + SimpleInterface, + traits, + InputMultiPath, + File, ) from ..utils.filemanip import split_filename class ActivationCountInputSpec(BaseInterfaceInputSpec): - in_files = InputMultiPath(File(exists=True), mandatory=True, - desc='input file, generally a list of z-stat maps') + in_files = InputMultiPath( + File(exists=True), + mandatory=True, + desc="input file, generally a list of z-stat maps", + ) threshold = traits.Float( - mandatory=True, desc='binarization threshold. E.g. a threshold of 1.65 ' - 'corresponds to a two-sided Z-test of p<.10') + mandatory=True, + desc="binarization threshold. E.g. 
a threshold of 1.65 " + "corresponds to a two-sided Z-test of p<.10", + ) class ActivationCountOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='output activation count map') - acm_pos = File(exists=True, desc='positive activation count map') - acm_neg = File(exists=True, desc='negative activation count map') + out_file = File(exists=True, desc="output activation count map") + acm_pos = File(exists=True, desc="positive activation count map") + acm_neg = File(exists=True, desc="negative activation count map") class ActivationCount(SimpleInterface): @@ -36,31 +45,35 @@ class ActivationCount(SimpleInterface): Adapted from: https://github.com/poldracklab/CNP_task_analysis/\ blob/61c27f5992db9d8800884f8ffceb73e6957db8af/CNP_2nd_level_ACM.py """ + input_spec = ActivationCountInputSpec output_spec = ActivationCountOutputSpec def _run_interface(self, runtime): allmaps = nb.concat_images(self.inputs.in_files).get_data() - acm_pos = np.mean(allmaps > self.inputs.threshold, - axis=3, dtype=np.float32) - acm_neg = np.mean(allmaps < -1.0 * self.inputs.threshold, - axis=3, dtype=np.float32) + acm_pos = np.mean(allmaps > self.inputs.threshold, axis=3, dtype=np.float32) + acm_neg = np.mean( + allmaps < -1.0 * self.inputs.threshold, axis=3, dtype=np.float32 + ) acm_diff = acm_pos - acm_neg template_fname = self.inputs.in_files[0] ext = split_filename(template_fname)[2] - fname_fmt = os.path.join(runtime.cwd, 'acm_{}' + ext).format + fname_fmt = os.path.join(runtime.cwd, "acm_{}" + ext).format - self._results['out_file'] = fname_fmt('diff') - self._results['acm_pos'] = fname_fmt('pos') - self._results['acm_neg'] = fname_fmt('neg') + self._results["out_file"] = fname_fmt("diff") + self._results["acm_pos"] = fname_fmt("pos") + self._results["acm_neg"] = fname_fmt("neg") img = nb.load(template_fname) img.__class__(acm_diff, img.affine, img.header).to_filename( - self._results['out_file']) + self._results["out_file"] + ) img.__class__(acm_pos, img.affine, img.header).to_filename( - self._results['acm_pos']) + self._results["acm_pos"] + ) img.__class__(acm_neg, img.affine, img.header).to_filename( - self._results['acm_neg']) + self._results["acm_neg"] + ) return runtime diff --git a/nipype/algorithms/tests/test_CompCor.py b/nipype/algorithms/tests/test_CompCor.py index 3aa535dc19..762d8a0889 100644 --- a/nipype/algorithms/tests/test_CompCor.py +++ b/nipype/algorithms/tests/test_CompCor.py @@ -10,14 +10,14 @@ from ..confounds import CompCor, TCompCor, ACompCor -class TestCompCor(): - ''' Note: Tests currently do a poor job of testing functionality ''' +class TestCompCor: + """ Note: Tests currently do a poor job of testing functionality """ filenames = { - 'functionalnii': 'compcorfunc.nii', - 'masknii': 'compcormask.nii', - 'masknii2': 'compcormask2.nii', - 'components_file': None + "functionalnii": "compcorfunc.nii", + "masknii": "compcormask.nii", + "masknii2": "compcormask2.nii", + "components_file": None, } @pytest.fixture(autouse=True) @@ -26,32 +26,38 @@ def setup_class(self, tmpdir): tmpdir.chdir() noise = np.fromfunction(self.fake_noise_fun, self.fake_data.shape) self.realigned_file = utils.save_toy_nii( - self.fake_data + noise, self.filenames['functionalnii']) + self.fake_data + noise, self.filenames["functionalnii"] + ) mask = np.ones(self.fake_data.shape[:3]) mask[0, 0, 0] = 0 mask[0, 0, 1] = 0 - mask1 = utils.save_toy_nii(mask, self.filenames['masknii']) + mask1 = utils.save_toy_nii(mask, self.filenames["masknii"]) other_mask = np.ones(self.fake_data.shape[:3]) other_mask[0, 1, 0] = 0 
other_mask[1, 1, 0] = 0 - mask2 = utils.save_toy_nii(other_mask, self.filenames['masknii2']) + mask2 = utils.save_toy_nii(other_mask, self.filenames["masknii2"]) self.mask_files = [mask1, mask2] def test_compcor(self): - expected_components = [['-0.1989607212', '-0.5753813646'], [ - '0.5692369697', '0.5674945949' - ], ['-0.6662573243', - '0.4675843432'], ['0.4206466244', '-0.3361270124'], - ['-0.1246655485', '-0.1235705610']] + expected_components = [ + ["-0.1989607212", "-0.5753813646"], + ["0.5692369697", "0.5674945949"], + ["-0.6662573243", "0.4675843432"], + ["0.4206466244", "-0.3361270124"], + ["-0.1246655485", "-0.1235705610"], + ] self.run_cc( CompCor( num_components=6, realigned_file=self.realigned_file, mask_files=self.mask_files, - mask_index=0), expected_components) + mask_index=0, + ), + expected_components, + ) self.run_cc( ACompCor( @@ -59,50 +65,66 @@ def test_compcor(self): realigned_file=self.realigned_file, mask_files=self.mask_files, mask_index=0, - components_file='acc_components_file'), expected_components, - 'aCompCor') + components_file="acc_components_file", + ), + expected_components, + "aCompCor", + ) def test_compcor_variance_threshold_and_metadata(self): - expected_components = [['-0.2027150345', '-0.4954813834'], - ['0.2565929051', '0.7866217875'], - ['-0.3550986008', '-0.0089784905'], - ['0.7512786244', '-0.3599828482'], - ['-0.4500578942', '0.0778209345']] + expected_components = [ + ["-0.2027150345", "-0.4954813834"], + ["0.2565929051", "0.7866217875"], + ["-0.3550986008", "-0.0089784905"], + ["0.7512786244", "-0.3599828482"], + ["-0.4500578942", "0.0778209345"], + ] expected_metadata = { - 'component': 'CompCor00', - 'mask': 'mask', - 'singular_value': '4.0720553036', - 'variance_explained': '0.5527211465', - 'cumulative_variance_explained': '0.5527211465', - 'retained': 'True', + "component": "CompCor00", + "mask": "mask", + "singular_value": "4.0720553036", + "variance_explained": "0.5527211465", + "cumulative_variance_explained": "0.5527211465", + "retained": "True", } ccinterface = CompCor( - variance_threshold=0.7, - realigned_file=self.realigned_file, - mask_files=self.mask_files, - mask_names=['mask'], - mask_index=1, - save_metadata=True) - self.run_cc(ccinterface=ccinterface, - expected_components=expected_components, - expected_n_components=2, - expected_metadata=expected_metadata) + variance_threshold=0.7, + realigned_file=self.realigned_file, + mask_files=self.mask_files, + mask_names=["mask"], + mask_index=1, + save_metadata=True, + ) + self.run_cc( + ccinterface=ccinterface, + expected_components=expected_components, + expected_n_components=2, + expected_metadata=expected_metadata, + ) def test_tcompcor(self): - ccinterface = TCompCor(num_components=6, - realigned_file=self.realigned_file, percentile_threshold=0.75) - self.run_cc(ccinterface, [['-0.1114536190', '-0.4632908609'], [ - '0.4566907310', '0.6983205193' - ], ['-0.7132557407', '0.1340170559'], [ - '0.5022537643', '-0.5098322262' - ], ['-0.1342351356', '0.1407855119']], 'tCompCor') + ccinterface = TCompCor( + num_components=6, + realigned_file=self.realigned_file, + percentile_threshold=0.75, + ) + self.run_cc( + ccinterface, + [ + ["-0.1114536190", "-0.4632908609"], + ["0.4566907310", "0.6983205193"], + ["-0.7132557407", "0.1340170559"], + ["0.5022537643", "-0.5098322262"], + ["-0.1342351356", "0.1407855119"], + ], + "tCompCor", + ) def test_tcompcor_no_percentile(self): - ccinterface = TCompCor(num_components=6, - realigned_file=self.realigned_file) + ccinterface = 
TCompCor(num_components=6, realigned_file=self.realigned_file) ccinterface.run() - mask = nb.load('mask_000.nii.gz').get_data() + mask = nb.load("mask_000.nii.gz").get_data() num_nonmasked_voxels = np.count_nonzero(mask) assert num_nonmasked_voxels == 1 @@ -113,20 +135,25 @@ def test_compcor_no_regress_poly(self): realigned_file=self.realigned_file, mask_files=self.mask_files, mask_index=0, - pre_filter=False), [['0.4451946442', '-0.7683311482'], [ - '-0.4285129505', '-0.0926034137' - ], ['0.5721540256', '0.5608764842'], [ - '-0.5367548139', '0.0059943226' - ], ['-0.0520809054', '0.2940637551']]) + pre_filter=False, + ), + [ + ["0.4451946442", "-0.7683311482"], + ["-0.4285129505", "-0.0926034137"], + ["0.5721540256", "0.5608764842"], + ["-0.5367548139", "0.0059943226"], + ["-0.0520809054", "0.2940637551"], + ], + ) def test_tcompcor_asymmetric_dim(self): asymmetric_shape = (2, 3, 4, 5) asymmetric_data = utils.save_toy_nii( - np.zeros(asymmetric_shape), 'asymmetric.nii') + np.zeros(asymmetric_shape), "asymmetric.nii" + ) TCompCor(realigned_file=asymmetric_data).run() - assert nb.load( - 'mask_000.nii.gz').get_data().shape == asymmetric_shape[:3] + assert nb.load("mask_000.nii.gz").get_data().shape == asymmetric_shape[:3] def test_compcor_bad_input_shapes(self): # dim 0 is < dim 0 of self.mask_files (2) @@ -135,76 +162,81 @@ def test_compcor_bad_input_shapes(self): shape_more_than = (3, 3, 3, 5) for data_shape in (shape_less_than, shape_more_than): - data_file = utils.save_toy_nii(np.zeros(data_shape), 'temp.nii') - interface = CompCor( - realigned_file=data_file, mask_files=self.mask_files[0]) + data_file = utils.save_toy_nii(np.zeros(data_shape), "temp.nii") + interface = CompCor(realigned_file=data_file, mask_files=self.mask_files[0]) with pytest.raises(ValueError): interface.run() # Dimension mismatch def test_tcompcor_bad_input_dim(self): bad_dims = (2, 2, 2) - data_file = utils.save_toy_nii(np.zeros(bad_dims), 'temp.nii') + data_file = utils.save_toy_nii(np.zeros(bad_dims), "temp.nii") interface = TCompCor(realigned_file=data_file) with pytest.raises(ValueError): interface.run() # Not a 4D file def test_tcompcor_merge_intersect_masks(self): - for method in ['union', 'intersect']: + for method in ["union", "intersect"]: TCompCor( realigned_file=self.realigned_file, mask_files=self.mask_files, - merge_method=method).run() - if method == 'union': + merge_method=method, + ).run() + if method == "union": assert np.array_equal( - nb.load('mask_000.nii.gz').get_data(), - ([[[0, 0], [0, 0]], [[0, 0], [1, 0]]])) - if method == 'intersect': + nb.load("mask_000.nii.gz").get_data(), + ([[[0, 0], [0, 0]], [[0, 0], [1, 0]]]), + ) + if method == "intersect": assert np.array_equal( - nb.load('mask_000.nii.gz').get_data(), - ([[[0, 0], [0, 0]], [[0, 1], [0, 0]]])) + nb.load("mask_000.nii.gz").get_data(), + ([[[0, 0], [0, 0]], [[0, 1], [0, 0]]]), + ) def test_tcompcor_index_mask(self): TCompCor( - realigned_file=self.realigned_file, - mask_files=self.mask_files, - mask_index=1).run() + realigned_file=self.realigned_file, mask_files=self.mask_files, mask_index=1 + ).run() assert np.array_equal( - nb.load('mask_000.nii.gz').get_data(), - ([[[0, 0], [0, 0]], [[0, 1], [0, 0]]])) + nb.load("mask_000.nii.gz").get_data(), + ([[[0, 0], [0, 0]], [[0, 1], [0, 0]]]), + ) def test_tcompcor_multi_mask_no_index(self): interface = TCompCor( - realigned_file=self.realigned_file, mask_files=self.mask_files) + realigned_file=self.realigned_file, mask_files=self.mask_files + ) with pytest.raises(ValueError): 
interface.run() # more than one mask file - def run_cc(self, - ccinterface, - expected_components, - expected_header='CompCor', - expected_n_components=None, - expected_metadata=None): + def run_cc( + self, + ccinterface, + expected_components, + expected_header="CompCor", + expected_n_components=None, + expected_metadata=None, + ): # run ccresult = ccinterface.run() # assert - expected_file = ccinterface._list_outputs()['components_file'] + expected_file = ccinterface._list_outputs()["components_file"] assert ccresult.outputs.components_file == expected_file assert os.path.exists(expected_file) assert os.path.getsize(expected_file) > 0 - with open(ccresult.outputs.components_file, 'r') as components_file: + with open(ccresult.outputs.components_file, "r") as components_file: if expected_n_components is None: - expected_n_components = min(ccinterface.inputs.num_components, - self.fake_data.shape[3]) + expected_n_components = min( + ccinterface.inputs.num_components, self.fake_data.shape[3] + ) - components_data = [line.rstrip().split('\t') - for line in components_file] + components_data = [line.rstrip().split("\t") for line in components_file] # the first item will be '#', we can throw it out header = components_data.pop(0) expected_header = [ - expected_header + '{:02d}'.format(i) + expected_header + "{:02d}".format(i) for i in range(expected_n_components) ] for i, heading in enumerate(header): @@ -213,22 +245,22 @@ def run_cc(self, num_got_timepoints = len(components_data) assert num_got_timepoints == self.fake_data.shape[3] for index, timepoint in enumerate(components_data): - assert (len(timepoint) == expected_n_components) + assert len(timepoint) == expected_n_components assert timepoint[:2] == expected_components[index] if ccinterface.inputs.save_metadata: - expected_metadata_file = ( - ccinterface._list_outputs()['metadata_file']) + expected_metadata_file = ccinterface._list_outputs()["metadata_file"] assert ccresult.outputs.metadata_file == expected_metadata_file assert os.path.exists(expected_metadata_file) assert os.path.getsize(expected_metadata_file) > 0 - with open(ccresult.outputs.metadata_file, 'r') as metadata_file: - components_metadata = [line.rstrip().split('\t') - for line in metadata_file] - components_metadata = {i: j for i, j in - zip(components_metadata[0], - components_metadata[1])} + with open(ccresult.outputs.metadata_file, "r") as metadata_file: + components_metadata = [ + line.rstrip().split("\t") for line in metadata_file + ] + components_metadata = { + i: j for i, j in zip(components_metadata[0], components_metadata[1]) + } assert components_metadata == expected_metadata return ccresult @@ -237,7 +269,9 @@ def run_cc(self, def fake_noise_fun(i, j, l, m): return m * i + l - j - fake_data = np.array([[[[8, 5, 3, 8, 0], [6, 7, 4, 7, 1]], - [[7, 9, 1, 6, 5], [0, 7, 4, 7, 7]]], - [[[2, 4, 5, 7, 0], [1, 7, 0, 5, 4]], - [[7, 3, 9, 0, 4], [9, 4, 1, 5, 0]]]]) + fake_data = np.array( + [ + [[[8, 5, 3, 8, 0], [6, 7, 4, 7, 1]], [[7, 9, 1, 6, 5], [0, 7, 4, 7, 7]]], + [[[2, 4, 5, 7, 0], [1, 7, 0, 5, 4]], [[7, 3, 9, 0, 4], [9, 4, 1, 5, 0]]], + ] + ) diff --git a/nipype/algorithms/tests/test_ErrorMap.py b/nipype/algorithms/tests/test_ErrorMap.py index cfd30b0b74..4ae811008f 100644 --- a/nipype/algorithms/tests/test_ErrorMap.py +++ b/nipype/algorithms/tests/test_ErrorMap.py @@ -23,30 +23,30 @@ def test_errormap(tmpdir): img2 = nb.Nifti1Image(volume2, np.eye(4)) maskimg = nb.Nifti1Image(mask, np.eye(4)) - nb.save(img1, tmpdir.join('von.nii.gz').strpath) - nb.save(img2, 
tmpdir.join('alan.nii.gz').strpath) - nb.save(maskimg, tmpdir.join('mask.nii.gz').strpath) + nb.save(img1, tmpdir.join("von.nii.gz").strpath) + nb.save(img2, tmpdir.join("alan.nii.gz").strpath) + nb.save(maskimg, tmpdir.join("mask.nii.gz").strpath) # Default metric errmap = ErrorMap() - errmap.inputs.in_tst = tmpdir.join('von.nii.gz').strpath - errmap.inputs.in_ref = tmpdir.join('alan.nii.gz').strpath - errmap.out_map = tmpdir.join('out_map.nii.gz').strpath + errmap.inputs.in_tst = tmpdir.join("von.nii.gz").strpath + errmap.inputs.in_ref = tmpdir.join("alan.nii.gz").strpath + errmap.out_map = tmpdir.join("out_map.nii.gz").strpath result = errmap.run() assert result.outputs.distance == 1.125 # Square metric - errmap.inputs.metric = 'sqeuclidean' + errmap.inputs.metric = "sqeuclidean" result = errmap.run() assert result.outputs.distance == 1.125 # Linear metric - errmap.inputs.metric = 'euclidean' + errmap.inputs.metric = "euclidean" result = errmap.run() assert result.outputs.distance == 0.875 # Masked - errmap.inputs.mask = tmpdir.join('mask.nii.gz').strpath + errmap.inputs.mask = tmpdir.join("mask.nii.gz").strpath result = errmap.run() assert result.outputs.distance == 1.0 @@ -64,15 +64,15 @@ def test_errormap(tmpdir): msvolume2[:, :, :, 1] = volume1 msimg2 = nb.Nifti1Image(msvolume2, np.eye(4)) - nb.save(msimg1, tmpdir.join('von-ray.nii.gz').strpath) - nb.save(msimg2, tmpdir.join('alan-ray.nii.gz').strpath) + nb.save(msimg1, tmpdir.join("von-ray.nii.gz").strpath) + nb.save(msimg2, tmpdir.join("alan-ray.nii.gz").strpath) - errmap.inputs.in_tst = tmpdir.join('von-ray.nii.gz').strpath - errmap.inputs.in_ref = tmpdir.join('alan-ray.nii.gz').strpath - errmap.inputs.metric = 'sqeuclidean' + errmap.inputs.in_tst = tmpdir.join("von-ray.nii.gz").strpath + errmap.inputs.in_ref = tmpdir.join("alan-ray.nii.gz").strpath + errmap.inputs.metric = "sqeuclidean" result = errmap.run() assert result.outputs.distance == 5.5 - errmap.inputs.metric = 'euclidean' + errmap.inputs.metric = "euclidean" result = errmap.run() - assert result.outputs.distance == np.float32(1.25 * (2**0.5)) + assert result.outputs.distance == np.float32(1.25 * (2 ** 0.5)) diff --git a/nipype/algorithms/tests/test_Overlap.py b/nipype/algorithms/tests/test_Overlap.py index 786a7328b8..ea3b5a3f5d 100644 --- a/nipype/algorithms/tests/test_Overlap.py +++ b/nipype/algorithms/tests/test_Overlap.py @@ -5,7 +5,7 @@ import os -from nipype.testing import (example_data) +from nipype.testing import example_data import numpy as np @@ -15,10 +15,11 @@ def test_overlap(tmpdir): def check_close(val1, val2): import numpy.testing as npt + return npt.assert_almost_equal(val1, val2, decimal=3) - in1 = example_data('segmentation0.nii.gz') - in2 = example_data('segmentation1.nii.gz') + in1 = example_data("segmentation0.nii.gz") + in2 = example_data("segmentation1.nii.gz") tmpdir.chdir() overlap = Overlap() @@ -36,8 +37,7 @@ def check_close(val1, val2): overlap = Overlap() overlap.inputs.volume1 = in1 overlap.inputs.volume2 = in2 - overlap.inputs.vol_units = 'mm' + overlap.inputs.vol_units = "mm" res = overlap.run() check_close(res.outputs.jaccard, 0.99705) - check_close(res.outputs.roi_voldiff, np.array([0.0063086, -0.0025506, - 0.0])) + check_close(res.outputs.roi_voldiff, np.array([0.0063086, -0.0025506, 0.0])) diff --git a/nipype/algorithms/tests/test_TSNR.py b/nipype/algorithms/tests/test_TSNR.py index 1d192ec056..e00bf35e05 100644 --- a/nipype/algorithms/tests/test_TSNR.py +++ b/nipype/algorithms/tests/test_TSNR.py @@ -13,18 +13,18 @@ import os -class 
TestTSNR(): - ''' Note: Tests currently do a poor job of testing functionality ''' +class TestTSNR: + """ Note: Tests currently do a poor job of testing functionality """ in_filenames = { - 'in_file': 'tsnrinfile.nii', + "in_file": "tsnrinfile.nii", } out_filenames = { # default output file names - 'detrended_file': 'detrend.nii.gz', - 'mean_file': 'mean.nii.gz', - 'stddev_file': 'stdev.nii.gz', - 'tsnr_file': 'tsnr.nii.gz' + "detrended_file": "detrend.nii.gz", + "mean_file": "mean.nii.gz", + "stddev_file": "stdev.nii.gz", + "tsnr_file": "tsnr.nii.gz", } @pytest.fixture(autouse=True) @@ -32,78 +32,84 @@ def setup_class(self, tmpdir): # setup temp folder tmpdir.chdir() - utils.save_toy_nii(self.fake_data, self.in_filenames['in_file']) + utils.save_toy_nii(self.fake_data, self.in_filenames["in_file"]) def test_tsnr(self): # run - tsnrresult = TSNR(in_file=self.in_filenames['in_file']).run() + tsnrresult = TSNR(in_file=self.in_filenames["in_file"]).run() # assert self.assert_expected_outputs( - tsnrresult, { - 'mean_file': (2.8, 7.4), - 'stddev_file': (0.8, 2.9), - 'tsnr_file': (1.3, 9.25) - }) + tsnrresult, + { + "mean_file": (2.8, 7.4), + "stddev_file": (0.8, 2.9), + "tsnr_file": (1.3, 9.25), + }, + ) def test_tsnr_withpoly1(self): # run - tsnrresult = TSNR( - in_file=self.in_filenames['in_file'], regress_poly=1).run() + tsnrresult = TSNR(in_file=self.in_filenames["in_file"], regress_poly=1).run() # assert self.assert_expected_outputs_poly( - tsnrresult, { - 'detrended_file': (-0.1, 8.7), - 'mean_file': (2.8, 7.4), - 'stddev_file': (0.75, 2.75), - 'tsnr_file': (1.4, 9.9) - }) + tsnrresult, + { + "detrended_file": (-0.1, 8.7), + "mean_file": (2.8, 7.4), + "stddev_file": (0.75, 2.75), + "tsnr_file": (1.4, 9.9), + }, + ) def test_tsnr_withpoly2(self): # run - tsnrresult = TSNR( - in_file=self.in_filenames['in_file'], regress_poly=2).run() + tsnrresult = TSNR(in_file=self.in_filenames["in_file"], regress_poly=2).run() # assert self.assert_expected_outputs_poly( - tsnrresult, { - 'detrended_file': (-0.22, 8.55), - 'mean_file': (2.8, 7.7), - 'stddev_file': (0.21, 2.4), - 'tsnr_file': (1.7, 35.9) - }) + tsnrresult, + { + "detrended_file": (-0.22, 8.55), + "mean_file": (2.8, 7.7), + "stddev_file": (0.21, 2.4), + "tsnr_file": (1.7, 35.9), + }, + ) def test_tsnr_withpoly3(self): # run - tsnrresult = TSNR( - in_file=self.in_filenames['in_file'], regress_poly=3).run() + tsnrresult = TSNR(in_file=self.in_filenames["in_file"], regress_poly=3).run() # assert self.assert_expected_outputs_poly( - tsnrresult, { - 'detrended_file': (1.8, 7.95), - 'mean_file': (2.8, 7.7), - 'stddev_file': (0.1, 1.7), - 'tsnr_file': (2.6, 57.3) - }) - - @mock.patch('warnings.warn') + tsnrresult, + { + "detrended_file": (1.8, 7.95), + "mean_file": (2.8, 7.7), + "stddev_file": (0.1, 1.7), + "tsnr_file": (2.6, 57.3), + }, + ) + + @mock.patch("warnings.warn") def test_warning(self, mock_warn): - ''' test that usage of misc.TSNR trips a warning to use - confounds.TSNR instead ''' + """ test that usage of misc.TSNR trips a warning to use + confounds.TSNR instead """ # run - misc.TSNR(in_file=self.in_filenames['in_file']) + misc.TSNR(in_file=self.in_filenames["in_file"]) # assert assert True in [ - args[0].count('confounds') > 0 - for _, args, _ in mock_warn.mock_calls + args[0].count("confounds") > 0 for _, args, _ in mock_warn.mock_calls ] def assert_expected_outputs_poly(self, tsnrresult, expected_ranges): - assert os.path.basename(tsnrresult.outputs.detrended_file) == \ - self.out_filenames['detrended_file'] + assert ( + 
os.path.basename(tsnrresult.outputs.detrended_file) + == self.out_filenames["detrended_file"] + ) self.assert_expected_outputs(tsnrresult, expected_ranges) def assert_expected_outputs(self, tsnrresult, expected_ranges): @@ -111,12 +117,11 @@ def assert_expected_outputs(self, tsnrresult, expected_ranges): self.assert_unchanged(expected_ranges) def assert_default_outputs(self, outputs): - assert os.path.basename(outputs.mean_file) == \ - self.out_filenames['mean_file'] - assert os.path.basename(outputs.stddev_file) == \ - self.out_filenames['stddev_file'] - assert os.path.basename(outputs.tsnr_file) == \ - self.out_filenames['tsnr_file'] + assert os.path.basename(outputs.mean_file) == self.out_filenames["mean_file"] + assert ( + os.path.basename(outputs.stddev_file) == self.out_filenames["stddev_file"] + ) + assert os.path.basename(outputs.tsnr_file) == self.out_filenames["tsnr_file"] def assert_unchanged(self, expected_ranges): for key, (min_, max_) in expected_ranges.items(): @@ -124,7 +129,9 @@ def assert_unchanged(self, expected_ranges): npt.assert_almost_equal(np.amin(data), min_, decimal=1) npt.assert_almost_equal(np.amax(data), max_, decimal=1) - fake_data = np.array([[[[2, 4, 3, 9, 1], [3, 6, 4, 7, 4]], - [[8, 3, 4, 6, 2], [4, 0, 4, 4, 2]]], - [[[9, 7, 5, 5, 7], [7, 8, 4, 8, 4]], - [[0, 4, 7, 1, 7], [6, 8, 8, 8, 7]]]]) + fake_data = np.array( + [ + [[[2, 4, 3, 9, 1], [3, 6, 4, 7, 4]], [[8, 3, 4, 6, 2], [4, 0, 4, 4, 2]]], + [[[9, 7, 5, 5, 7], [7, 8, 4, 8, 4]], [[0, 4, 7, 1, 7], [6, 8, 8, 8, 7]]], + ] + ) diff --git a/nipype/algorithms/tests/test_auto_ACompCor.py b/nipype/algorithms/tests/test_auto_ACompCor.py index 598bbd7c5a..e2788e97d5 100644 --- a/nipype/algorithms/tests/test_auto_ACompCor.py +++ b/nipype/algorithms/tests/test_auto_ACompCor.py @@ -4,47 +4,37 @@ def test_ACompCor_inputs(): input_map = dict( - components_file=dict(usedefault=True, ), - failure_mode=dict(usedefault=True, ), + components_file=dict(usedefault=True,), + failure_mode=dict(usedefault=True,), header_prefix=dict(), - high_pass_cutoff=dict(usedefault=True, ), - ignore_initial_volumes=dict(usedefault=True, ), + high_pass_cutoff=dict(usedefault=True,), + ignore_initial_volumes=dict(usedefault=True,), mask_files=dict(), - mask_index=dict( - requires=['mask_files'], - xor=['merge_method'], - ), + mask_index=dict(requires=["mask_files"], xor=["merge_method"],), mask_names=dict(), - merge_method=dict( - requires=['mask_files'], - xor=['mask_index'], - ), - num_components=dict(xor=['variance_threshold'], ), - pre_filter=dict(usedefault=True, ), - realigned_file=dict( - extensions=None, - mandatory=True, - ), - regress_poly_degree=dict(usedefault=True, ), + merge_method=dict(requires=["mask_files"], xor=["mask_index"],), + num_components=dict(xor=["variance_threshold"],), + pre_filter=dict(usedefault=True,), + realigned_file=dict(extensions=None, mandatory=True,), + regress_poly_degree=dict(usedefault=True,), repetition_time=dict(), - save_metadata=dict(usedefault=True, ), - save_pre_filter=dict(usedefault=True, ), - use_regress_poly=dict( - deprecated='0.15.0', - new_name='pre_filter', - ), - variance_threshold=dict(xor=['num_components'], ), + save_metadata=dict(usedefault=True,), + save_pre_filter=dict(usedefault=True,), + use_regress_poly=dict(deprecated="0.15.0", new_name="pre_filter",), + variance_threshold=dict(xor=["num_components"],), ) inputs = ACompCor.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == 
value + + def test_ACompCor_outputs(): output_map = dict( - components_file=dict(extensions=None, ), - metadata_file=dict(extensions=None, ), - pre_filter_file=dict(extensions=None, ), + components_file=dict(extensions=None,), + metadata_file=dict(extensions=None,), + pre_filter_file=dict(extensions=None,), ) outputs = ACompCor.output_spec() diff --git a/nipype/algorithms/tests/test_auto_ActivationCount.py b/nipype/algorithms/tests/test_auto_ActivationCount.py index f3b8d77f23..6fc50301ac 100644 --- a/nipype/algorithms/tests/test_auto_ActivationCount.py +++ b/nipype/algorithms/tests/test_auto_ActivationCount.py @@ -3,20 +3,19 @@ def test_ActivationCount_inputs(): - input_map = dict( - in_files=dict(mandatory=True, ), - threshold=dict(mandatory=True, ), - ) + input_map = dict(in_files=dict(mandatory=True,), threshold=dict(mandatory=True,),) inputs = ActivationCount.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ActivationCount_outputs(): output_map = dict( - acm_neg=dict(extensions=None, ), - acm_pos=dict(extensions=None, ), - out_file=dict(extensions=None, ), + acm_neg=dict(extensions=None,), + acm_pos=dict(extensions=None,), + out_file=dict(extensions=None,), ) outputs = ActivationCount.output_spec() diff --git a/nipype/algorithms/tests/test_auto_AddCSVColumn.py b/nipype/algorithms/tests/test_auto_AddCSVColumn.py index b761ad25f5..a2d82b6eec 100644 --- a/nipype/algorithms/tests/test_auto_AddCSVColumn.py +++ b/nipype/algorithms/tests/test_auto_AddCSVColumn.py @@ -6,22 +6,18 @@ def test_AddCSVColumn_inputs(): input_map = dict( extra_column_heading=dict(), extra_field=dict(), - in_file=dict( - extensions=None, - mandatory=True, - ), - out_file=dict( - extensions=None, - usedefault=True, - ), + in_file=dict(extensions=None, mandatory=True,), + out_file=dict(extensions=None, usedefault=True,), ) inputs = AddCSVColumn.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AddCSVColumn_outputs(): - output_map = dict(csv_file=dict(extensions=None, ), ) + output_map = dict(csv_file=dict(extensions=None,),) outputs = AddCSVColumn.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_AddCSVRow.py b/nipype/algorithms/tests/test_auto_AddCSVRow.py index 13d3da48b9..39d6d40abb 100644 --- a/nipype/algorithms/tests/test_auto_AddCSVRow.py +++ b/nipype/algorithms/tests/test_auto_AddCSVRow.py @@ -4,19 +4,17 @@ def test_AddCSVRow_inputs(): input_map = dict( - _outputs=dict(usedefault=True, ), - in_file=dict( - extensions=None, - mandatory=True, - ), + _outputs=dict(usedefault=True,), in_file=dict(extensions=None, mandatory=True,), ) inputs = AddCSVRow.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AddCSVRow_outputs(): - output_map = dict(csv_file=dict(extensions=None, ), ) + output_map = dict(csv_file=dict(extensions=None,),) outputs = AddCSVRow.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_AddNoise.py b/nipype/algorithms/tests/test_auto_AddNoise.py index e98a761ca7..ad1e8734e8 100644 --- a/nipype/algorithms/tests/test_auto_AddNoise.py +++ b/nipype/algorithms/tests/test_auto_AddNoise.py @@ -4,29 +4,22 @@ def test_AddNoise_inputs(): 
input_map = dict( - bg_dist=dict( - mandatory=True, - usedefault=True, - ), - dist=dict( - mandatory=True, - usedefault=True, - ), - in_file=dict( - extensions=None, - mandatory=True, - ), - in_mask=dict(extensions=None, ), - out_file=dict(extensions=None, ), - snr=dict(usedefault=True, ), + bg_dist=dict(mandatory=True, usedefault=True,), + dist=dict(mandatory=True, usedefault=True,), + in_file=dict(extensions=None, mandatory=True,), + in_mask=dict(extensions=None,), + out_file=dict(extensions=None,), + snr=dict(usedefault=True,), ) inputs = AddNoise.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AddNoise_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = AddNoise.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_ArtifactDetect.py b/nipype/algorithms/tests/test_auto_ArtifactDetect.py index 6d49a4bd27..9340982472 100644 --- a/nipype/algorithms/tests/test_auto_ArtifactDetect.py +++ b/nipype/algorithms/tests/test_auto_ArtifactDetect.py @@ -4,45 +4,33 @@ def test_ArtifactDetect_inputs(): input_map = dict( - bound_by_brainmask=dict(usedefault=True, ), - global_threshold=dict(usedefault=True, ), - intersect_mask=dict(usedefault=True, ), - mask_file=dict(extensions=None, ), + bound_by_brainmask=dict(usedefault=True,), + global_threshold=dict(usedefault=True,), + intersect_mask=dict(usedefault=True,), + mask_file=dict(extensions=None,), mask_threshold=dict(), - mask_type=dict(mandatory=True, ), + mask_type=dict(mandatory=True,), norm_threshold=dict( - mandatory=True, - xor=['rotation_threshold', 'translation_threshold'], + mandatory=True, xor=["rotation_threshold", "translation_threshold"], ), - parameter_source=dict(mandatory=True, ), - plot_type=dict(usedefault=True, ), - realigned_files=dict(mandatory=True, ), - realignment_parameters=dict(mandatory=True, ), - rotation_threshold=dict( - mandatory=True, - xor=['norm_threshold'], - ), - save_plot=dict(usedefault=True, ), - translation_threshold=dict( - mandatory=True, - xor=['norm_threshold'], - ), - use_differences=dict( - maxlen=2, - minlen=2, - usedefault=True, - ), - use_norm=dict( - requires=['norm_threshold'], - usedefault=True, - ), - zintensity_threshold=dict(mandatory=True, ), + parameter_source=dict(mandatory=True,), + plot_type=dict(usedefault=True,), + realigned_files=dict(mandatory=True,), + realignment_parameters=dict(mandatory=True,), + rotation_threshold=dict(mandatory=True, xor=["norm_threshold"],), + save_plot=dict(usedefault=True,), + translation_threshold=dict(mandatory=True, xor=["norm_threshold"],), + use_differences=dict(maxlen=2, minlen=2, usedefault=True,), + use_norm=dict(requires=["norm_threshold"], usedefault=True,), + zintensity_threshold=dict(mandatory=True,), ) inputs = ArtifactDetect.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ArtifactDetect_outputs(): output_map = dict( displacement_files=dict(), diff --git a/nipype/algorithms/tests/test_auto_CalculateMedian.py b/nipype/algorithms/tests/test_auto_CalculateMedian.py index 1da36b096f..ff8d9edd7a 100644 --- a/nipype/algorithms/tests/test_auto_CalculateMedian.py +++ b/nipype/algorithms/tests/test_auto_CalculateMedian.py @@ -4,17 +4,17 @@ def test_CalculateMedian_inputs(): input_map 
diff --git a/nipype/algorithms/tests/test_auto_CalculateMedian.py b/nipype/algorithms/tests/test_auto_CalculateMedian.py
index 1da36b096f..ff8d9edd7a 100644
--- a/nipype/algorithms/tests/test_auto_CalculateMedian.py
+++ b/nipype/algorithms/tests/test_auto_CalculateMedian.py
@@ -4,17 +4,17 @@

 def test_CalculateMedian_inputs():
     input_map = dict(
-        in_files=dict(),
-        median_file=dict(),
-        median_per_file=dict(usedefault=True, ),
+        in_files=dict(), median_file=dict(), median_per_file=dict(usedefault=True,),
     )
     inputs = CalculateMedian.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_CalculateMedian_outputs():
-    output_map = dict(median_files=dict(), )
+    output_map = dict(median_files=dict(),)
     outputs = CalculateMedian.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/algorithms/tests/test_auto_CalculateNormalizedMoments.py b/nipype/algorithms/tests/test_auto_CalculateNormalizedMoments.py
index 06ad3c3508..102ec2c205 100644
--- a/nipype/algorithms/tests/test_auto_CalculateNormalizedMoments.py
+++ b/nipype/algorithms/tests/test_auto_CalculateNormalizedMoments.py
@@ -4,19 +4,18 @@

 def test_CalculateNormalizedMoments_inputs():
     input_map = dict(
-        moment=dict(mandatory=True, ),
-        timeseries_file=dict(
-            extensions=None,
-            mandatory=True,
-        ),
+        moment=dict(mandatory=True,),
+        timeseries_file=dict(extensions=None, mandatory=True,),
     )
     inputs = CalculateNormalizedMoments.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_CalculateNormalizedMoments_outputs():
-    output_map = dict(moments=dict(), )
+    output_map = dict(moments=dict(),)
     outputs = CalculateNormalizedMoments.output_spec()

     for key, metadata in list(output_map.items()):
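A formatting detail worth flagging: forms like dict(usedefault=True,), collapsed onto one line while keeping the trailing comma, indicate a black release predating the "magic trailing comma" (added, to my knowledge, in black 20.8b0); a newer black would treat that comma as a request to keep the call exploded, one argument per line. An illustrative comparison, not output copied from either tool:

# black before 20.8b0 (the style throughout this diff) collapses short calls:
median_per_file = dict(usedefault=True,)

# black 20.8b0 and later honors the trailing comma and keeps the call open:
# median_per_file = dict(
#     usedefault=True,
# )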
diff --git a/nipype/algorithms/tests/test_auto_ComputeDVARS.py b/nipype/algorithms/tests/test_auto_ComputeDVARS.py
index 3c68d0bd98..ed51de0b0a 100644
--- a/nipype/algorithms/tests/test_auto_ComputeDVARS.py
+++ b/nipype/algorithms/tests/test_auto_ComputeDVARS.py
@@ -4,24 +4,18 @@

 def test_ComputeDVARS_inputs():
     input_map = dict(
-        figdpi=dict(usedefault=True, ),
-        figformat=dict(usedefault=True, ),
-        figsize=dict(usedefault=True, ),
-        in_file=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        in_mask=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        intensity_normalization=dict(usedefault=True, ),
-        remove_zerovariance=dict(usedefault=True, ),
-        save_all=dict(usedefault=True, ),
-        save_nstd=dict(usedefault=True, ),
-        save_plot=dict(usedefault=True, ),
-        save_std=dict(usedefault=True, ),
-        save_vxstd=dict(usedefault=True, ),
+        figdpi=dict(usedefault=True,),
+        figformat=dict(usedefault=True,),
+        figsize=dict(usedefault=True,),
+        in_file=dict(extensions=None, mandatory=True,),
+        in_mask=dict(extensions=None, mandatory=True,),
+        intensity_normalization=dict(usedefault=True,),
+        remove_zerovariance=dict(usedefault=True,),
+        save_all=dict(usedefault=True,),
+        save_nstd=dict(usedefault=True,),
+        save_plot=dict(usedefault=True,),
+        save_std=dict(usedefault=True,),
+        save_vxstd=dict(usedefault=True,),
         series_tr=dict(),
     )
     inputs = ComputeDVARS.input_spec()
@@ -29,18 +23,20 @@ def test_ComputeDVARS_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_ComputeDVARS_outputs():
     output_map = dict(
         avg_nstd=dict(),
         avg_std=dict(),
         avg_vxstd=dict(),
-        fig_nstd=dict(extensions=None, ),
-        fig_std=dict(extensions=None, ),
-        fig_vxstd=dict(extensions=None, ),
-        out_all=dict(extensions=None, ),
-        out_nstd=dict(extensions=None, ),
-        out_std=dict(extensions=None, ),
-        out_vxstd=dict(extensions=None, ),
+        fig_nstd=dict(extensions=None,),
+        fig_std=dict(extensions=None,),
+        fig_vxstd=dict(extensions=None,),
+        out_all=dict(extensions=None,),
+        out_nstd=dict(extensions=None,),
+        out_std=dict(extensions=None,),
+        out_vxstd=dict(extensions=None,),
     )
     outputs = ComputeDVARS.output_spec()
diff --git a/nipype/algorithms/tests/test_auto_ComputeMeshWarp.py b/nipype/algorithms/tests/test_auto_ComputeMeshWarp.py
index cf76cd7467..871564b817 100644
--- a/nipype/algorithms/tests/test_auto_ComputeMeshWarp.py
+++ b/nipype/algorithms/tests/test_auto_ComputeMeshWarp.py
@@ -4,35 +4,25 @@

 def test_ComputeMeshWarp_inputs():
     input_map = dict(
-        metric=dict(usedefault=True, ),
-        out_file=dict(
-            extensions=None,
-            usedefault=True,
-        ),
-        out_warp=dict(
-            extensions=None,
-            usedefault=True,
-        ),
-        surface1=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        surface2=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        weighting=dict(usedefault=True, ),
+        metric=dict(usedefault=True,),
+        out_file=dict(extensions=None, usedefault=True,),
+        out_warp=dict(extensions=None, usedefault=True,),
+        surface1=dict(extensions=None, mandatory=True,),
+        surface2=dict(extensions=None, mandatory=True,),
+        weighting=dict(usedefault=True,),
     )
     inputs = ComputeMeshWarp.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_ComputeMeshWarp_outputs():
     output_map = dict(
         distance=dict(),
-        out_file=dict(extensions=None, ),
-        out_warp=dict(extensions=None, ),
+        out_file=dict(extensions=None,),
+        out_warp=dict(extensions=None,),
     )
     outputs = ComputeMeshWarp.output_spec()
diff --git a/nipype/algorithms/tests/test_auto_CreateNifti.py b/nipype/algorithms/tests/test_auto_CreateNifti.py
index af9853227c..d4989386b4 100644
--- a/nipype/algorithms/tests/test_auto_CreateNifti.py
+++ b/nipype/algorithms/tests/test_auto_CreateNifti.py
@@ -5,22 +5,18 @@
 def test_CreateNifti_inputs():
     input_map = dict(
         affine=dict(),
-        data_file=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        header_file=dict(
-            extensions=None,
-            mandatory=True,
-        ),
+        data_file=dict(extensions=None, mandatory=True,),
+        header_file=dict(extensions=None, mandatory=True,),
     )
     inputs = CreateNifti.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_CreateNifti_outputs():
-    output_map = dict(nifti_file=dict(extensions=None, ), )
+    output_map = dict(nifti_file=dict(extensions=None,),)
     outputs = CreateNifti.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/algorithms/tests/test_auto_Distance.py b/nipype/algorithms/tests/test_auto_Distance.py
index 254a6b23d2..e334e3a0f1 100644
--- a/nipype/algorithms/tests/test_auto_Distance.py
+++ b/nipype/algorithms/tests/test_auto_Distance.py
@@ -4,28 +4,21 @@

 def test_Distance_inputs():
     input_map = dict(
-        mask_volume=dict(extensions=None, ),
-        method=dict(usedefault=True, ),
-        volume1=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        volume2=dict(
-            extensions=None,
-            mandatory=True,
-        ),
+        mask_volume=dict(extensions=None,),
+        method=dict(usedefault=True,),
+        volume1=dict(extensions=None, mandatory=True,),
+        volume2=dict(extensions=None, mandatory=True,),
     )
     inputs = Distance.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Distance_outputs():
     output_map = dict(
-        distance=dict(),
-        histogram=dict(extensions=None, ),
-        point1=dict(),
-        point2=dict(),
+        distance=dict(), histogram=dict(extensions=None,), point1=dict(), point2=dict(),
     )
     outputs = Distance.output_spec()
diff --git a/nipype/algorithms/tests/test_auto_FramewiseDisplacement.py b/nipype/algorithms/tests/test_auto_FramewiseDisplacement.py
index e529b891e7..1bc46fba64 100644
--- a/nipype/algorithms/tests/test_auto_FramewiseDisplacement.py
+++ b/nipype/algorithms/tests/test_auto_FramewiseDisplacement.py
@@ -4,24 +4,15 @@

 def test_FramewiseDisplacement_inputs():
     input_map = dict(
-        figdpi=dict(usedefault=True, ),
-        figsize=dict(usedefault=True, ),
-        in_file=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        normalize=dict(usedefault=True, ),
-        out_figure=dict(
-            extensions=None,
-            usedefault=True,
-        ),
-        out_file=dict(
-            extensions=None,
-            usedefault=True,
-        ),
-        parameter_source=dict(mandatory=True, ),
-        radius=dict(usedefault=True, ),
-        save_plot=dict(usedefault=True, ),
+        figdpi=dict(usedefault=True,),
+        figsize=dict(usedefault=True,),
+        in_file=dict(extensions=None, mandatory=True,),
+        normalize=dict(usedefault=True,),
+        out_figure=dict(extensions=None, usedefault=True,),
+        out_file=dict(extensions=None, usedefault=True,),
+        parameter_source=dict(mandatory=True,),
+        radius=dict(usedefault=True,),
+        save_plot=dict(usedefault=True,),
         series_tr=dict(),
     )
     inputs = FramewiseDisplacement.input_spec()
@@ -29,11 +20,13 @@ def test_FramewiseDisplacement_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_FramewiseDisplacement_outputs():
     output_map = dict(
         fd_average=dict(),
-        out_figure=dict(extensions=None, ),
-        out_file=dict(extensions=None, ),
+        out_figure=dict(extensions=None,),
+        out_file=dict(extensions=None,),
     )
     outputs = FramewiseDisplacement.output_spec()
diff --git a/nipype/algorithms/tests/test_auto_FuzzyOverlap.py b/nipype/algorithms/tests/test_auto_FuzzyOverlap.py
index 80513bd850..877f864bee 100644
--- a/nipype/algorithms/tests/test_auto_FuzzyOverlap.py
+++ b/nipype/algorithms/tests/test_auto_FuzzyOverlap.py
@@ -4,27 +4,21 @@

 def test_FuzzyOverlap_inputs():
     input_map = dict(
-        in_mask=dict(extensions=None, ),
-        in_ref=dict(mandatory=True, ),
-        in_tst=dict(mandatory=True, ),
-        out_file=dict(
-            extensions=None,
-            usedefault=True,
-        ),
-        weighting=dict(usedefault=True, ),
+        in_mask=dict(extensions=None,),
+        in_ref=dict(mandatory=True,),
+        in_tst=dict(mandatory=True,),
+        out_file=dict(extensions=None, usedefault=True,),
+        weighting=dict(usedefault=True,),
     )
     inputs = FuzzyOverlap.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_FuzzyOverlap_outputs():
-    output_map = dict(
-        class_fdi=dict(),
-        class_fji=dict(),
-        dice=dict(),
-        jaccard=dict(),
-    )
+    output_map = dict(class_fdi=dict(), class_fji=dict(), dice=dict(), jaccard=dict(),)
     outputs = FuzzyOverlap.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/algorithms/tests/test_auto_Gunzip.py b/nipype/algorithms/tests/test_auto_Gunzip.py
index 765a1a3f68..40a1f44531 100644
--- a/nipype/algorithms/tests/test_auto_Gunzip.py
+++ b/nipype/algorithms/tests/test_auto_Gunzip.py
@@ -3,17 +3,16 @@

 def test_Gunzip_inputs():
-    input_map = dict(in_file=dict(
-        extensions=None,
-        mandatory=True,
-    ), )
+    input_map = dict(in_file=dict(extensions=None, mandatory=True,),)
     inputs = Gunzip.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Gunzip_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = Gunzip.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/algorithms/tests/test_auto_ICC.py b/nipype/algorithms/tests/test_auto_ICC.py
index 2056ad000e..9797fdb4af 100644
--- a/nipype/algorithms/tests/test_auto_ICC.py
+++ b/nipype/algorithms/tests/test_auto_ICC.py
@@ -4,22 +4,21 @@

 def test_ICC_inputs():
     input_map = dict(
-        mask=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        subjects_sessions=dict(mandatory=True, ),
+        mask=dict(extensions=None, mandatory=True,),
+        subjects_sessions=dict(mandatory=True,),
     )
     inputs = ICC.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_ICC_outputs():
     output_map = dict(
-        icc_map=dict(extensions=None, ),
-        session_var_map=dict(extensions=None, ),
-        subject_var_map=dict(extensions=None, ),
+        icc_map=dict(extensions=None,),
+        session_var_map=dict(extensions=None,),
+        subject_var_map=dict(extensions=None,),
     )
     outputs = ICC.output_spec()
diff --git a/nipype/algorithms/tests/test_auto_Matlab2CSV.py b/nipype/algorithms/tests/test_auto_Matlab2CSV.py
index b783f317f7..665dbc3fed 100644
--- a/nipype/algorithms/tests/test_auto_Matlab2CSV.py
+++ b/nipype/algorithms/tests/test_auto_Matlab2CSV.py
@@ -4,19 +4,18 @@

 def test_Matlab2CSV_inputs():
     input_map = dict(
-        in_file=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        reshape_matrix=dict(usedefault=True, ),
+        in_file=dict(extensions=None, mandatory=True,),
+        reshape_matrix=dict(usedefault=True,),
     )
     inputs = Matlab2CSV.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Matlab2CSV_outputs():
-    output_map = dict(csv_files=dict(), )
+    output_map = dict(csv_files=dict(),)
     outputs = Matlab2CSV.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/algorithms/tests/test_auto_MergeCSVFiles.py b/nipype/algorithms/tests/test_auto_MergeCSVFiles.py
index aeacb0eaf5..f4f7bc54a0 100644
--- a/nipype/algorithms/tests/test_auto_MergeCSVFiles.py
+++ b/nipype/algorithms/tests/test_auto_MergeCSVFiles.py
@@ -7,12 +7,9 @@ def test_MergeCSVFiles_inputs():
         column_headings=dict(),
         extra_column_heading=dict(),
         extra_field=dict(),
-        in_files=dict(mandatory=True, ),
-        out_file=dict(
-            extensions=None,
-            usedefault=True,
-        ),
-        row_heading_title=dict(usedefault=True, ),
+        in_files=dict(mandatory=True,),
+        out_file=dict(extensions=None, usedefault=True,),
+        row_heading_title=dict(usedefault=True,),
         row_headings=dict(),
     )
     inputs = MergeCSVFiles.input_spec()
@@ -20,8 +17,10 @@ def test_MergeCSVFiles_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MergeCSVFiles_outputs():
-    output_map = dict(csv_file=dict(extensions=None, ), )
+    output_map = dict(csv_file=dict(extensions=None,),)
     outputs = MergeCSVFiles.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/algorithms/tests/test_auto_MergeROIs.py b/nipype/algorithms/tests/test_auto_MergeROIs.py
index a588955342..7f56b9d08c 100644
--- a/nipype/algorithms/tests/test_auto_MergeROIs.py
+++ b/nipype/algorithms/tests/test_auto_MergeROIs.py
@@ -4,17 +4,17 @@

 def test_MergeROIs_inputs():
     input_map = dict(
-        in_files=dict(),
-        in_index=dict(),
-        in_reference=dict(extensions=None, ),
+        in_files=dict(), in_index=dict(), in_reference=dict(extensions=None,),
     )
     inputs = MergeROIs.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MergeROIs_outputs():
-    output_map = dict(merged_file=dict(extensions=None, ), )
+    output_map = dict(merged_file=dict(extensions=None,),)
     outputs = MergeROIs.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/algorithms/tests/test_auto_MeshWarpMaths.py b/nipype/algorithms/tests/test_auto_MeshWarpMaths.py
index 887f5a54e7..be0de9e541 100644
--- a/nipype/algorithms/tests/test_auto_MeshWarpMaths.py
+++ b/nipype/algorithms/tests/test_auto_MeshWarpMaths.py
@@ -5,34 +5,21 @@
 def test_MeshWarpMaths_inputs():
     input_map = dict(
         float_trait=dict(),
-        in_surf=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        operation=dict(usedefault=True, ),
-        operator=dict(
-            mandatory=True,
-            usedefault=True,
-        ),
-        out_file=dict(
-            extensions=None,
-            usedefault=True,
-        ),
-        out_warp=dict(
-            extensions=None,
-            usedefault=True,
-        ),
+        in_surf=dict(extensions=None, mandatory=True,),
+        operation=dict(usedefault=True,),
+        operator=dict(mandatory=True, usedefault=True,),
+        out_file=dict(extensions=None, usedefault=True,),
+        out_warp=dict(extensions=None, usedefault=True,),
     )
     inputs = MeshWarpMaths.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MeshWarpMaths_outputs():
-    output_map = dict(
-        out_file=dict(extensions=None, ),
-        out_warp=dict(extensions=None, ),
-    )
+    output_map = dict(out_file=dict(extensions=None,), out_warp=dict(extensions=None,),)
     outputs = MeshWarpMaths.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/algorithms/tests/test_auto_ModifyAffine.py b/nipype/algorithms/tests/test_auto_ModifyAffine.py
index d524a3cf58..a4a441e662 100644
--- a/nipype/algorithms/tests/test_auto_ModifyAffine.py
+++ b/nipype/algorithms/tests/test_auto_ModifyAffine.py
@@ -4,16 +4,17 @@

 def test_ModifyAffine_inputs():
     input_map = dict(
-        transformation_matrix=dict(usedefault=True, ),
-        volumes=dict(mandatory=True, ),
+        transformation_matrix=dict(usedefault=True,), volumes=dict(mandatory=True,),
     )
     inputs = ModifyAffine.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_ModifyAffine_outputs():
-    output_map = dict(transformed_volumes=dict(), )
+    output_map = dict(transformed_volumes=dict(),)
     outputs = ModifyAffine.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/algorithms/tests/test_auto_NonSteadyStateDetector.py b/nipype/algorithms/tests/test_auto_NonSteadyStateDetector.py
index 453f7c17f0..5d42bcf0e7 100644
--- a/nipype/algorithms/tests/test_auto_NonSteadyStateDetector.py
+++ b/nipype/algorithms/tests/test_auto_NonSteadyStateDetector.py
@@ -3,17 +3,16 @@

 def test_NonSteadyStateDetector_inputs():
-    input_map = dict(in_file=dict(
-        extensions=None,
-        mandatory=True,
-    ), )
+    input_map = dict(in_file=dict(extensions=None, mandatory=True,),)
    inputs = NonSteadyStateDetector.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_NonSteadyStateDetector_outputs():
-    output_map = dict(n_volumes_to_discard=dict(), )
+    output_map = dict(n_volumes_to_discard=dict(),)
     outputs = NonSteadyStateDetector.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/algorithms/tests/test_auto_NormalizeProbabilityMapSet.py b/nipype/algorithms/tests/test_auto_NormalizeProbabilityMapSet.py
index da95a8b2a2..41b8cc030d 100644
--- a/nipype/algorithms/tests/test_auto_NormalizeProbabilityMapSet.py
+++ b/nipype/algorithms/tests/test_auto_NormalizeProbabilityMapSet.py
@@ -3,17 +3,16 @@

 def test_NormalizeProbabilityMapSet_inputs():
-    input_map = dict(
-        in_files=dict(),
-        in_mask=dict(extensions=None, ),
-    )
+    input_map = dict(in_files=dict(), in_mask=dict(extensions=None,),)
     inputs = NormalizeProbabilityMapSet.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_NormalizeProbabilityMapSet_outputs():
-    output_map = dict(out_files=dict(), )
+    output_map = dict(out_files=dict(),)
     outputs = NormalizeProbabilityMapSet.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/algorithms/tests/test_auto_P2PDistance.py b/nipype/algorithms/tests/test_auto_P2PDistance.py
index a10b6e4b58..0c11648576 100644
--- a/nipype/algorithms/tests/test_auto_P2PDistance.py
+++ b/nipype/algorithms/tests/test_auto_P2PDistance.py
@@ -4,35 +4,25 @@

 def test_P2PDistance_inputs():
     input_map = dict(
-        metric=dict(usedefault=True, ),
-        out_file=dict(
-            extensions=None,
-            usedefault=True,
-        ),
-        out_warp=dict(
-            extensions=None,
-            usedefault=True,
-        ),
-        surface1=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        surface2=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        weighting=dict(usedefault=True, ),
+        metric=dict(usedefault=True,),
+        out_file=dict(extensions=None, usedefault=True,),
+        out_warp=dict(extensions=None, usedefault=True,),
+        surface1=dict(extensions=None, mandatory=True,),
+        surface2=dict(extensions=None, mandatory=True,),
+        weighting=dict(usedefault=True,),
     )
     inputs = P2PDistance.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_P2PDistance_outputs():
     output_map = dict(
         distance=dict(),
-        out_file=dict(extensions=None, ),
-        out_warp=dict(extensions=None, ),
+        out_file=dict(extensions=None,),
+        out_warp=dict(extensions=None,),
     )
     outputs = P2PDistance.output_spec()
diff --git a/nipype/algorithms/tests/test_auto_PickAtlas.py b/nipype/algorithms/tests/test_auto_PickAtlas.py
index 7fbe81307e..71a76aba5c 100644
--- a/nipype/algorithms/tests/test_auto_PickAtlas.py
+++ b/nipype/algorithms/tests/test_auto_PickAtlas.py
@@ -4,22 +4,21 @@

 def test_PickAtlas_inputs():
     input_map = dict(
-        atlas=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        dilation_size=dict(usedefault=True, ),
-        hemi=dict(usedefault=True, ),
-        labels=dict(mandatory=True, ),
-        output_file=dict(extensions=None, ),
+        atlas=dict(extensions=None, mandatory=True,),
+        dilation_size=dict(usedefault=True,),
+        hemi=dict(usedefault=True,),
+        labels=dict(mandatory=True,),
+        output_file=dict(extensions=None,),
     )
     inputs = PickAtlas.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_PickAtlas_outputs():
-    output_map = dict(mask_file=dict(extensions=None, ), )
+    output_map = dict(mask_file=dict(extensions=None,),)
     outputs = PickAtlas.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/algorithms/tests/test_auto_Similarity.py b/nipype/algorithms/tests/test_auto_Similarity.py
index f138e36295..3a851d0d30 100644
--- a/nipype/algorithms/tests/test_auto_Similarity.py
+++ b/nipype/algorithms/tests/test_auto_Similarity.py
@@ -4,25 +4,21 @@

 def test_Similarity_inputs():
     input_map = dict(
-        mask1=dict(extensions=None, ),
-        mask2=dict(extensions=None, ),
-        metric=dict(usedefault=True, ),
-        volume1=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        volume2=dict(
-            extensions=None,
-            mandatory=True,
-        ),
+        mask1=dict(extensions=None,),
+        mask2=dict(extensions=None,),
+        metric=dict(usedefault=True,),
+        volume1=dict(extensions=None, mandatory=True,),
+        volume2=dict(extensions=None, mandatory=True,),
     )
     inputs = Similarity.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Similarity_outputs():
-    output_map = dict(similarity=dict(), )
+    output_map = dict(similarity=dict(),)
     outputs = Similarity.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/algorithms/tests/test_auto_SimpleThreshold.py b/nipype/algorithms/tests/test_auto_SimpleThreshold.py
index 2c66dc9f76..7a1c531c3d 100644
--- a/nipype/algorithms/tests/test_auto_SimpleThreshold.py
+++ b/nipype/algorithms/tests/test_auto_SimpleThreshold.py
@@ -3,17 +3,16 @@

 def test_SimpleThreshold_inputs():
-    input_map = dict(
-        threshold=dict(mandatory=True, ),
-        volumes=dict(mandatory=True, ),
-    )
+    input_map = dict(threshold=dict(mandatory=True,), volumes=dict(mandatory=True,),)
     inputs = SimpleThreshold.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_SimpleThreshold_outputs():
-    output_map = dict(thresholded_volumes=dict(), )
+    output_map = dict(thresholded_volumes=dict(),)
     outputs = SimpleThreshold.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/algorithms/tests/test_auto_SpecifyModel.py b/nipype/algorithms/tests/test_auto_SpecifyModel.py
index 4c856de1bb..fd583f42bc 100644
--- a/nipype/algorithms/tests/test_auto_SpecifyModel.py
+++ b/nipype/algorithms/tests/test_auto_SpecifyModel.py
@@ -5,37 +5,33 @@
 def test_SpecifyModel_inputs():
     input_map = dict(
         bids_amplitude_column=dict(),
-        bids_condition_column=dict(usedefault=True, ),
+        bids_condition_column=dict(usedefault=True,),
         bids_event_file=dict(
-            mandatory=True,
-            xor=['subject_info', 'event_files', 'bids_event_file'],
+            mandatory=True, xor=["subject_info", "event_files", "bids_event_file"],
         ),
         event_files=dict(
-            mandatory=True,
-            xor=['subject_info', 'event_files', 'bids_event_file'],
+            mandatory=True, xor=["subject_info", "event_files", "bids_event_file"],
         ),
-        functional_runs=dict(
-            copyfile=False,
-            mandatory=True,
-        ),
-        high_pass_filter_cutoff=dict(mandatory=True, ),
-        input_units=dict(mandatory=True, ),
-        outlier_files=dict(copyfile=False, ),
-        parameter_source=dict(usedefault=True, ),
-        realignment_parameters=dict(copyfile=False, ),
+        functional_runs=dict(copyfile=False, mandatory=True,),
+        high_pass_filter_cutoff=dict(mandatory=True,),
+        input_units=dict(mandatory=True,),
+        outlier_files=dict(copyfile=False,),
+        parameter_source=dict(usedefault=True,),
+        realignment_parameters=dict(copyfile=False,),
         subject_info=dict(
-            mandatory=True,
-            xor=['subject_info', 'event_files', 'bids_event_file'],
+            mandatory=True, xor=["subject_info", "event_files", "bids_event_file"],
         ),
-        time_repetition=dict(mandatory=True, ),
+        time_repetition=dict(mandatory=True,),
     )
     inputs = SpecifyModel.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_SpecifyModel_outputs():
-    output_map = dict(session_info=dict(), )
+    output_map = dict(session_info=dict(),)
     outputs = SpecifyModel.output_spec()

     for key, metadata in list(output_map.items()):
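The xor metadata in the SpecifyModel family above declares subject_info, event_files, and bids_event_file mutually exclusive: each is marked mandatory, but combined with xor the effective rule is that exactly one member of the group must be set. An illustrative check of that constraint, written independently of nipype's actual validation code (which runs when the interface executes):

def check_xor(inputs, group=("subject_info", "event_files", "bids_event_file")):
    # Count how many members of the xor group were assigned a value.
    defined = [name for name in group if getattr(inputs, name, None) is not None]
    if len(defined) != 1:
        raise ValueError(
            "exactly one of (%s) must be set, got %d" % (", ".join(group), len(defined))
        )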
xor=["subject_info", "event_files", "bids_event_file"], ), event_files=dict( - mandatory=True, - xor=['subject_info', 'event_files', 'bids_event_file'], + mandatory=True, xor=["subject_info", "event_files", "bids_event_file"], ), - functional_runs=dict( - copyfile=False, - mandatory=True, - ), - high_pass_filter_cutoff=dict(mandatory=True, ), - input_units=dict(mandatory=True, ), + functional_runs=dict(copyfile=False, mandatory=True,), + high_pass_filter_cutoff=dict(mandatory=True,), + input_units=dict(mandatory=True,), model_hrf=dict(), - outlier_files=dict(copyfile=False, ), - parameter_source=dict(usedefault=True, ), - realignment_parameters=dict(copyfile=False, ), + outlier_files=dict(copyfile=False,), + parameter_source=dict(usedefault=True,), + realignment_parameters=dict(copyfile=False,), save_plot=dict(), - scale_regressors=dict(usedefault=True, ), - scan_onset=dict(usedefault=True, ), - stimuli_as_impulses=dict(usedefault=True, ), + scale_regressors=dict(usedefault=True,), + scan_onset=dict(usedefault=True,), + stimuli_as_impulses=dict(usedefault=True,), subject_info=dict( - mandatory=True, - xor=['subject_info', 'event_files', 'bids_event_file'], + mandatory=True, xor=["subject_info", "event_files", "bids_event_file"], ), - time_acquisition=dict(mandatory=True, ), - time_repetition=dict(mandatory=True, ), - use_temporal_deriv=dict(requires=['model_hrf'], ), - volumes_in_cluster=dict(usedefault=True, ), + time_acquisition=dict(mandatory=True,), + time_repetition=dict(mandatory=True,), + use_temporal_deriv=dict(requires=["model_hrf"],), + volumes_in_cluster=dict(usedefault=True,), ) inputs = SpecifySparseModel.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SpecifySparseModel_outputs(): output_map = dict( session_info=dict(), - sparse_png_file=dict(extensions=None, ), - sparse_svg_file=dict(extensions=None, ), + sparse_png_file=dict(extensions=None,), + sparse_svg_file=dict(extensions=None,), ) outputs = SpecifySparseModel.output_spec() diff --git a/nipype/algorithms/tests/test_auto_SplitROIs.py b/nipype/algorithms/tests/test_auto_SplitROIs.py index acaef12eee..a9f3844775 100644 --- a/nipype/algorithms/tests/test_auto_SplitROIs.py +++ b/nipype/algorithms/tests/test_auto_SplitROIs.py @@ -4,11 +4,8 @@ def test_SplitROIs_inputs(): input_map = dict( - in_file=dict( - extensions=None, - mandatory=True, - ), - in_mask=dict(extensions=None, ), + in_file=dict(extensions=None, mandatory=True,), + in_mask=dict(extensions=None,), roi_size=dict(), ) inputs = SplitROIs.input_spec() @@ -16,12 +13,10 @@ def test_SplitROIs_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SplitROIs_outputs(): - output_map = dict( - out_files=dict(), - out_index=dict(), - out_masks=dict(), - ) + output_map = dict(out_files=dict(), out_index=dict(), out_masks=dict(),) outputs = SplitROIs.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/algorithms/tests/test_auto_StimulusCorrelation.py b/nipype/algorithms/tests/test_auto_StimulusCorrelation.py index c39fa0bc97..2e95175ca8 100644 --- a/nipype/algorithms/tests/test_auto_StimulusCorrelation.py +++ b/nipype/algorithms/tests/test_auto_StimulusCorrelation.py @@ -4,21 +4,20 @@ def test_StimulusCorrelation_inputs(): input_map = dict( - concatenated_design=dict(mandatory=True, ), - 
-        intensity_values=dict(mandatory=True, ),
-        realignment_parameters=dict(mandatory=True, ),
-        spm_mat_file=dict(
-            extensions=None,
-            mandatory=True,
-        ),
+        concatenated_design=dict(mandatory=True,),
+        intensity_values=dict(mandatory=True,),
+        realignment_parameters=dict(mandatory=True,),
+        spm_mat_file=dict(extensions=None, mandatory=True,),
     )
     inputs = StimulusCorrelation.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_StimulusCorrelation_outputs():
-    output_map = dict(stimcorr_files=dict(), )
+    output_map = dict(stimcorr_files=dict(),)
     outputs = StimulusCorrelation.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/algorithms/tests/test_auto_TCompCor.py b/nipype/algorithms/tests/test_auto_TCompCor.py
index 9fe97f946b..d8cb0d7ae6 100644
--- a/nipype/algorithms/tests/test_auto_TCompCor.py
+++ b/nipype/algorithms/tests/test_auto_TCompCor.py
@@ -4,49 +4,39 @@

 def test_TCompCor_inputs():
     input_map = dict(
-        components_file=dict(usedefault=True, ),
-        failure_mode=dict(usedefault=True, ),
+        components_file=dict(usedefault=True,),
+        failure_mode=dict(usedefault=True,),
         header_prefix=dict(),
-        high_pass_cutoff=dict(usedefault=True, ),
-        ignore_initial_volumes=dict(usedefault=True, ),
+        high_pass_cutoff=dict(usedefault=True,),
+        ignore_initial_volumes=dict(usedefault=True,),
         mask_files=dict(),
-        mask_index=dict(
-            requires=['mask_files'],
-            xor=['merge_method'],
-        ),
+        mask_index=dict(requires=["mask_files"], xor=["merge_method"],),
         mask_names=dict(),
-        merge_method=dict(
-            requires=['mask_files'],
-            xor=['mask_index'],
-        ),
-        num_components=dict(xor=['variance_threshold'], ),
-        percentile_threshold=dict(usedefault=True, ),
-        pre_filter=dict(usedefault=True, ),
-        realigned_file=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        regress_poly_degree=dict(usedefault=True, ),
+        merge_method=dict(requires=["mask_files"], xor=["mask_index"],),
+        num_components=dict(xor=["variance_threshold"],),
+        percentile_threshold=dict(usedefault=True,),
+        pre_filter=dict(usedefault=True,),
+        realigned_file=dict(extensions=None, mandatory=True,),
+        regress_poly_degree=dict(usedefault=True,),
         repetition_time=dict(),
-        save_metadata=dict(usedefault=True, ),
-        save_pre_filter=dict(usedefault=True, ),
-        use_regress_poly=dict(
-            deprecated='0.15.0',
-            new_name='pre_filter',
-        ),
-        variance_threshold=dict(xor=['num_components'], ),
+        save_metadata=dict(usedefault=True,),
+        save_pre_filter=dict(usedefault=True,),
+        use_regress_poly=dict(deprecated="0.15.0", new_name="pre_filter",),
+        variance_threshold=dict(xor=["num_components"],),
     )
     inputs = TCompCor.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_TCompCor_outputs():
     output_map = dict(
-        components_file=dict(extensions=None, ),
+        components_file=dict(extensions=None,),
         high_variance_masks=dict(),
-        metadata_file=dict(extensions=None, ),
-        pre_filter_file=dict(extensions=None, ),
+        metadata_file=dict(extensions=None,),
+        pre_filter_file=dict(extensions=None,),
    )
     outputs = TCompCor.output_spec()
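TCompCor's use_regress_poly trait shows another use of trait metadata: deprecation. The deprecated/new_name pair lets nipype keep accepting the old input name while warning and forwarding the value to pre_filter. The sketch below is a hypothetical stand-in for that forwarding, not nipype's actual handler (which lives in the traited-spec base classes and also consults the version recorded in deprecated):

import warnings

def handle_deprecated(spec, name, value, deprecated="0.15.0", new_name="pre_filter"):
    # Warn, then assign the value to the replacement trait.
    warnings.warn(
        "Input %s is deprecated (since %s); use %s instead" % (name, deprecated, new_name)
    )
    setattr(spec, new_name, value)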
diff --git a/nipype/algorithms/tests/test_auto_WarpPoints.py b/nipype/algorithms/tests/test_auto_WarpPoints.py
index 3c18186db7..493f9fb26f 100644
--- a/nipype/algorithms/tests/test_auto_WarpPoints.py
+++ b/nipype/algorithms/tests/test_auto_WarpPoints.py
@@ -4,33 +4,26 @@

 def test_WarpPoints_inputs():
     input_map = dict(
-        interp=dict(
-            mandatory=True,
-            usedefault=True,
-        ),
+        interp=dict(mandatory=True, usedefault=True,),
         out_points=dict(
             extensions=None,
             keep_extension=True,
-            name_source='points',
-            name_template='%s_warped',
-            output_name='out_points',
-        ),
-        points=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        warp=dict(
-            extensions=None,
-            mandatory=True,
+            name_source="points",
+            name_template="%s_warped",
+            output_name="out_points",
         ),
+        points=dict(extensions=None, mandatory=True,),
+        warp=dict(extensions=None, mandatory=True,),
     )
     inputs = WarpPoints.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_WarpPoints_outputs():
-    output_map = dict(out_points=dict(extensions=None, ), )
+    output_map = dict(out_points=dict(extensions=None,),)
     outputs = WarpPoints.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/algorithms/tests/test_confounds.py b/nipype/algorithms/tests/test_confounds.py
index c9ef93f49f..29f18c9221 100644
--- a/nipype/algorithms/tests/test_confounds.py
+++ b/nipype/algorithms/tests/test_confounds.py
@@ -4,13 +4,13 @@
 import pytest
 from nipype.testing import example_data
-from nipype.algorithms.confounds import FramewiseDisplacement, ComputeDVARS, \
-    is_outlier
+from nipype.algorithms.confounds import FramewiseDisplacement, ComputeDVARS, is_outlier
 import numpy as np

 nonitime = True
 try:
     import nitime
+
     nonitime = False
 except ImportError:
     pass
@@ -18,31 +18,34 @@

 def test_fd(tmpdir):
     tempdir = tmpdir.strpath
-    ground_truth = np.loadtxt(example_data('fsl_motion_outliers_fd.txt'))
+    ground_truth = np.loadtxt(example_data("fsl_motion_outliers_fd.txt"))
     fdisplacement = FramewiseDisplacement(
-        in_file=example_data('fsl_mcflirt_movpar.txt'),
-        out_file=tempdir + '/fd.txt',
-        parameter_source="FSL")
+        in_file=example_data("fsl_mcflirt_movpar.txt"),
+        out_file=tempdir + "/fd.txt",
+        parameter_source="FSL",
+    )
     res = fdisplacement.run()

     with open(res.outputs.out_file) as all_lines:
         for line in all_lines:
-            assert 'FramewiseDisplacement' in line
+            assert "FramewiseDisplacement" in line
             break

     assert np.allclose(
-        ground_truth, np.loadtxt(res.outputs.out_file, skiprows=1), atol=.16)
+        ground_truth, np.loadtxt(res.outputs.out_file, skiprows=1), atol=0.16
+    )
     assert np.abs(ground_truth.mean() - res.outputs.fd_average) < 1e-2


 @pytest.mark.skipif(nonitime, reason="nitime is not installed")
 def test_dvars(tmpdir):
-    ground_truth = np.loadtxt(example_data('ds003_sub-01_mc.DVARS'))
+    ground_truth = np.loadtxt(example_data("ds003_sub-01_mc.DVARS"))
     dvars = ComputeDVARS(
-        in_file=example_data('ds003_sub-01_mc.nii.gz'),
-        in_mask=example_data('ds003_sub-01_mc_brainmask.nii.gz'),
+        in_file=example_data("ds003_sub-01_mc.nii.gz"),
+        in_mask=example_data("ds003_sub-01_mc_brainmask.nii.gz"),
         save_all=True,
-        intensity_normalization=0)
+        intensity_normalization=0,
+    )
     tmpdir.chdir()
     res = dvars.run()

@@ -54,9 +57,10 @@ def test_dvars(tmpdir):
     assert (np.abs(dv1[:, 2] - ground_truth[:, 2]).sum() / len(dv1)) < 0.05

     dvars = ComputeDVARS(
-        in_file=example_data('ds003_sub-01_mc.nii.gz'),
-        in_mask=example_data('ds003_sub-01_mc_brainmask.nii.gz'),
-        save_all=True)
+        in_file=example_data("ds003_sub-01_mc.nii.gz"),
+        in_mask=example_data("ds003_sub-01_mc_brainmask.nii.gz"),
+        save_all=True,
+    )
     res = dvars.run()

     dv1 = np.loadtxt(res.outputs.out_all, skiprows=1)
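For context on test_fd above: framewise displacement, following Power et al. (2012), sums the absolute backward differences of the six rigid-body motion parameters, converting the three rotations (in radians) to millimeters of arc on a sphere of the configured radius (nipype defaults to 50 mm). A compact numpy sketch under the assumption of FSL-style parameter files, where the rotations occupy the first three columns:

import numpy as np

def framewise_displacement(params, radius=50.0):
    """params: (T, 6) motion parameters, rotations (radians) first.
    Returns the (T-1,) FD series in millimeters."""
    diff = np.abs(np.diff(params, axis=0))
    diff[:, :3] *= radius  # rotational differences -> arc length in mm
    return diff.sum(axis=1)

The test's fd_average assertion then corresponds simply to the mean of this series.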
diff --git a/nipype/algorithms/tests/test_icc_anova.py b/nipype/algorithms/tests/test_icc_anova.py
index 79ed312f40..bd0fe3525b 100644
--- a/nipype/algorithms/tests/test_icc_anova.py
+++ b/nipype/algorithms/tests/test_icc_anova.py
@@ -7,8 +7,16 @@ def test_ICC_rep_anova():
     # see table 2 in P. E. Shrout & Joseph L. Fleiss (1979). "Intraclass
     # Correlations: Uses in Assessing Rater Reliability". Psychological
     # Bulletin 86 (2): 420-428
-    Y = np.array([[9, 2, 5, 8], [6, 1, 3, 2], [8, 4, 6, 8], [7, 1, 2, 6],
-                  [10, 5, 6, 9], [6, 2, 4, 7]])
+    Y = np.array(
+        [
+            [9, 2, 5, 8],
+            [6, 1, 3, 2],
+            [8, 4, 6, 8],
+            [7, 1, 2, 6],
+            [10, 5, 6, 9],
+            [6, 2, 4, 7],
+        ]
+    )

     icc, r_var, e_var, _, dfc, dfe = ICC_rep_anova(Y)
     # see table 4
diff --git a/nipype/algorithms/tests/test_mesh_ops.py b/nipype/algorithms/tests/test_mesh_ops.py
index a08a5a97c3..8be59e08c0 100644
--- a/nipype/algorithms/tests/test_mesh_ops.py
+++ b/nipype/algorithms/tests/test_mesh_ops.py
@@ -17,15 +17,15 @@
 def test_ident_distances(tmpdir):
     tmpdir.chdir()

-    in_surf = example_data('surf01.vtk')
+    in_surf = example_data("surf01.vtk")
     dist_ident = m.ComputeMeshWarp()
     dist_ident.inputs.surface1 = in_surf
     dist_ident.inputs.surface2 = in_surf
-    dist_ident.inputs.out_file = tmpdir.join('distance.npy').strpath
+    dist_ident.inputs.out_file = tmpdir.join("distance.npy").strpath
     res = dist_ident.run()
     assert res.outputs.distance == 0.0

-    dist_ident.inputs.weighting = 'area'
+    dist_ident.inputs.weighting = "area"
     res = dist_ident.run()
     assert res.outputs.distance == 0.0

@@ -34,8 +34,8 @@
 def test_trans_distances(tmpdir):
     from ...interfaces.vtkbase import tvtk

-    in_surf = example_data('surf01.vtk')
-    warped_surf = tmpdir.join('warped.vtk').strpath
+    in_surf = example_data("surf01.vtk")
+    warped_surf = tmpdir.join("warped.vtk").strpath

     inc = np.array([0.7, 0.3, -0.2])

@@ -51,10 +51,10 @@
     dist = m.ComputeMeshWarp()
     dist.inputs.surface1 = in_surf
     dist.inputs.surface2 = warped_surf
-    dist.inputs.out_file = tmpdir.join('distance.npy').strpath
+    dist.inputs.out_file = tmpdir.join("distance.npy").strpath
     res = dist.run()
     assert np.allclose(res.outputs.distance, np.linalg.norm(inc), 4)

-    dist.inputs.weighting = 'area'
+    dist.inputs.weighting = "area"
     res = dist.run()
     assert np.allclose(res.outputs.distance, np.linalg.norm(inc), 4)
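test_trans_distances above shifts every vertex of surf01.vtk by the vector inc and expects ComputeMeshWarp's distance to come out as np.linalg.norm(inc): for a pure translation, every corresponding vertex pair is displaced by the same amount, so the average point-to-point distance equals the translation's length. A minimal numpy sketch of that unweighted metric, assuming the two surfaces share vertex ordering:

import numpy as np

def mean_p2p_distance(verts1, verts2):
    """Mean Euclidean distance between corresponding rows of two (N, 3)
    vertex arrays; for verts2 = verts1 + t this returns ||t||."""
    return np.mean(np.linalg.norm(verts2 - verts1, axis=1))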
diff --git a/nipype/algorithms/tests/test_metrics.py b/nipype/algorithms/tests/test_metrics.py
index fb876b3c72..ad7502992e 100644
--- a/nipype/algorithms/tests/test_metrics.py
+++ b/nipype/algorithms/tests/test_metrics.py
@@ -11,24 +11,21 @@ def test_fuzzy_overlap(tmpdir):
     tmpdir.chdir()

     # Tests with tissue probability maps
-    in_mask = example_data('tpms_msk.nii.gz')
-    tpms = [example_data('tpm_%02d.nii.gz' % i) for i in range(3)]
+    in_mask = example_data("tpms_msk.nii.gz")
+    tpms = [example_data("tpm_%02d.nii.gz" % i) for i in range(3)]

     out = FuzzyOverlap(in_ref=tpms[0], in_tst=tpms[0]).run().outputs
     assert out.dice == 1

-    out = FuzzyOverlap(
-        in_mask=in_mask, in_ref=tpms[0], in_tst=tpms[0]).run().outputs
+    out = FuzzyOverlap(in_mask=in_mask, in_ref=tpms[0], in_tst=tpms[0]).run().outputs
     assert out.dice == 1

-    out = FuzzyOverlap(
-        in_mask=in_mask, in_ref=tpms[0], in_tst=tpms[1]).run().outputs
+    out = FuzzyOverlap(in_mask=in_mask, in_ref=tpms[0], in_tst=tpms[1]).run().outputs
     assert 0 < out.dice < 1

     out = FuzzyOverlap(in_ref=tpms, in_tst=tpms).run().outputs
     assert out.dice == 1.0

-    out = FuzzyOverlap(
-        in_mask=in_mask, in_ref=tpms, in_tst=tpms).run().outputs
+    out = FuzzyOverlap(in_mask=in_mask, in_ref=tpms, in_tst=tpms).run().outputs
     assert out.dice == 1.0

     # Tests with synthetic 3x3x3 images
@@ -36,14 +33,14 @@ def test_fuzzy_overlap(tmpdir):
     data[0, 0, 0] = 0.5
     data[2, 2, 2] = 0.25
     data[1, 1, 1] = 0.3
-    nb.Nifti1Image(data, np.eye(4)).to_filename('test1.nii.gz')
+    nb.Nifti1Image(data, np.eye(4)).to_filename("test1.nii.gz")

     data = np.zeros((3, 3, 3), dtype=float)
     data[0, 0, 0] = 0.6
     data[1, 1, 1] = 0.3
-    nb.Nifti1Image(data, np.eye(4)).to_filename('test2.nii.gz')
+    nb.Nifti1Image(data, np.eye(4)).to_filename("test2.nii.gz")

-    out = FuzzyOverlap(in_ref='test1.nii.gz', in_tst='test2.nii.gz').run().outputs
+    out = FuzzyOverlap(in_ref="test1.nii.gz", in_tst="test2.nii.gz").run().outputs
     assert np.allclose(out.dice, 0.82051)

     # Just considering the mask, the central pixel
@@ -51,8 +48,13 @@ def test_fuzzy_overlap(tmpdir):
     data = np.zeros((3, 3, 3), dtype=int)
     data[0, 0, 0] = 1
     data[2, 2, 2] = 1
-    nb.Nifti1Image(data, np.eye(4)).to_filename('mask.nii.gz')
-
-    out = FuzzyOverlap(in_ref='test1.nii.gz', in_tst='test2.nii.gz',
-                       in_mask='mask.nii.gz').run().outputs
+    nb.Nifti1Image(data, np.eye(4)).to_filename("mask.nii.gz")
+
+    out = (
+        FuzzyOverlap(
+            in_ref="test1.nii.gz", in_tst="test2.nii.gz", in_mask="mask.nii.gz"
+        )
+        .run()
+        .outputs
+    )
     assert np.allclose(out.dice, 0.74074)
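The constants 0.82051 and 0.74074 asserted above follow from the fuzzy Dice coefficient: twice the summed voxelwise minimum of the two membership maps, divided by the sum of both maps. A sketch that reproduces the first value from the test's synthetic volumes:

import numpy as np

def fuzzy_dice(a, b, mask=None):
    """Fuzzy Dice: 2 * sum(min(a, b)) / (sum(a) + sum(b))."""
    if mask is not None:
        a, b = a[mask > 0], b[mask > 0]
    return 2.0 * np.minimum(a, b).sum() / (a.sum() + b.sum())

a = np.zeros((3, 3, 3))
a[0, 0, 0], a[1, 1, 1], a[2, 2, 2] = 0.5, 0.3, 0.25
b = np.zeros((3, 3, 3))
b[0, 0, 0], b[1, 1, 1] = 0.6, 0.3
print(round(fuzzy_dice(a, b), 5))  # 0.82051 = 2 * 0.8 / (1.05 + 0.9)

Restricting to the two-voxel mask drops the (1,1,1) agreement, giving 2 * 0.5 / (0.75 + 0.6) = 0.74074.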
diff --git a/nipype/algorithms/tests/test_misc.py b/nipype/algorithms/tests/test_misc.py
index e9d5cbdb3c..40aab24b2a 100644
--- a/nipype/algorithms/tests/test_misc.py
+++ b/nipype/algorithms/tests/test_misc.py
@@ -26,7 +26,8 @@ def test_CreateNifti(create_analyze_pair_file_in_directory):
     # .inputs based parameters setting
     create_nifti.inputs.header_file = filelist[0]
     create_nifti.inputs.data_file = fname_presuffix(
-        filelist[0], '', '.img', use_ext=False)
+        filelist[0], "", ".img", use_ext=False
+    )

     result = create_nifti.run()

@@ -41,7 +42,7 @@ def test_CalculateMedian(create_analyze_pair_file_in_directory):
     with pytest.raises(TypeError):
         mean.run()

-    mean.inputs.in_files = example_data('ds003_sub-01_mc.nii.gz')
+    mean.inputs.in_files = example_data("ds003_sub-01_mc.nii.gz")
     eg = mean.run()

     assert os.path.exists(eg.outputs.median_files)
diff --git a/nipype/algorithms/tests/test_modelgen.py b/nipype/algorithms/tests/test_modelgen.py
index 759e53b0a4..a2c85f6747 100644
--- a/nipype/algorithms/tests/test_modelgen.py
+++ b/nipype/algorithms/tests/test_modelgen.py
@@ -12,234 +12,234 @@
 import numpy.testing as npt
 from nipype.testing import example_data
 from nipype.interfaces.base import Bunch, TraitError
-from nipype.algorithms.modelgen import (bids_gen_info, SpecifyModel,
-                                        SpecifySparseModel, SpecifySPMModel)
+from nipype.algorithms.modelgen import (
+    bids_gen_info,
+    SpecifyModel,
+    SpecifySparseModel,
+    SpecifySPMModel,
+)


 def test_bids_gen_info():
-    fname = example_data('events.tsv')
+    fname = example_data("events.tsv")
     res = bids_gen_info([fname])
-    assert res[0].onsets == [[183.75, 313.75, 483.75, 633.75, 783.75, 933.75, 1083.75, 1233.75]]
+    assert res[0].onsets == [
+        [183.75, 313.75, 483.75, 633.75, 783.75, 933.75, 1083.75, 1233.75]
+    ]
     assert res[0].durations == [[20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0]]
-    assert res[0].amplitudes ==[[1, 1, 1, 1, 1, 1, 1, 1]]
-    assert res[0].conditions == ['ev0']
+    assert res[0].amplitudes == [[1, 1, 1, 1, 1, 1, 1, 1]]
+    assert res[0].conditions == ["ev0"]


 def test_modelgen1(tmpdir):
-    filename1 = tmpdir.join('test1.nii').strpath
-    filename2 = tmpdir.join('test2.nii').strpath
-    Nifti1Image(np.random.rand(10, 10, 10, 200),
-                np.eye(4)).to_filename(filename1)
-    Nifti1Image(np.random.rand(10, 10, 10, 200),
-                np.eye(4)).to_filename(filename2)
+    filename1 = tmpdir.join("test1.nii").strpath
+    filename2 = tmpdir.join("test2.nii").strpath
+    Nifti1Image(np.random.rand(10, 10, 10, 200), np.eye(4)).to_filename(filename1)
+    Nifti1Image(np.random.rand(10, 10, 10, 200), np.eye(4)).to_filename(filename2)
     s = SpecifyModel()
-    s.inputs.input_units = 'scans'
-    set_output_units = lambda: setattr(s.inputs, 'output_units', 'scans')
+    s.inputs.input_units = "scans"
+    set_output_units = lambda: setattr(s.inputs, "output_units", "scans")
     with pytest.raises(TraitError):
         set_output_units()
     s.inputs.functional_runs = [filename1, filename2]
     s.inputs.time_repetition = 6
-    s.inputs.high_pass_filter_cutoff = 128.
+    s.inputs.high_pass_filter_cutoff = 128.0
     info = [
         Bunch(
-            conditions=['cond1'],
+            conditions=["cond1"],
             onsets=[[2, 50, 100, 180]],
             durations=[[1]],
             amplitudes=None,
             pmod=None,
             regressors=None,
             regressor_names=None,
-            tmod=None),
+            tmod=None,
+        ),
         Bunch(
-            conditions=['cond1'],
+            conditions=["cond1"],
             onsets=[[30, 40, 100, 150]],
             durations=[[1]],
             amplitudes=None,
             pmod=None,
             regressors=None,
             regressor_names=None,
-            tmod=None)
+            tmod=None,
+        ),
     ]
     s.inputs.subject_info = info
     res = s.run()
     assert len(res.outputs.session_info) == 2
-    assert len(res.outputs.session_info[0]['regress']) == 0
-    assert len(res.outputs.session_info[0]['cond']) == 1
+    assert len(res.outputs.session_info[0]["regress"]) == 0
+    assert len(res.outputs.session_info[0]["cond"]) == 1
     npt.assert_almost_equal(
-        np.array(res.outputs.session_info[0]['cond'][0]['onset']),
-        np.array([12, 300, 600, 1080]))
+        np.array(res.outputs.session_info[0]["cond"][0]["onset"]),
+        np.array([12, 300, 600, 1080]),
+    )

     info = [
-        Bunch(conditions=['cond1'], onsets=[[2]], durations=[[1]]),
-        Bunch(conditions=['cond1'], onsets=[[3]], durations=[[1]])
+        Bunch(conditions=["cond1"], onsets=[[2]], durations=[[1]]),
+        Bunch(conditions=["cond1"], onsets=[[3]], durations=[[1]]),
     ]
     s.inputs.subject_info = deepcopy(info)
     res = s.run()
     npt.assert_almost_equal(
-        np.array(res.outputs.session_info[0]['cond'][0]['duration']),
-        np.array([6.]))
+        np.array(res.outputs.session_info[0]["cond"][0]["duration"]), np.array([6.0])
+    )
     npt.assert_almost_equal(
-        np.array(res.outputs.session_info[1]['cond'][0]['duration']),
-        np.array([6.]))
+        np.array(res.outputs.session_info[1]["cond"][0]["duration"]), np.array([6.0])
+    )

     info = [
         Bunch(
-            conditions=['cond1', 'cond2'],
-            onsets=[[2, 3], [2]],
-            durations=[[1, 1], [1]]),
+            conditions=["cond1", "cond2"], onsets=[[2, 3], [2]], durations=[[1, 1], [1]]
+        ),
         Bunch(
-            conditions=['cond1', 'cond2'],
+            conditions=["cond1", "cond2"],
             onsets=[[2, 3], [2, 4]],
-            durations=[[1, 1], [1, 1]])
+            durations=[[1, 1], [1, 1]],
+        ),
     ]
     s.inputs.subject_info = deepcopy(info)
-    s.inputs.input_units = 'scans'
+    s.inputs.input_units = "scans"
     res = s.run()
     npt.assert_almost_equal(
-        np.array(res.outputs.session_info[0]['cond'][0]['duration']),
-        np.array([6., 6.]))
+        np.array(res.outputs.session_info[0]["cond"][0]["duration"]),
+        np.array([6.0, 6.0]),
+    )
     npt.assert_almost_equal(
-        np.array(res.outputs.session_info[0]['cond'][1]['duration']),
-        np.array([
-            6.,
-        ]))
+        np.array(res.outputs.session_info[0]["cond"][1]["duration"]), np.array([6.0,])
+    )
     npt.assert_almost_equal(
-        np.array(res.outputs.session_info[1]['cond'][1]['duration']),
-        np.array([6., 6.]))
+        np.array(res.outputs.session_info[1]["cond"][1]["duration"]),
+        np.array([6.0, 6.0]),
+    )


 def test_modelgen_spm_concat(tmpdir):
-    filename1 = tmpdir.join('test1.nii').strpath
-    filename2 = tmpdir.join('test2.nii').strpath
-    Nifti1Image(np.random.rand(10, 10, 10, 30),
-                np.eye(4)).to_filename(filename1)
-    Nifti1Image(np.random.rand(10, 10, 10, 30),
-                np.eye(4)).to_filename(filename2)
+    filename1 = tmpdir.join("test1.nii").strpath
+    filename2 = tmpdir.join("test2.nii").strpath
+    Nifti1Image(np.random.rand(10, 10, 10, 30), np.eye(4)).to_filename(filename1)
+    Nifti1Image(np.random.rand(10, 10, 10, 30), np.eye(4)).to_filename(filename2)

     # Test case when only one duration is passed, as being the same for all onsets.
     s = SpecifySPMModel()
-    s.inputs.input_units = 'secs'
+    s.inputs.input_units = "secs"
     s.inputs.concatenate_runs = True
-    setattr(s.inputs, 'output_units', 'secs')
-    assert s.inputs.output_units == 'secs'
+    setattr(s.inputs, "output_units", "secs")
+    assert s.inputs.output_units == "secs"
     s.inputs.functional_runs = [filename1, filename2]
     s.inputs.time_repetition = 6
-    s.inputs.high_pass_filter_cutoff = 128.
+    s.inputs.high_pass_filter_cutoff = 128.0
     info = [
-        Bunch(
-            conditions=['cond1'], onsets=[[2, 50, 100, 170]], durations=[[1]]),
-        Bunch(
-            conditions=['cond1'], onsets=[[30, 40, 100, 150]], durations=[[1]])
+        Bunch(conditions=["cond1"], onsets=[[2, 50, 100, 170]], durations=[[1]]),
+        Bunch(conditions=["cond1"], onsets=[[30, 40, 100, 150]], durations=[[1]]),
     ]
     s.inputs.subject_info = deepcopy(info)
     res = s.run()
     assert len(res.outputs.session_info) == 1
-    assert len(res.outputs.session_info[0]['regress']) == 1
-    assert np.sum(res.outputs.session_info[0]['regress'][0]['val']) == 30
-    assert len(res.outputs.session_info[0]['cond']) == 1
+    assert len(res.outputs.session_info[0]["regress"]) == 1
+    assert np.sum(res.outputs.session_info[0]["regress"][0]["val"]) == 30
+    assert len(res.outputs.session_info[0]["cond"]) == 1
     npt.assert_almost_equal(
-        np.array(res.outputs.session_info[0]['cond'][0]['onset']),
-        np.array([2.0, 50.0, 100.0, 170.0, 210.0, 220.0, 280.0, 330.0]))
+        np.array(res.outputs.session_info[0]["cond"][0]["onset"]),
+        np.array([2.0, 50.0, 100.0, 170.0, 210.0, 220.0, 280.0, 330.0]),
+    )
     npt.assert_almost_equal(
-        np.array(res.outputs.session_info[0]['cond'][0]['duration']),
-        np.array([1., 1., 1., 1., 1., 1., 1., 1.]))
+        np.array(res.outputs.session_info[0]["cond"][0]["duration"]),
+        np.array([1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]),
+    )

     # Test case of scans as output units instead of seconds
-    setattr(s.inputs, 'output_units', 'scans')
-    assert s.inputs.output_units == 'scans'
+    setattr(s.inputs, "output_units", "scans")
+    assert s.inputs.output_units == "scans"
     s.inputs.subject_info = deepcopy(info)
     res = s.run()
     npt.assert_almost_equal(
-        np.array(res.outputs.session_info[0]['cond'][0]['onset']),
-        np.array([2.0, 50.0, 100.0, 170.0, 210.0, 220.0, 280.0, 330.0]) / 6)
+        np.array(res.outputs.session_info[0]["cond"][0]["onset"]),
+        np.array([2.0, 50.0, 100.0, 170.0, 210.0, 220.0, 280.0, 330.0]) / 6,
+    )

     # Test case for no concatenation with seconds as output units
     s.inputs.concatenate_runs = False
     s.inputs.subject_info = deepcopy(info)
-    s.inputs.output_units = 'secs'
+    s.inputs.output_units = "secs"
     res = s.run()
     npt.assert_almost_equal(
-        np.array(res.outputs.session_info[0]['cond'][0]['onset']),
-        np.array([2.0, 50.0, 100.0, 170.0]))
+        np.array(res.outputs.session_info[0]["cond"][0]["onset"]),
+        np.array([2.0, 50.0, 100.0, 170.0]),
+    )

     # Test case for variable number of events in separate runs, sometimes unique.
-    filename3 = tmpdir.join('test3.nii').strpath
-    Nifti1Image(np.random.rand(10, 10, 10, 30),
-                np.eye(4)).to_filename(filename3)
+    filename3 = tmpdir.join("test3.nii").strpath
+    Nifti1Image(np.random.rand(10, 10, 10, 30), np.eye(4)).to_filename(filename3)
     s.inputs.functional_runs = [filename1, filename2, filename3]
     info = [
         Bunch(
-            conditions=['cond1', 'cond2'],
-            onsets=[[2, 3], [2]],
-            durations=[[1, 1], [1]]),
+            conditions=["cond1", "cond2"], onsets=[[2, 3], [2]], durations=[[1, 1], [1]]
+        ),
         Bunch(
-            conditions=['cond1', 'cond2'],
+            conditions=["cond1", "cond2"],
             onsets=[[2, 3], [2, 4]],
-            durations=[[1, 1], [1, 1]]),
+            durations=[[1, 1], [1, 1]],
+        ),
         Bunch(
-            conditions=['cond1', 'cond2'],
-            onsets=[[2, 3], [2]],
-            durations=[[1, 1], [1]])
+            conditions=["cond1", "cond2"], onsets=[[2, 3], [2]], durations=[[1, 1], [1]]
+        ),
     ]
     s.inputs.subject_info = deepcopy(info)
     res = s.run()
     npt.assert_almost_equal(
-        np.array(res.outputs.session_info[0]['cond'][0]['duration']),
-        np.array([1., 1.]))
+        np.array(res.outputs.session_info[0]["cond"][0]["duration"]),
+        np.array([1.0, 1.0]),
+    )
     npt.assert_almost_equal(
-        np.array(res.outputs.session_info[0]['cond'][1]['duration']),
-        np.array([
-            1.,
-        ]))
+        np.array(res.outputs.session_info[0]["cond"][1]["duration"]), np.array([1.0,])
+    )
     npt.assert_almost_equal(
-        np.array(res.outputs.session_info[1]['cond'][1]['duration']),
-        np.array([1., 1.]))
+        np.array(res.outputs.session_info[1]["cond"][1]["duration"]),
+        np.array([1.0, 1.0]),
+    )
    npt.assert_almost_equal(
-        np.array(res.outputs.session_info[2]['cond'][1]['duration']),
-        np.array([
-            1.,
-        ]))
+        np.array(res.outputs.session_info[2]["cond"][1]["duration"]), np.array([1.0,])
+    )

     # Test case for variable number of events in concatenated runs, sometimes unique.
     s.inputs.concatenate_runs = True
     info = [
         Bunch(
-            conditions=['cond1', 'cond2'],
-            onsets=[[2, 3], [2]],
-            durations=[[1, 1], [1]]),
+            conditions=["cond1", "cond2"], onsets=[[2, 3], [2]], durations=[[1, 1], [1]]
+        ),
         Bunch(
-            conditions=['cond1', 'cond2'],
+            conditions=["cond1", "cond2"],
             onsets=[[2, 3], [2, 4]],
-            durations=[[1, 1], [1, 1]]),
+            durations=[[1, 1], [1, 1]],
+        ),
         Bunch(
-            conditions=['cond1', 'cond2'],
-            onsets=[[2, 3], [2]],
-            durations=[[1, 1], [1]])
+            conditions=["cond1", "cond2"], onsets=[[2, 3], [2]], durations=[[1, 1], [1]]
+        ),
     ]
     s.inputs.subject_info = deepcopy(info)
     res = s.run()
     npt.assert_almost_equal(
-        np.array(res.outputs.session_info[0]['cond'][0]['duration']),
-        np.array([1., 1., 1., 1., 1., 1.]))
+        np.array(res.outputs.session_info[0]["cond"][0]["duration"]),
+        np.array([1.0, 1.0, 1.0, 1.0, 1.0, 1.0]),
+    )
     npt.assert_almost_equal(
-        np.array(res.outputs.session_info[0]['cond'][1]['duration']),
-        np.array([1., 1., 1., 1.]))
+        np.array(res.outputs.session_info[0]["cond"][1]["duration"]),
+        np.array([1.0, 1.0, 1.0, 1.0]),
+    )


 def test_modelgen_sparse(tmpdir):
-    filename1 = tmpdir.join('test1.nii').strpath
-    filename2 = tmpdir.join('test2.nii').strpath
-    Nifti1Image(np.random.rand(10, 10, 10, 50),
-                np.eye(4)).to_filename(filename1)
-    Nifti1Image(np.random.rand(10, 10, 10, 50),
-                np.eye(4)).to_filename(filename2)
+    filename1 = tmpdir.join("test1.nii").strpath
+    filename2 = tmpdir.join("test2.nii").strpath
+    Nifti1Image(np.random.rand(10, 10, 10, 50), np.eye(4)).to_filename(filename1)
+    Nifti1Image(np.random.rand(10, 10, 10, 50), np.eye(4)).to_filename(filename2)
     s = SpecifySparseModel()
-    s.inputs.input_units = 'secs'
+    s.inputs.input_units = "secs"
     s.inputs.functional_runs = [filename1, filename2]
     s.inputs.time_repetition = 6
     info = [
-        Bunch(
-            conditions=['cond1'], onsets=[[0, 50, 100, 180]], durations=[[2]]),
-        Bunch(
-            conditions=['cond1'], onsets=[[30, 40, 100, 150]], durations=[[1]])
+        Bunch(conditions=["cond1"], onsets=[[0, 50, 100, 180]], durations=[[2]]),
+        Bunch(conditions=["cond1"], onsets=[[30, 40, 100, 150]], durations=[[1]]),
     ]
     s.inputs.subject_info = info
     s.inputs.volumes_in_cluster = 1
@@ -247,26 +247,26 @@
     s.inputs.high_pass_filter_cutoff = np.inf
     res = s.run()
     assert len(res.outputs.session_info) == 2
-    assert len(res.outputs.session_info[0]['regress']) == 1
-    assert len(res.outputs.session_info[0]['cond']) == 0
+    assert len(res.outputs.session_info[0]["regress"]) == 1
+    assert len(res.outputs.session_info[0]["cond"]) == 0
     s.inputs.stimuli_as_impulses = False
     res = s.run()
-    assert res.outputs.session_info[0]['regress'][0]['val'][0] == 1.0
+    assert res.outputs.session_info[0]["regress"][0]["val"][0] == 1.0
     s.inputs.model_hrf = True
     res = s.run()
     npt.assert_almost_equal(
-        res.outputs.session_info[0]['regress'][0]['val'][0],
-        0.016675298129743384)
-    assert len(res.outputs.session_info[0]['regress']) == 1
+        res.outputs.session_info[0]["regress"][0]["val"][0], 0.016675298129743384
+    )
+    assert len(res.outputs.session_info[0]["regress"]) == 1
     s.inputs.use_temporal_deriv = True
     res = s.run()

-    assert len(res.outputs.session_info[0]['regress']) == 2
+    assert len(res.outputs.session_info[0]["regress"]) == 2
     npt.assert_almost_equal(
-        res.outputs.session_info[0]['regress'][0]['val'][0],
-        0.016675298129743384)
+        res.outputs.session_info[0]["regress"][0]["val"][0], 0.016675298129743384
+    )
     npt.assert_almost_equal(
-        res.outputs.session_info[1]['regress'][1]['val'][5],
-        0.007671459162258378)
res.outputs.session_info[1]["regress"][1]["val"][5], 0.007671459162258378 + ) diff --git a/nipype/algorithms/tests/test_moments.py b/nipype/algorithms/tests/test_moments.py index fa174a79e4..91e6313193 100644 --- a/nipype/algorithms/tests/test_moments.py +++ b/nipype/algorithms/tests/test_moments.py @@ -130,8 +130,16 @@ def test_skew(tmpdir): skewness = calc_moments(f.strpath, 3) assert np.allclose( skewness, - np.array([ - -0.23418937314622, 0.2946365564954823, -0.05781002053540932, - -0.3512508282578762, -0.07035664150233077, -0.01935867699166935, - 0.00483863369427428, 0.21879460029850167 - ])) + np.array( + [ + -0.23418937314622, + 0.2946365564954823, + -0.05781002053540932, + -0.3512508282578762, + -0.07035664150233077, + -0.01935867699166935, + 0.00483863369427428, + 0.21879460029850167, + ] + ), + ) diff --git a/nipype/algorithms/tests/test_normalize_tpms.py b/nipype/algorithms/tests/test_normalize_tpms.py index 31eb2b96dd..7ff482f23f 100644 --- a/nipype/algorithms/tests/test_normalize_tpms.py +++ b/nipype/algorithms/tests/test_normalize_tpms.py @@ -18,7 +18,7 @@ def test_normalize_tpms(tmpdir): - in_mask = example_data('tpms_msk.nii.gz') + in_mask = example_data("tpms_msk.nii.gz") mskdata = nb.load(in_mask, mmap=NUMPY_MMAP).get_data() mskdata[mskdata > 0.0] = 1.0 @@ -27,16 +27,17 @@ def test_normalize_tpms(tmpdir): out_files = [] for i in range(3): - mapname = example_data('tpm_%02d.nii.gz' % i) - filename = tmpdir.join('modtpm_%02d.nii.gz' % i).strpath - out_files.append(tmpdir.join('normtpm_%02d.nii.gz' % i).strpath) + mapname = example_data("tpm_%02d.nii.gz" % i) + filename = tmpdir.join("modtpm_%02d.nii.gz" % i).strpath + out_files.append(tmpdir.join("normtpm_%02d.nii.gz" % i).strpath) im = nb.load(mapname, mmap=NUMPY_MMAP) data = im.get_data() mapdata.append(data.copy()) - nb.Nifti1Image(2.0 * (data * mskdata), im.affine, - im.header).to_filename(filename) + nb.Nifti1Image(2.0 * (data * mskdata), im.affine, im.header).to_filename( + filename + ) in_files.append(filename) normalize_tpms(in_files, in_mask, out_files=out_files) diff --git a/nipype/algorithms/tests/test_rapidart.py b/nipype/algorithms/tests/test_rapidart.py index fbdce11819..fdf0716805 100644 --- a/nipype/algorithms/tests/test_rapidart.py +++ b/nipype/algorithms/tests/test_rapidart.py @@ -16,58 +16,79 @@ def test_ad_init(): def test_ad_output_filenames(): ad = ra.ArtifactDetect() - outputdir = '/tmp' - f = 'motion.nii' - (outlierfile, intensityfile, statsfile, normfile, plotfile, - displacementfile, maskfile) = ad._get_output_filenames(f, outputdir) - assert outlierfile == '/tmp/art.motion_outliers.txt' - assert intensityfile == '/tmp/global_intensity.motion.txt' - assert statsfile == '/tmp/stats.motion.txt' - assert normfile == '/tmp/norm.motion.txt' - assert plotfile == '/tmp/plot.motion.png' - assert displacementfile == '/tmp/disp.motion.nii' - assert maskfile == '/tmp/mask.motion.nii' + outputdir = "/tmp" + f = "motion.nii" + ( + outlierfile, + intensityfile, + statsfile, + normfile, + plotfile, + displacementfile, + maskfile, + ) = ad._get_output_filenames(f, outputdir) + assert outlierfile == "/tmp/art.motion_outliers.txt" + assert intensityfile == "/tmp/global_intensity.motion.txt" + assert statsfile == "/tmp/stats.motion.txt" + assert normfile == "/tmp/norm.motion.txt" + assert plotfile == "/tmp/plot.motion.png" + assert displacementfile == "/tmp/disp.motion.nii" + assert maskfile == "/tmp/mask.motion.nii" def test_ad_get_affine_matrix(): - matrix = ra._get_affine_matrix(np.array([0]), 'SPM') + matrix = 
ra._get_affine_matrix(np.array([0]), "SPM") npt.assert_equal(matrix, np.eye(4)) # test translation params = [1, 2, 3] - matrix = ra._get_affine_matrix(params, 'SPM') + matrix = ra._get_affine_matrix(params, "SPM") out = np.eye(4) out[0:3, 3] = params npt.assert_equal(matrix, out) # test rotation params = np.array([0, 0, 0, np.pi / 2, np.pi / 2, np.pi / 2]) - matrix = ra._get_affine_matrix(params, 'SPM') - out = np.array([0, 0, 1, 0, 0, -1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1]).reshape( - (4, 4)) + matrix = ra._get_affine_matrix(params, "SPM") + out = np.array([0, 0, 1, 0, 0, -1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1]).reshape((4, 4)) npt.assert_almost_equal(matrix, out) # test scaling params = np.array([0, 0, 0, 0, 0, 0, 1, 2, 3]) - matrix = ra._get_affine_matrix(params, 'SPM') - out = np.array([1, 0, 0, 0, 0, 2, 0, 0, 0, 0, 3, 0, 0, 0, 0, 1]).reshape( - (4, 4)) + matrix = ra._get_affine_matrix(params, "SPM") + out = np.array([1, 0, 0, 0, 0, 2, 0, 0, 0, 0, 3, 0, 0, 0, 0, 1]).reshape((4, 4)) npt.assert_equal(matrix, out) # test shear params = np.array([0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 3]) - matrix = ra._get_affine_matrix(params, 'SPM') - out = np.array([1, 1, 2, 0, 0, 1, 3, 0, 0, 0, 1, 0, 0, 0, 0, 1]).reshape( - (4, 4)) + matrix = ra._get_affine_matrix(params, "SPM") + out = np.array([1, 1, 2, 0, 0, 1, 3, 0, 0, 0, 1, 0, 0, 0, 0, 1]).reshape((4, 4)) npt.assert_equal(matrix, out) def test_ad_get_norm(): - params = np.array([ - 0, 0, 0, 0, 0, 0, 0, 0, 0, np.pi / 4, np.pi / 4, np.pi / 4, 0, 0, 0, - -np.pi / 4, -np.pi / 4, -np.pi / 4 - ]).reshape((3, 6)) - norm, _ = ra._calc_norm(params, False, 'SPM') - npt.assert_almost_equal(norm, - np.array([18.86436316, 37.74610158, 31.29780829])) - norm, _ = ra._calc_norm(params, True, 'SPM') - npt.assert_almost_equal(norm, np.array([0., 143.72192614, 173.92527131])) + params = np.array( + [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + np.pi / 4, + np.pi / 4, + np.pi / 4, + 0, + 0, + 0, + -np.pi / 4, + -np.pi / 4, + -np.pi / 4, + ] + ).reshape((3, 6)) + norm, _ = ra._calc_norm(params, False, "SPM") + npt.assert_almost_equal(norm, np.array([18.86436316, 37.74610158, 31.29780829])) + norm, _ = ra._calc_norm(params, True, "SPM") + npt.assert_almost_equal(norm, np.array([0.0, 143.72192614, 173.92527131])) def test_sc_init(): @@ -81,13 +102,14 @@ def test_sc_populate_inputs(): realignment_parameters=None, intensity_values=None, spm_mat_file=None, - concatenated_design=None) + concatenated_design=None, + ) assert set(sc.inputs.__dict__.keys()) == set(inputs.__dict__.keys()) def test_sc_output_filenames(): sc = ra.StimulusCorrelation() - outputdir = '/tmp' - f = 'motion.nii' + outputdir = "/tmp" + f = "motion.nii" corrfile = sc._get_output_filenames(f, outputdir) - assert corrfile == '/tmp/qa.motion_stimcorr.txt' + assert corrfile == "/tmp/qa.motion_stimcorr.txt" diff --git a/nipype/algorithms/tests/test_splitmerge.py b/nipype/algorithms/tests/test_splitmerge.py index f05d291028..96e60c6cbb 100644 --- a/nipype/algorithms/tests/test_splitmerge.py +++ b/nipype/algorithms/tests/test_splitmerge.py @@ -13,8 +13,8 @@ def test_split_and_merge(tmpdir): from nipype.algorithms.misc import split_rois, merge_rois - in_mask = example_data('tpms_msk.nii.gz') - dwfile = tmpdir.join('dwi.nii.gz').strpath + in_mask = example_data("tpms_msk.nii.gz") + dwfile = tmpdir.join("dwi.nii.gz").strpath mskdata = nb.load(in_mask, mmap=NUMPY_MMAP).get_data() aff = nb.load(in_mask, mmap=NUMPY_MMAP).affine diff --git a/nipype/algorithms/tests/test_stats.py b/nipype/algorithms/tests/test_stats.py index 
9a4c7525b5..29305e7a7a 100644 --- a/nipype/algorithms/tests/test_stats.py +++ b/nipype/algorithms/tests/test_stats.py @@ -10,10 +10,9 @@ def test_ActivationCount(tmpdir): tmpdir.chdir() - in_files = ['{:d}.nii'.format(i) for i in range(3)] + in_files = ["{:d}.nii".format(i) for i in range(3)] for fname in in_files: - nb.Nifti1Image(np.random.normal(size=(5, 5, 5)), - np.eye(4)).to_filename(fname) + nb.Nifti1Image(np.random.normal(size=(5, 5, 5)), np.eye(4)).to_filename(fname) acm = ActivationCount(in_files=in_files, threshold=1.65) res = acm.run() @@ -23,23 +22,29 @@ def test_ActivationCount(tmpdir): assert np.allclose(diff.get_data(), pos.get_data() - neg.get_data()) -@pytest.mark.parametrize("threshold, above_thresh", [ - (1, 15.865), # above one standard deviation (one side) - (2, 2.275), # above two standard deviations (one side) - (3, 0.135) # above three standard deviations (one side) -]) +@pytest.mark.parametrize( + "threshold, above_thresh", + [ + (1, 15.865), # above one standard deviation (one side) + (2, 2.275), # above two standard deviations (one side) + (3, 0.135), # above three standard deviations (one side) + ], +) def test_ActivationCount_normaldistr(tmpdir, threshold, above_thresh): tmpdir.chdir() - in_files = ['{:d}.nii'.format(i) for i in range(3)] + in_files = ["{:d}.nii".format(i) for i in range(3)] for fname in in_files: - nb.Nifti1Image(np.random.normal(size=(100, 100, 100)), - np.eye(4)).to_filename(fname) + nb.Nifti1Image(np.random.normal(size=(100, 100, 100)), np.eye(4)).to_filename( + fname + ) acm = ActivationCount(in_files=in_files, threshold=threshold) res = acm.run() pos = nb.load(res.outputs.acm_pos) neg = nb.load(res.outputs.acm_neg) - assert np.isclose(pos.get_data().mean(), - above_thresh * 1.e-2, rtol=0.1, atol=1.e-4) - assert np.isclose(neg.get_data().mean(), - above_thresh * 1.e-2, rtol=0.1, atol=1.e-4) + assert np.isclose( + pos.get_data().mean(), above_thresh * 1.0e-2, rtol=0.1, atol=1.0e-4 + ) + assert np.isclose( + neg.get_data().mean(), above_thresh * 1.0e-2, rtol=0.1, atol=1.0e-4 + ) diff --git a/nipype/caching/memory.py b/nipype/caching/memory.py index 53bf92cce7..4f773f0c3c 100644 --- a/nipype/caching/memory.py +++ b/nipype/caching/memory.py @@ -42,18 +42,18 @@ def __init__(self, interface, base_dir, callback=None): An optional callable called each time after the function is called. """ - if not (isinstance(interface, type) - and issubclass(interface, BaseInterface)): - raise ValueError('the interface argument should be a nipype ' - 'interface class, but %s (type %s) was passed.' % - (interface, type(interface))) + if not (isinstance(interface, type) and issubclass(interface, BaseInterface)): + raise ValueError( + "the interface argument should be a nipype " + "interface class, but %s (type %s) was passed." 
+                % (interface, type(interface))
+            )
         self.interface = interface
         base_dir = os.path.abspath(base_dir)
         if not os.path.exists(base_dir) and os.path.isdir(base_dir):
-            raise ValueError('base_dir should be an existing directory')
+            raise ValueError("base_dir should be an existing directory")
         self.base_dir = base_dir
-        doc = '%s\n%s' % (self.interface.__doc__,
-                          self.interface.help(returnhelp=True))
+        doc = "%s\n%s" % (self.interface.__doc__, self.interface.help(returnhelp=True))
         self.__doc__ = doc
         self.callback = callback
 
@@ -64,10 +64,12 @@ def __call__(self, **kwargs):
         interface.inputs.trait_set(**kwargs)
         # Make a name for our node
         inputs = interface.inputs.get_hashval()
-        hasher = hashlib.new('md5')
+        hasher = hashlib.new("md5")
         hasher.update(pickle.dumps(inputs))
-        dir_name = '%s-%s' % (interface.__class__.__module__.replace('.', '-'),
-                              interface.__class__.__name__)
+        dir_name = "%s-%s" % (
+            interface.__class__.__module__.replace(".", "-"),
+            interface.__class__.__name__,
+        )
         job_name = hasher.hexdigest()
         node = Node(interface, name=job_name)
         node.base_dir = os.path.join(self.base_dir, dir_name)
@@ -84,9 +86,12 @@ def __call__(self, **kwargs):
         return out
 
     def __repr__(self):
-        return '{}({}.{}), base_dir={})'.format(
-            self.__class__.__name__, self.interface.__module__,
-            self.interface.__name__, self.base_dir)
+        return "{}({}.{}), base_dir={})".format(
+            self.__class__.__name__,
+            self.interface.__module__,
+            self.interface.__name__,
+            self.base_dir,
+        )
 
 
 ###############################################################################
@@ -98,9 +103,9 @@ def read_log(filename, run_dict=None):
     if run_dict is None:
         run_dict = dict()
 
-    with open(filename, 'r') as logfile:
+    with open(filename, "r") as logfile:
         for line in logfile:
-            dir_name, job_name = line[:-1].split('/')
+            dir_name, job_name = line[:-1].split("/")
             jobs = run_dict.get(dir_name, set())
             jobs.add(job_name)
             run_dict[dir_name] = jobs
@@ -122,13 +127,13 @@ def rm_all_but(base_dir, dirs_to_keep, warn=False):
     except OSError:
         "Dir has been deleted"
         return
-    all_dirs = [d for d in all_dirs if not d.startswith('log.')]
+    all_dirs = [d for d in all_dirs if not d.startswith("log.")]
     dirs_to_rm = list(dirs_to_keep.symmetric_difference(all_dirs))
     for dir_name in dirs_to_rm:
         dir_name = os.path.join(base_dir, dir_name)
         if os.path.exists(dir_name):
             if warn:
-                print('removing directory: %s' % dir_name)
+                print("removing directory: %s" % dir_name)
             shutil.rmtree(dir_name)
 
@@ -163,13 +168,13 @@ class Memory(object):
     """
 
     def __init__(self, base_dir):
-        base_dir = os.path.join(os.path.abspath(base_dir), 'nipype_mem')
+        base_dir = os.path.join(os.path.abspath(base_dir), "nipype_mem")
         if not os.path.exists(base_dir):
            os.mkdir(base_dir)
         elif not os.path.isdir(base_dir):
-            raise ValueError('base_dir should be a directory')
+            raise ValueError("base_dir should be a directory")
         self.base_dir = base_dir
-        open(os.path.join(base_dir, 'log.current'), 'a').close()
+        open(os.path.join(base_dir, "log.current"), "a").close()
 
     def cache(self, interface):
         """ Returns a callable that caches the output of an interface
@@ -219,24 +224,23 @@ def _log_name(self, dir_name, job_name):
         # Every counter is a file opened in append mode and closed
         # immediately to avoid race conditions in parallel computing:
         # file appends are atomic
-        with open(os.path.join(base_dir, 'log.current'), 'a') as currentlog:
-            currentlog.write('%s/%s\n' % (dir_name, job_name))
+        with open(os.path.join(base_dir, "log.current"), "a") as currentlog:
+            currentlog.write("%s/%s\n" % (dir_name, job_name))
 
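+        # The same entry is mirrored below into a per-day rotation log laid
+        # out as <base_dir>/log.<year>/<month>/<day>.log; like log.current,
+        # this relies on plain appends being atomic under parallel runs.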
         t = time.localtime()
-        year_dir = os.path.join(base_dir, 'log.%i' % t.tm_year)
+        year_dir = os.path.join(base_dir, "log.%i" % t.tm_year)
         try:
             os.mkdir(year_dir)
         except OSError:
             "Dir exists"
-        month_dir = os.path.join(year_dir, '%02i' % t.tm_mon)
+        month_dir = os.path.join(year_dir, "%02i" % t.tm_mon)
         try:
             os.mkdir(month_dir)
         except OSError:
             "Dir exists"
-        with open(os.path.join(month_dir, '%02i.log' % t.tm_mday),
-                  'a') as rotatefile:
-            rotatefile.write('%s/%s\n' % (dir_name, job_name))
+        with open(os.path.join(month_dir, "%02i.log" % t.tm_mday), "a") as rotatefile:
+            rotatefile.write("%s/%s\n" % (dir_name, job_name))
 
     def clear_previous_runs(self, warn=True):
         """ Remove all the cache that where not used in the latest run of
@@ -250,7 +254,7 @@ def clear_previous_runs(self, warn=True):
             removed
         """
         base_dir = self.base_dir
-        latest_runs = read_log(os.path.join(base_dir, 'log.current'))
+        latest_runs = read_log(os.path.join(base_dir, "log.current"))
         self._clear_all_but(latest_runs, warn=warn)
 
     def clear_runs_since(self, day=None, month=None, year=None, warn=True):
@@ -271,10 +275,10 @@ def clear_runs_since(self, day=None, month=None, year=None, warn=True):
         month = month if month is not None else t.tm_mon
         year = year if year is not None else t.tm_year
         base_dir = self.base_dir
-        cut_off_file = '%s/log.%i/%02i/%02i.log' % (base_dir, year, month, day)
+        cut_off_file = "%s/log.%i/%02i/%02i.log" % (base_dir, year, month, day)
         logs_to_flush = list()
         recent_runs = dict()
-        for log_name in glob.glob('%s/log.*/*/*.log' % base_dir):
+        for log_name in glob.glob("%s/log.*/*/*.log" % base_dir):
             if log_name < cut_off_file:
                 logs_to_flush.append(log_name)
             else:
@@ -289,8 +293,7 @@ def _clear_all_but(self, runs, warn=True):
         """
         rm_all_but(self.base_dir, set(runs.keys()), warn=warn)
         for dir_name, job_names in list(runs.items()):
-            rm_all_but(
-                os.path.join(self.base_dir, dir_name), job_names, warn=warn)
+            rm_all_but(os.path.join(self.base_dir, dir_name), job_names, warn=warn)
 
     def __repr__(self):
-        return '{}(base_dir={})'.format(self.__class__.__name__, self.base_dir)
+        return "{}(base_dir={})".format(self.__class__.__name__, self.base_dir)
diff --git a/nipype/caching/tests/test_memory.py b/nipype/caching/tests/test_memory.py
index 642fee363d..ef80869f03 100644
--- a/nipype/caching/tests/test_memory.py
+++ b/nipype/caching/tests/test_memory.py
@@ -6,6 +6,7 @@
 from ...pipeline.engine.tests.test_engine import EngineTestInterface
 from ... import config
+
 config.set_default_config()
 
 nb_runs = 0
@@ -19,10 +20,10 @@ def _run_interface(self, runtime):
 
 def test_caching(tmpdir):
-    old_rerun = config.get('execution', 'stop_on_first_rerun')
+    old_rerun = config.get("execution", "stop_on_first_rerun")
     try:
         # Prevent rerun to check that evaluation is computed only once
-        config.set('execution', 'stop_on_first_rerun', 'true')
+        config.set("execution", "stop_on_first_rerun", "true")
         mem = Memory(tmpdir.strpath)
         first_nb_run = nb_runs
         results = mem.cache(SideEffectInterface)(input1=2, input2=1)
@@ -37,4 +38,4 @@ def test_caching(tmpdir):
         assert nb_runs == first_nb_run + 2
         assert results.outputs.output1 == [1, 1]
     finally:
-        config.set('execution', 'stop_on_first_rerun', old_rerun)
+        config.set("execution", "stop_on_first_rerun", old_rerun)
diff --git a/nipype/conftest.py b/nipype/conftest.py
index 9a9175ce28..b099fd0078 100644
--- a/nipype/conftest.py
+++ b/nipype/conftest.py
@@ -6,16 +6,17 @@
 import py.path as pp
 
 NIPYPE_DATADIR = os.path.realpath(
-    os.path.join(os.path.dirname(__file__), 'testing/data'))
+    os.path.join(os.path.dirname(__file__), "testing/data")
+)
 
 temp_folder = mkdtemp()
-data_dir = os.path.join(temp_folder, 'data')
+data_dir = os.path.join(temp_folder, "data")
 shutil.copytree(NIPYPE_DATADIR, data_dir)
 
 
 @pytest.fixture(autouse=True)
 def add_np(doctest_namespace):
-    doctest_namespace['np'] = numpy
-    doctest_namespace['os'] = os
+    doctest_namespace["np"] = numpy
+    doctest_namespace["os"] = os
     doctest_namespace["datadir"] = data_dir
diff --git a/nipype/external/cloghandler.py b/nipype/external/cloghandler.py
index 05e28968dd..c5ee9d7a6f 100644
--- a/nipype/external/cloghandler.py
+++ b/nipype/external/cloghandler.py
@@ -74,14 +74,16 @@ class ConcurrentRotatingFileHandler(BaseRotatingHandler):
     exceed the given size.
     """
 
-    def __init__(self,
-                 filename,
-                 mode='a',
-                 maxBytes=0,
-                 backupCount=0,
-                 encoding=None,
-                 debug=True,
-                 supress_abs_warn=False):
+    def __init__(
+        self,
+        filename,
+        mode="a",
+        maxBytes=0,
+        backupCount=0,
+        encoding=None,
+        debug=True,
+        supress_abs_warn=False,
+    ):
         """
         Open the specified file and use it as the stream for logging.
@@ -139,15 +141,16 @@ def __init__(self,
         # if the given filename contains no path, we make an absolute path
         if not os.path.isabs(filename):
-            if FORCE_ABSOLUTE_PATH or \
-               not os.path.split(filename)[0]:
+            if FORCE_ABSOLUTE_PATH or not os.path.split(filename)[0]:
                 filename = os.path.abspath(filename)
             elif not supress_abs_warn:
                 from warnings import warn
+
                 warn(
                     "The given 'filename' should be an absolute path.  If your "
                     "application calls os.chdir(), your logs may get messed up. "
-                    "Use 'supress_abs_warn=True' to hide this message.")
+                    "Use 'supress_abs_warn=True' to hide this message."
+                )
         try:
             BaseRotatingHandler.__init__(self, filename, mode, encoding)
         except TypeError:  # Due to a different logging release without encoding support (Python 2.4.1 and earlier?)
@@ -158,7 +161,7 @@ def __init__(self,
         self.maxBytes = maxBytes
         self.backupCount = backupCount
         # Prevent multiple extensions on the lock file (Only handles the normal "*.log" case.)
-        self.lock_file = '%s.lock' % filename
+        self.lock_file = "%s.lock" % filename
         self.stream_lock = SoftFileLock(self.lock_file)
 
         # For debug mode, swap out the "_degrade()" method with a more a verbose one.
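A minimal usage sketch for the handler above, assuming only the constructor signature shown in this hunk and the module path nipype/external/cloghandler.py implies:

    import logging
    from nipype.external.cloghandler import ConcurrentRotatingFileHandler

    log = logging.getLogger("nipype.workflow")
    # An absolute filename sidesteps the os.chdir() warning emitted in __init__;
    # rotate near 1 MB and keep three numbered backups.
    log.addHandler(
        ConcurrentRotatingFileHandler(
            "/tmp/pypeline.log", maxBytes=1024 * 1024, backupCount=3
        )
    )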
@@ -231,13 +234,17 @@ def _degrade_debug(self, degrade, msg, *args):
         """
         if degrade:
             if not self._rotateFailed:
-                sys.stderr.write("Degrade mode - ENTERING - (pid=%d) %s\n" %
-                                 (os.getpid(), msg % args))
+                sys.stderr.write(
+                    "Degrade mode - ENTERING - (pid=%d) %s\n"
+                    % (os.getpid(), msg % args)
+                )
                 self._rotateFailed = True
         else:
             if self._rotateFailed:
-                sys.stderr.write("Degrade mode - EXITING - (pid=%d) %s\n" %
-                                 (os.getpid(), msg % args))
+                sys.stderr.write(
+                    "Degrade mode - EXITING - (pid=%d) %s\n"
+                    % (os.getpid(), msg % args)
+                )
                 self._rotateFailed = False
 
     def doRollover(self):
@@ -255,15 +262,15 @@ def doRollover(self):
         # Attempt to rename logfile to tempname: There is a slight race-condition here, but it seems unavoidable
         tmpname = None
         while not tmpname or os.path.exists(tmpname):
-            tmpname = "%s.rotate.%08d" % (self.baseFilename,
-                                          randint(0, 99999999))
+            tmpname = "%s.rotate.%08d" % (self.baseFilename, randint(0, 99999999))
         try:
             # Do a rename test to determine if we can successfully rename the log file
             os.rename(self.baseFilename, tmpname)
         except (IOError, OSError):
             exc_value = sys.exc_info()[1]
-            self._degrade(True, "rename failed. File in use? "
-                          "exception=%s", exc_value)
+            self._degrade(
+                True, "rename failed. File in use? " "exception=%s", exc_value
+            )
             return
 
         # Q: Is there some way to protect this code from a KeboardInterupt?
@@ -311,19 +318,18 @@ def shouldRollover(self, record):
     def _shouldRollover(self):
         if self.maxBytes > 0:  # are we rolling over?
             try:
-                self.stream.seek(
-                    0, 2)  # due to non-posix-compliant Windows feature
+                self.stream.seek(0, 2)  # due to non-posix-compliant Windows feature
             except IOError:
                 return True
             if self.stream.tell() >= self.maxBytes:
                 return True
         else:
-            self._degrade(False,
-                          "Rotation done or not needed at this time")
+            self._degrade(False, "Rotation done or not needed at this time")
         return False
 
 
 # Publish this class to the "logging.handlers" module so that it can be use
 # from a logging config file via logging.config.fileConfig().
 import logging.handlers
+
 logging.handlers.ConcurrentRotatingFileHandler = ConcurrentRotatingFileHandler
diff --git a/nipype/external/due.py b/nipype/external/due.py
index c360435bae..fc436d5d45 100644
--- a/nipype/external/due.py
+++ b/nipype/external/due.py
@@ -24,7 +24,7 @@
 License: BSD-2
 """
 
-__version__ = '0.0.5'
+__version__ = "0.0.5"
 
 
 class InactiveDueCreditCollector(object):
@@ -45,7 +45,7 @@ def nondecorating_decorator(func):
     cite = load = add = _donothing
 
     def __repr__(self):
-        return '{}()'.format(self.__class__.__name__)
+        return "{}()".format(self.__class__.__name__)
 
 
 def _donothing_func(*args, **kwargs):
@@ -55,9 +55,9 @@ def _donothing_func(*args, **kwargs):
 
 try:
     from duecredit import due, BibTeX, Doi, Url
-    if 'due' in locals() and not hasattr(due, 'cite'):
-        raise RuntimeError(
-            "Imported due lacks .cite. DueCredit is now disabled")
+
+    if "due" in locals() and not hasattr(due, "cite"):
+        raise RuntimeError("Imported due lacks .cite. DueCredit is now disabled")
 except ImportError:
     # Initiate due stub
     due = InactiveDueCreditCollector()
diff --git a/nipype/external/fsl_imglob.py b/nipype/external/fsl_imglob.py
index ea4cfe245a..3707e4750d 100755
--- a/nipype/external/fsl_imglob.py
+++ b/nipype/external/fsl_imglob.py
@@ -79,9 +79,9 @@ def usage():
 #    basename and extension pair )
 def isImage(input, allExtensions):
     for extension in allExtensions:
-        if input[-len(extension):] == extension:
-            return True, input[:-len(extension)], extension
-    return False, input, ''
+        if input[-len(extension) :] == extension:
+            return True, input[: -len(extension)], extension
+    return False, input, ""
 
 
 def removeImageExtension(input, allExtensions):
@@ -95,13 +95,14 @@ def main():
     if sys.version_info < (2, 4):
         import sets
         from sets import Set
+
         setAvailable = False
     else:
         setAvailable = True
 
     deleteExtensions = True
-    primaryExtensions = ['.nii.gz', '.nii', '.hdr.gz', '.hdr']
-    secondaryExtensions = ['.img.gz', '.img']
+    primaryExtensions = [".nii.gz", ".nii", ".hdr.gz", ".hdr"]
+    secondaryExtensions = [".img.gz", ".img"]
     allExtensions = primaryExtensions + secondaryExtensions
     validExtensions = primaryExtensions
     startingArg = 1
@@ -125,13 +126,14 @@ def main():
             for currentExtension in validExtensions:
                 filelist.extend(
                     glob.glob(
-                        removeImageExtension(sys.argv[arg], allExtensions) +
-                        currentExtension))
+                        removeImageExtension(sys.argv[arg], allExtensions)
+                        + currentExtension
+                    )
+                )
 
     if deleteExtensions:
         for file in range(0, len(filelist)):
-            filelist[file] = removeImageExtension(filelist[file],
-                                                  allExtensions)
+            filelist[file] = removeImageExtension(filelist[file], allExtensions)
     if setAvailable:
         filelist = list(set(filelist))
     else:
@@ -139,9 +141,9 @@ def main():
     filelist.sort()
 
     for file in range(0, len(filelist)):
-        print(filelist[file], end=' ')
+        print(filelist[file], end=" ")
         if file < len(filelist) - 1:
-            print(" ", end=' ')
+            print(" ", end=" ")
 
 
 if __name__ == "__main__":
diff --git a/nipype/info.py b/nipype/info.py
index 9858e045dc..17d9a833a2 100644
--- a/nipype/info.py
+++ b/nipype/info.py
@@ -5,7 +5,7 @@
 # nipype version information
 # Remove -dev for release
-__version__ = '1.4.0-dev'
+__version__ = "1.4.0-dev"
 
 
 def get_nipype_gitversion():
@@ -18,46 +18,50 @@ def get_nipype_gitversion():
     """
     import os
     import subprocess
+
     try:
         import nipype
+
         gitpath = os.path.realpath(
-            os.path.join(os.path.dirname(nipype.__file__), os.path.pardir))
+            os.path.join(os.path.dirname(nipype.__file__), os.path.pardir)
+        )
     except:
         gitpath = os.getcwd()
-    gitpathgit = os.path.join(gitpath, '.git')
+    gitpathgit = os.path.join(gitpath, ".git")
     if not os.path.exists(gitpathgit):
         return None
     ver = None
     try:
         o, _ = subprocess.Popen(
-            'git describe', shell=True, cwd=gitpath,
-            stdout=subprocess.PIPE).communicate()
+            "git describe", shell=True, cwd=gitpath, stdout=subprocess.PIPE
+        ).communicate()
     except Exception:
         pass
     else:
-        ver = o.decode().strip().split('-')[-1]
+        ver = o.decode().strip().split("-")[-1]
     return ver
 
 
-if __version__.endswith('-dev'):
+if __version__.endswith("-dev"):
     gitversion = get_nipype_gitversion()
     if gitversion:
-        __version__ = '{}+{}'.format(__version__, gitversion)
+        __version__ = "{}+{}".format(__version__, gitversion)
 
 CLASSIFIERS = [
-    'Development Status :: 5 - Production/Stable', 'Environment :: Console',
-    'Intended Audience :: Science/Research',
-    'License :: OSI Approved :: Apache Software License',
-    'Operating System :: MacOS :: MacOS X',
-    'Operating System :: POSIX :: Linux',
-    'Programming Language :: Python :: 3.5',
-    'Programming Language :: Python :: 3.6',
-    'Programming Language :: Python :: 3.7',
-    'Topic :: Scientific/Engineering'
+    "Development Status :: 5 - Production/Stable",
+    "Environment :: Console",
+    "Intended Audience :: Science/Research",
+    "License :: OSI Approved :: Apache Software License",
+    "Operating System :: MacOS :: MacOS X",
+    "Operating System :: POSIX :: Linux",
+    "Programming Language :: Python :: 3.5",
+    "Programming Language :: Python :: 3.6",
+    "Programming Language :: Python :: 3.7",
+    "Topic :: Scientific/Engineering",
 ]
 PYTHON_REQUIRES = ">= 3.5"
 
-description = 'Neuroimaging in Python: Pipelines and Interfaces'
+description = "Neuroimaging in Python: Pipelines and Interfaces"
 
 # Note: this long_description is actually a copy/paste from the top-level
 # README.txt, so that it shows up nicely on PyPI.  So please remember to edit
@@ -94,86 +98,88 @@ def get_nipype_gitversion():
 """
 
 # versions
-NIBABEL_MIN_VERSION = '2.1.0'
-NETWORKX_MIN_VERSION = '1.9'
-NETWORKX_MAX_VERSION_27 = '2.2'
-NUMPY_MIN_VERSION = '1.12'
+NIBABEL_MIN_VERSION = "2.1.0"
+NETWORKX_MIN_VERSION = "1.9"
+NETWORKX_MAX_VERSION_27 = "2.2"
+NUMPY_MIN_VERSION = "1.12"
 # Numpy bug in python 3.7:
 # https://www.opensourceanswers.com/blog/you-shouldnt-use-python-37-for-data-science-right-now.html
-NUMPY_MIN_VERSION_37 = '1.15.3'
-SCIPY_MIN_VERSION = '0.14'
+NUMPY_MIN_VERSION_37 = "1.15.3"
+SCIPY_MIN_VERSION = "0.14"
 # Scipy drops 2.7 and 3.4 support in 1.3
-SCIPY_MAX_VERSION_34 = '1.3.0'
-TRAITS_MIN_VERSION = '4.6'
-DATEUTIL_MIN_VERSION = '2.2'
-FUTURE_MIN_VERSION = '0.16.0'
-SIMPLEJSON_MIN_VERSION = '3.8.0'
-PROV_VERSION = '1.5.2'
-CLICK_MIN_VERSION = '6.6.0'
-PYDOT_MIN_VERSION = '1.2.3'
-
-NAME = 'nipype'
-MAINTAINER = 'nipype developers'
-MAINTAINER_EMAIL = 'neuroimaging@python.org'
+SCIPY_MAX_VERSION_34 = "1.3.0"
+TRAITS_MIN_VERSION = "4.6"
+DATEUTIL_MIN_VERSION = "2.2"
+FUTURE_MIN_VERSION = "0.16.0"
+SIMPLEJSON_MIN_VERSION = "3.8.0"
+PROV_VERSION = "1.5.2"
+CLICK_MIN_VERSION = "6.6.0"
+PYDOT_MIN_VERSION = "1.2.3"
+
+NAME = "nipype"
+MAINTAINER = "nipype developers"
+MAINTAINER_EMAIL = "neuroimaging@python.org"
 DESCRIPTION = description
 LONG_DESCRIPTION = long_description
-URL = 'http://nipy.org/nipype'
-DOWNLOAD_URL = 'http://github.com/nipy/nipype/archives/master'
-LICENSE = 'Apache License, 2.0'
-AUTHOR = 'nipype developers'
-AUTHOR_EMAIL = 'neuroimaging@python.org'
-PLATFORMS = 'OS Independent'
-MAJOR = __version__.split('.')[0]
-MINOR = __version__.split('.')[1]
-MICRO = __version__.replace('-', '.').split('.')[2]
-ISRELEASE = (len(__version__.replace('-', '.').split('.')) == 3
-             or 'post' in __version__.replace('-', '.').split('.')[-1])
+URL = "http://nipy.org/nipype"
+DOWNLOAD_URL = "http://github.com/nipy/nipype/archives/master"
+LICENSE = "Apache License, 2.0"
+AUTHOR = "nipype developers"
+AUTHOR_EMAIL = "neuroimaging@python.org"
+PLATFORMS = "OS Independent"
+MAJOR = __version__.split(".")[0]
+MINOR = __version__.split(".")[1]
+MICRO = __version__.replace("-", ".").split(".")[2]
+ISRELEASE = (
+    len(__version__.replace("-", ".").split(".")) == 3
+    or "post" in __version__.replace("-", ".").split(".")[-1]
+)
 VERSION = __version__
-PROVIDES = ['nipype']
+PROVIDES = ["nipype"]
 REQUIRES = [
-    'click>=%s' % CLICK_MIN_VERSION,
-    'funcsigs',
-    'future>=%s' % FUTURE_MIN_VERSION,
-    'networkx>=%s' % NETWORKX_MIN_VERSION,
-    'nibabel>=%s' % NIBABEL_MIN_VERSION,
+    "click>=%s" % CLICK_MIN_VERSION,
+    "funcsigs",
+    "future>=%s" % FUTURE_MIN_VERSION,
+    "networkx>=%s" % NETWORKX_MIN_VERSION,
+    "nibabel>=%s" % NIBABEL_MIN_VERSION,
     'numpy>=%s ; python_version < "3.7"' % NUMPY_MIN_VERSION,
     'numpy>=%s ; python_version >= "3.7"' % NUMPY_MIN_VERSION_37,
-    'packaging',
-    'prov>=%s' % PROV_VERSION,
-    'pydot>=%s' % PYDOT_MIN_VERSION,
-    'pydotplus',
-    'python-dateutil>=%s' % DATEUTIL_MIN_VERSION,
-    'scipy>=%s' % SCIPY_MIN_VERSION,
-    'simplejson>=%s' % SIMPLEJSON_MIN_VERSION,
-    'traits>=%s,!=5.0' % TRAITS_MIN_VERSION,
-    'filelock>=3.0.0',
-    'etelemetry',
+    "packaging",
+    "prov>=%s" % PROV_VERSION,
+    "pydot>=%s" % PYDOT_MIN_VERSION,
+    "pydotplus",
+    "python-dateutil>=%s" % DATEUTIL_MIN_VERSION,
+    "scipy>=%s" % SCIPY_MIN_VERSION,
+    "simplejson>=%s" % SIMPLEJSON_MIN_VERSION,
+    "traits>=%s,!=5.0" % TRAITS_MIN_VERSION,
+    "filelock>=3.0.0",
+    "etelemetry",
 ]
 
 # neurdflib has to come after prov
 # https://github.com/nipy/nipype/pull/2961#issuecomment-512035484
-REQUIRES += ['neurdflib']
+REQUIRES += ["neurdflib"]
 
 TESTS_REQUIRES = [
-    'codecov',
-    'coverage<5',
-    'mock',
-    'pytest',
-    'pytest-cov',
-    'pytest-env',
+    "codecov",
+    "coverage<5",
+    "mock",
+    "pytest",
+    "pytest-cov",
+    "pytest-env",
 ]
 
 EXTRA_REQUIRES = {
-    'data': ['datalad'],
-    'doc': ['Sphinx>=1.4', 'numpydoc', 'matplotlib', 'pydotplus', 'pydot>=1.2.3'],
-    'duecredit': ['duecredit'],
-    'nipy': ['nitime', 'nilearn<0.5.0', 'dipy', 'nipy', 'matplotlib'],
-    'profiler': ['psutil>=5.0'],
-    'pybids': ['pybids>=0.7.0'],
-    'specs': ['yapf>=0.27'],
-    'ssh': ['paramiko'],
-    'tests': TESTS_REQUIRES,
-    'xvfbwrapper': ['xvfbwrapper'],
+    "data": ["datalad"],
+    "doc": ["Sphinx>=1.4", "numpydoc", "matplotlib", "pydotplus", "pydot>=1.2.3"],
+    "duecredit": ["duecredit"],
+    "nipy": ["nitime", "nilearn<0.5.0", "dipy", "nipy", "matplotlib"],
+    "profiler": ["psutil>=5.0"],
+    "pybids": ["pybids>=0.7.0"],
+    "specs": ["black"],
+    "ssh": ["paramiko"],
+    "tests": TESTS_REQUIRES,
+    "xvfbwrapper": ["xvfbwrapper"],
     # 'mesh': ['mayavi']  # Enable when it works
 }
 
@@ -183,9 +189,10 @@ def _list_union(iterable):
 
 
 # Enable a handle to install all extra dependencies at once
-EXTRA_REQUIRES['all'] = _list_union(EXTRA_REQUIRES.values())
+EXTRA_REQUIRES["all"] = _list_union(EXTRA_REQUIRES.values())
 # dev = doc + tests + specs
-EXTRA_REQUIRES['dev'] = _list_union(val for key, val in EXTRA_REQUIRES.items()
-                                    if key in ('doc', 'tests', 'specs'))
+EXTRA_REQUIRES["dev"] = _list_union(
+    val for key, val in EXTRA_REQUIRES.items() if key in ("doc", "tests", "specs")
+)
 
-STATUS = 'stable'
+STATUS = "stable"
diff --git a/nipype/interfaces/__init__.py b/nipype/interfaces/__init__.py
index 7ad7166476..fe1bf9c9e5 100644
--- a/nipype/interfaces/__init__.py
+++ b/nipype/interfaces/__init__.py
@@ -6,7 +6,7 @@
 Requires Packages to be installed
 """
-__docformat__ = 'restructuredtext'
+__docformat__ = "restructuredtext"
 from .io import DataGrabber, DataSink, SelectFiles, BIDSDataGrabber
 from .utility import IdentityInterface, Rename, Function, Select, Merge
diff --git a/nipype/interfaces/afni/__init__.py b/nipype/interfaces/afni/__init__.py
index 015f17df73..f795e347a3 100644
--- a/nipype/interfaces/afni/__init__.py
+++ b/nipype/interfaces/afni/__init__.py
@@ -9,16 +9,82 @@
 
 from .base import Info
 from .preprocess import (
-    AlignEpiAnatPy, Allineate, Automask, AutoTcorrelate, AutoTLRC, Bandpass,
-    BlurInMask, BlurToFWHM, ClipLevel, DegreeCentrality, Despike, Detrend, ECM,
-    Fim, Fourier, Hist, LFCD, Maskave, Means, OutlierCount, QualityIndex,
-    ROIStats, Retroicor, Seg, SkullStrip, TCorr1D, TCorrMap, TCorrelate, TNorm,
-    TProject, TShift, TSmooth, Volreg, Warp, QwarpPlusMinus, Qwarp)
-from .svm import (SVMTest, SVMTrain)
+    AlignEpiAnatPy,
+    Allineate,
+    Automask,
+    AutoTcorrelate,
+    AutoTLRC,
+    Bandpass,
+    BlurInMask,
+    BlurToFWHM,
+    ClipLevel,
+    DegreeCentrality,
+    Despike,
+    Detrend,
+    ECM,
+    Fim,
+    Fourier,
+    Hist,
+    LFCD,
+    Maskave,
+    Means,
+    OutlierCount,
+    QualityIndex,
+    ROIStats,
+    Retroicor,
+    Seg,
+    SkullStrip,
+    TCorr1D,
+    TCorrMap,
+    TCorrelate,
+    TNorm,
+    TProject,
+    TShift,
+    TSmooth,
+    Volreg,
+    Warp,
+    QwarpPlusMinus,
+    Qwarp,
+)
+from .svm import SVMTest, SVMTrain
 from .utils import (
-    ABoverlap, AFNItoNIFTI, Autobox, Axialize, BrickStat, Bucket, Calc, Cat,
-    CatMatvec, CenterMass, ConvertDset, Copy, Dot, Edge3, Eval, FWHMx,
-    LocalBistat, Localstat, MaskTool, Merge, Notes, NwarpApply, NwarpAdjust,
-    NwarpCat, OneDToolPy, Refit, ReHo, Resample, TCat, TCatSubBrick, TStat,
-    To3D, Unifize, Undump, ZCutUp, GCOR, Zcat, Zeropad)
-from .model import (Deconvolve, Remlfit, Synthesize)
+    ABoverlap,
+    AFNItoNIFTI,
+    Autobox,
+    Axialize,
+    BrickStat,
+    Bucket,
+    Calc,
+    Cat,
+    CatMatvec,
+    CenterMass,
+    ConvertDset,
+    Copy,
+    Dot,
+    Edge3,
+    Eval,
+    FWHMx,
+    LocalBistat,
+    Localstat,
+    MaskTool,
+    Merge,
+    Notes,
+    NwarpApply,
+    NwarpAdjust,
+    NwarpCat,
+    OneDToolPy,
+    Refit,
+    ReHo,
+    Resample,
+    TCat,
+    TCatSubBrick,
+    TStat,
+    To3D,
+    Unifize,
+    Undump,
+    ZCutUp,
+    GCOR,
+    Zcat,
+    Zeropad,
+)
+from .model import Deconvolve, Remlfit, Synthesize
diff --git a/nipype/interfaces/afni/base.py b/nipype/interfaces/afni/base.py
index f83b40c4be..c1b181b85d 100644
--- a/nipype/interfaces/afni/base.py
+++ b/nipype/interfaces/afni/base.py
@@ -8,33 +8,40 @@
 
 from ... import logging, LooseVersion
 from ...utils.filemanip import split_filename, fname_presuffix
-from ..base import (CommandLine, traits, CommandLineInputSpec, isdefined, File,
-                    TraitedSpec, PackageInfo)
+from ..base import (
+    CommandLine,
+    traits,
+    CommandLineInputSpec,
+    isdefined,
+    File,
+    TraitedSpec,
+    PackageInfo,
+)
 from ...external.due import BibTeX
 
 # Use nipype's logging system
-IFLOGGER = logging.getLogger('nipype.interface')
+IFLOGGER = logging.getLogger("nipype.interface")
 
 
 class Info(PackageInfo):
     """Handle afni output type and version information."""
 
-    __outputtype = 'AFNI'
-    ftypes = {'NIFTI': '.nii', 'AFNI': '', 'NIFTI_GZ': '.nii.gz'}
-    version_cmd = 'afni --version'
+    __outputtype = "AFNI"
+    ftypes = {"NIFTI": ".nii", "AFNI": "", "NIFTI_GZ": ".nii.gz"}
+    version_cmd = "afni --version"
 
     @staticmethod
     def parse_version(raw_info):
         """Check and parse AFNI's version."""
-        version_stamp = raw_info.split('\n')[0].split('Version ')[1]
-        if version_stamp.startswith('AFNI'):
-            version_stamp = version_stamp.split('AFNI_')[1]
-        elif version_stamp.startswith('Debian'):
-            version_stamp = version_stamp.split('Debian-')[1].split('~')[0]
+        version_stamp = raw_info.split("\n")[0].split("Version ")[1]
+        if version_stamp.startswith("AFNI"):
+            version_stamp = version_stamp.split("AFNI_")[1]
+        elif version_stamp.startswith("Debian"):
+            version_stamp = version_stamp.split("Debian-")[1].split("~")[0]
         else:
             return None
 
-        version = LooseVersion(version_stamp.replace('_', '.')).version[:3]
+        version = LooseVersion(version_stamp.replace("_", ".")).version[:3]
         if version[0] < 1000:
             version[0] = version[0] + 2000
         return tuple(version)
@@ -58,7 +65,7 @@ def output_type_to_ext(cls, outputtype):
         try:
             return cls.ftypes[outputtype]
         except KeyError as e:
-            msg = 'Invalid AFNIOUTPUTTYPE: ', outputtype
+            msg = "Invalid AFNIOUTPUTTYPE: ", outputtype
             raise KeyError(msg) from e
 
     @classmethod
@@ -75,7 +82,7 @@ def outputtype(cls):
         None
         """
-        return 'AFNI'
+        return "AFNI"
 
     @staticmethod
     def standard_image(img_name):
@@ -86,10 +93,11 @@ def standard_image(img_name):
         """
         clout = CommandLine(
-            'which afni',
+            "which afni",
             ignore_exception=True,
             resource_monitor=False,
-            terminal_output='allatonce').run()
+            terminal_output="allatonce",
+        ).run()
         if clout.runtime.returncode is not 0:
             return None
@@ -106,25 +114,28 @@ class AFNICommandBase(CommandLine):
     """
 
     def _run_interface(self, runtime):
-        if platform == 'darwin':
-            runtime.environ['DYLD_FALLBACK_LIBRARY_PATH'] = '/usr/local/afni/'
+        if platform == "darwin":
+            runtime.environ["DYLD_FALLBACK_LIBRARY_PATH"] = "/usr/local/afni/"
         return super(AFNICommandBase, self)._run_interface(runtime)
 
 
 class AFNICommandInputSpec(CommandLineInputSpec):
     num_threads = traits.Int(
-        1, usedefault=True, nohash=True, desc='set number of threads')
+        1, usedefault=True, nohash=True, desc="set number of threads"
+    )
     outputtype = traits.Enum(
-        'AFNI', list(Info.ftypes.keys()), desc='AFNI output filetype')
+        "AFNI", list(Info.ftypes.keys()), desc="AFNI output filetype"
+    )
     out_file = File(
         name_template="%s_afni",
-        desc='output image file name',
-        argstr='-prefix %s',
-        name_source=["in_file"])
+        desc="output image file name",
+        argstr="-prefix %s",
+        name_source=["in_file"],
+    )
 
 
 class AFNICommandOutputSpec(TraitedSpec):
-    out_file = File(desc='output file', exists=True)
+    out_file = File(desc="output file", exists=True)
 
 
 class AFNICommand(AFNICommandBase):
@@ -133,34 +144,39 @@ class AFNICommand(AFNICommandBase):
     input_spec = AFNICommandInputSpec
     _outputtype = None
 
-    references_ = [{
-        'entry':
-        BibTeX('@article{Cox1996,'
-               'author={R.W. Cox},'
-               'title={AFNI: software for analysis and '
-               'visualization of functional magnetic '
-               'resonance neuroimages},'
-               'journal={Computers and Biomedical research},'
-               'volume={29},'
-               'number={3},'
-               'pages={162-173},'
-               'year={1996},'
-               '}'),
-        'tags': ['implementation'],
-    }, {
-        'entry':
-        BibTeX('@article{CoxHyde1997,'
-               'author={R.W. Cox and J.S. Hyde},'
-               'title={Software tools for analysis and '
-               'visualization of fMRI data},'
-               'journal={NMR in Biomedicine},'
-               'volume={10},'
-               'number={45},'
-               'pages={171-178},'
-               'year={1997},'
-               '}'),
-        'tags': ['implementation'],
-    }]
+    references_ = [
+        {
+            "entry": BibTeX(
+                "@article{Cox1996,"
+                "author={R.W. Cox},"
+                "title={AFNI: software for analysis and "
+                "visualization of functional magnetic "
+                "resonance neuroimages},"
+                "journal={Computers and Biomedical research},"
+                "volume={29},"
+                "number={3},"
+                "pages={162-173},"
+                "year={1996},"
+                "}"
+            ),
+            "tags": ["implementation"],
+        },
+        {
+            "entry": BibTeX(
+                "@article{CoxHyde1997,"
+                "author={R.W. Cox and J.S. Hyde},"
+                "title={Software tools for analysis and "
+                "visualization of fMRI data},"
+                "journal={NMR in Biomedicine},"
+                "volume={10},"
+                "number={45},"
+                "pages={171-178},"
+                "year={1997},"
+                "}"
+            ),
+            "tags": ["implementation"],
+        },
+    ]
 
     @property
     def num_threads(self):
@@ -184,15 +200,15 @@ def set_default_output_type(cls, outputtype):
         if outputtype in Info.ftypes:
             cls._outputtype = outputtype
         else:
-            raise AttributeError('Invalid AFNI outputtype: %s' % outputtype)
+            raise AttributeError("Invalid AFNI outputtype: %s" % outputtype)
 
     def __init__(self, **inputs):
         """Instantiate an AFNI command tool wrapper."""
         super(AFNICommand, self).__init__(**inputs)
-        self.inputs.on_trait_change(self._output_update, 'outputtype')
+        self.inputs.on_trait_change(self._output_update, "outputtype")
 
-        if hasattr(self.inputs, 'num_threads'):
-            self.inputs.on_trait_change(self._nthreads_update, 'num_threads')
+        if hasattr(self.inputs, "num_threads"):
+            self.inputs.on_trait_change(self._nthreads_update, "num_threads")
 
         if self._outputtype is None:
             self._outputtype = Info.outputtype()
@@ -204,7 +220,7 @@ def __init__(self, **inputs):
 
     def _nthreads_update(self):
         """Update environment with new number of threads."""
-        self.inputs.environ['OMP_NUM_THREADS'] = '%d' % self.inputs.num_threads
+        self.inputs.environ["OMP_NUM_THREADS"] = "%d" % self.inputs.num_threads
 
     def _output_update(self):
         """
@@ -219,7 +235,8 @@ def _output_update(self):
     def _overload_extension(self, value, name=None):
         path, base, _ = split_filename(value)
         return os.path.join(
-            path, base + Info.output_type_to_ext(self.inputs.outputtype))
+            path, base + Info.output_type_to_ext(self.inputs.outputtype)
+        )
 
     def _list_outputs(self):
         outputs = super(AFNICommand, self)._list_outputs()
@@ -233,12 +250,7 @@ def _list_outputs(self):
                     outputs[name] = outputs[name] + "+orig.BRIK"
         return outputs
 
-    def _gen_fname(self,
-                   basename,
-                   cwd=None,
-                   suffix=None,
-                   change_ext=True,
-                   ext=None):
+    def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True, ext=None):
         """
         Generate a filename based on the given parameters.
@@ -265,8 +277,8 @@ def _gen_fname(self,
         """
         if not basename:
-            msg = 'Unable to generate filename for command %s. ' % self.cmd
-            msg += 'basename is not set!'
+            msg = "Unable to generate filename for command %s. " % self.cmd
+            msg += "basename is not set!"
raise ValueError(msg) if cwd is None: @@ -274,12 +286,11 @@ def _gen_fname(self, if ext is None: ext = Info.output_type_to_ext(self.inputs.outputtype) if change_ext: - suffix = ''.join((suffix, ext)) if suffix else ext + suffix = "".join((suffix, ext)) if suffix else ext if suffix is None: - suffix = '' - fname = fname_presuffix( - basename, suffix=suffix, use_ext=False, newpath=cwd) + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) return fname @@ -292,9 +303,11 @@ def no_afni(): class AFNIPythonCommandInputSpec(CommandLineInputSpec): outputtype = traits.Enum( - 'AFNI', list(Info.ftypes.keys()), desc='AFNI output filetype') + "AFNI", list(Info.ftypes.keys()), desc="AFNI output filetype" + ) py27_path = traits.Either( - 'python2', File(exists=True), usedefault=True, default='python2') + "python2", File(exists=True), usedefault=True, default="python2" + ) class AFNIPythonCommand(AFNICommand): diff --git a/nipype/interfaces/afni/model.py b/nipype/interfaces/afni/model.py index efc4bf7b59..2e6d2fc15a 100644 --- a/nipype/interfaces/afni/model.py +++ b/nipype/interfaces/afni/model.py @@ -12,220 +12,263 @@ import os -from ..base import (CommandLineInputSpec, CommandLine, Directory, TraitedSpec, - traits, isdefined, File, InputMultiPath, Undefined, Str) +from ..base import ( + CommandLineInputSpec, + CommandLine, + Directory, + TraitedSpec, + traits, + isdefined, + File, + InputMultiPath, + Undefined, + Str, +) from ...external.due import BibTeX -from .base import (AFNICommandBase, AFNICommand, AFNICommandInputSpec, - AFNICommandOutputSpec, Info) +from .base import ( + AFNICommandBase, + AFNICommand, + AFNICommandInputSpec, + AFNICommandOutputSpec, + Info, +) class DeconvolveInputSpec(AFNICommandInputSpec): in_files = InputMultiPath( File(exists=True), - desc='filenames of 3D+time input datasets. More than one filename can ' - 'be given and the datasets will be auto-catenated in time. ' - 'You can input a 1D time series file here, but the time axis ' - 'should run along the ROW direction, not the COLUMN direction as ' - 'in the \'input1D\' option.', - argstr='-input %s', + desc="filenames of 3D+time input datasets. More than one filename can " + "be given and the datasets will be auto-catenated in time. " + "You can input a 1D time series file here, but the time axis " + "should run along the ROW direction, not the COLUMN direction as " + "in the 'input1D' option.", + argstr="-input %s", copyfile=False, sep=" ", - position=1) + position=1, + ) sat = traits.Bool( - desc='check the dataset time series for initial saturation transients,' - ' which should normally have been excised before data analysis.', - argstr='-sat', - xor=['trans']) + desc="check the dataset time series for initial saturation transients," + " which should normally have been excised before data analysis.", + argstr="-sat", + xor=["trans"], + ) trans = traits.Bool( - desc='check the dataset time series for initial saturation transients,' - ' which should normally have been excised before data analysis.', - argstr='-trans', - xor=['sat']) + desc="check the dataset time series for initial saturation transients," + " which should normally have been excised before data analysis.", + argstr="-trans", + xor=["sat"], + ) noblock = traits.Bool( - desc='normally, if you input multiple datasets with \'input\', then ' - 'the separate datasets are taken to be separate image runs that ' - 'get separate baseline models. 
Use this options if you want to ' - 'have the program consider these to be all one big run.' - '* If any of the input dataset has only 1 sub-brick, then this ' - 'option is automatically invoked!' - '* If the auto-catenation feature isn\'t used, then this option ' - 'has no effect, no how, no way.', - argstr='-noblock') + desc="normally, if you input multiple datasets with 'input', then " + "the separate datasets are taken to be separate image runs that " + "get separate baseline models. Use this options if you want to " + "have the program consider these to be all one big run." + "* If any of the input dataset has only 1 sub-brick, then this " + "option is automatically invoked!" + "* If the auto-catenation feature isn't used, then this option " + "has no effect, no how, no way.", + argstr="-noblock", + ) force_TR = traits.Float( - desc='use this value instead of the TR in the \'input\' ' - 'dataset. (It\'s better to fix the input using Refit.)', - argstr='-force_TR %f', - position=0) + desc="use this value instead of the TR in the 'input' " + "dataset. (It's better to fix the input using Refit.)", + argstr="-force_TR %f", + position=0, + ) input1D = File( - desc='filename of single (fMRI) .1D time series where time runs down ' - 'the column.', - argstr='-input1D %s', - exists=True) + desc="filename of single (fMRI) .1D time series where time runs down " + "the column.", + argstr="-input1D %s", + exists=True, + ) TR_1D = traits.Float( - desc='TR to use with \'input1D\'. This option has no effect if you do ' - 'not also use \'input1D\'.', - argstr='-TR_1D %f') + desc="TR to use with 'input1D'. This option has no effect if you do " + "not also use 'input1D'.", + argstr="-TR_1D %f", + ) legendre = traits.Bool( - desc='use Legendre polynomials for null hypothesis (baseline model)', - argstr='-legendre') + desc="use Legendre polynomials for null hypothesis (baseline model)", + argstr="-legendre", + ) nolegendre = traits.Bool( - desc='use power polynomials for null hypotheses. Don\'t do this ' - 'unless you are crazy!', - argstr='-nolegendre') + desc="use power polynomials for null hypotheses. 
Don't do this " + "unless you are crazy!", + argstr="-nolegendre", + ) nodmbase = traits.Bool( - desc='don\'t de-mean baseline time series', argstr='-nodmbase') + desc="don't de-mean baseline time series", argstr="-nodmbase" + ) dmbase = traits.Bool( - desc='de-mean baseline time series (default if \'polort\' >= 0)', - argstr='-dmbase') + desc="de-mean baseline time series (default if 'polort' >= 0)", argstr="-dmbase" + ) svd = traits.Bool( - desc='use SVD instead of Gaussian elimination (default)', - argstr='-svd') - nosvd = traits.Bool( - desc='use Gaussian elimination instead of SVD', argstr='-nosvd') + desc="use SVD instead of Gaussian elimination (default)", argstr="-svd" + ) + nosvd = traits.Bool(desc="use Gaussian elimination instead of SVD", argstr="-nosvd") rmsmin = traits.Float( - desc='minimum rms error to reject reduced model (default = 0; don\'t ' - 'use this option normally!)', - argstr='-rmsmin %f') + desc="minimum rms error to reject reduced model (default = 0; don't " + "use this option normally!)", + argstr="-rmsmin %f", + ) nocond = traits.Bool( - desc='DON\'T calculate matrix condition number', argstr='-nocond') + desc="DON'T calculate matrix condition number", argstr="-nocond" + ) singvals = traits.Bool( - desc='print out the matrix singular values', argstr='-singvals') + desc="print out the matrix singular values", argstr="-singvals" + ) goforit = traits.Int( - desc='use this to proceed even if the matrix has bad problems (e.g., ' - 'duplicate columns, large condition number, etc.).', - argstr='-GOFORIT %i') + desc="use this to proceed even if the matrix has bad problems (e.g., " + "duplicate columns, large condition number, etc.).", + argstr="-GOFORIT %i", + ) allzero_OK = traits.Bool( - desc='don\'t consider all zero matrix columns to be the type of error ' - 'that \'gotforit\' is needed to ignore.', - argstr='-allzero_OK') + desc="don't consider all zero matrix columns to be the type of error " + "that 'gotforit' is needed to ignore.", + argstr="-allzero_OK", + ) dname = traits.Tuple( - Str, - Str, - desc='set environmental variable to provided value', - argstr='-D%s=%s') + Str, Str, desc="set environmental variable to provided value", argstr="-D%s=%s" + ) mask = File( - desc='filename of 3D mask dataset; only data time series from within ' - 'the mask will be analyzed; results for voxels outside the mask ' - 'will be set to zero.', - argstr='-mask %s', - exists=True) + desc="filename of 3D mask dataset; only data time series from within " + "the mask will be analyzed; results for voxels outside the mask " + "will be set to zero.", + argstr="-mask %s", + exists=True, + ) automask = traits.Bool( - desc='build a mask automatically from input data (will be slow for ' - 'long time series datasets)', - argstr='-automask') + desc="build a mask automatically from input data (will be slow for " + "long time series datasets)", + argstr="-automask", + ) STATmask = File( - desc='build a mask from provided file, and use this mask for the ' - 'purpose of reporting truncation-to float issues AND for ' - 'computing the FDR curves. The actual results ARE not masked ' - 'with this option (only with \'mask\' or \'automask\' options).', - argstr='-STATmask %s', - exists=True) + desc="build a mask from provided file, and use this mask for the " + "purpose of reporting truncation-to float issues AND for " + "computing the FDR curves. 
The actual results ARE not masked " + "with this option (only with 'mask' or 'automask' options).", + argstr="-STATmask %s", + exists=True, + ) censor = File( - desc='filename of censor .1D time series. This is a file of 1s and ' - '0s, indicating which time points are to be included (1) and ' - 'which are to be excluded (0).', - argstr='-censor %s', - exists=True) + desc="filename of censor .1D time series. This is a file of 1s and " + "0s, indicating which time points are to be included (1) and " + "which are to be excluded (0).", + argstr="-censor %s", + exists=True, + ) polort = traits.Int( - desc='degree of polynomial corresponding to the null hypothesis ' - '[default: 1]', - argstr='-polort %d') + desc="degree of polynomial corresponding to the null hypothesis " + "[default: 1]", + argstr="-polort %d", + ) ortvec = traits.Tuple( - File(desc='filename', exists=True), - Str(desc='label'), - desc='this option lets you input a rectangular array of 1 or more ' - 'baseline vectors from a file. This method is a fast way to ' - 'include a lot of baseline regressors in one step. ', - argstr='-ortvec %s %s') - x1D = File(desc='specify name for saved X matrix', argstr='-x1D %s') + File(desc="filename", exists=True), + Str(desc="label"), + desc="this option lets you input a rectangular array of 1 or more " + "baseline vectors from a file. This method is a fast way to " + "include a lot of baseline regressors in one step. ", + argstr="-ortvec %s %s", + ) + x1D = File(desc="specify name for saved X matrix", argstr="-x1D %s") x1D_stop = traits.Bool( - desc='stop running after writing .xmat.1D file', argstr='-x1D_stop') + desc="stop running after writing .xmat.1D file", argstr="-x1D_stop" + ) cbucket = traits.Str( - desc='Name for dataset in which to save the regression ' - 'coefficients (no statistics). This dataset ' - 'will be used in a -xrestore run [not yet implemented] ' - 'instead of the bucket dataset, if possible.', - argstr='-cbucket %s') - out_file = File(desc='output statistics file', argstr='-bucket %s') + desc="Name for dataset in which to save the regression " + "coefficients (no statistics). 
This dataset " + "will be used in a -xrestore run [not yet implemented] " + "instead of the bucket dataset, if possible.", + argstr="-cbucket %s", + ) + out_file = File(desc="output statistics file", argstr="-bucket %s") num_threads = traits.Int( - desc='run the program with provided number of sub-processes', - argstr='-jobs %d', - nohash=True) - fout = traits.Bool( - desc='output F-statistic for each stimulus', argstr='-fout') + desc="run the program with provided number of sub-processes", + argstr="-jobs %d", + nohash=True, + ) + fout = traits.Bool(desc="output F-statistic for each stimulus", argstr="-fout") rout = traits.Bool( - desc='output the R^2 statistic for each stimulus', argstr='-rout') - tout = traits.Bool( - desc='output the T-statistic for each stimulus', argstr='-tout') + desc="output the R^2 statistic for each stimulus", argstr="-rout" + ) + tout = traits.Bool(desc="output the T-statistic for each stimulus", argstr="-tout") vout = traits.Bool( - desc='output the sample variance (MSE) for each stimulus', - argstr='-vout') + desc="output the sample variance (MSE) for each stimulus", argstr="-vout" + ) nofdr = traits.Bool( - desc="Don't compute the statistic-vs-FDR curves for the bucket " - "dataset.", - argstr='-noFDR') + desc="Don't compute the statistic-vs-FDR curves for the bucket " "dataset.", + argstr="-noFDR", + ) global_times = traits.Bool( - desc='use global timing for stimulus timing files', - argstr='-global_times', - xor=['local_times']) + desc="use global timing for stimulus timing files", + argstr="-global_times", + xor=["local_times"], + ) local_times = traits.Bool( - desc='use local timing for stimulus timing files', - argstr='-local_times', - xor=['global_times']) + desc="use local timing for stimulus timing files", + argstr="-local_times", + xor=["global_times"], + ) num_stimts = traits.Int( - desc='number of stimulus timing files', - argstr='-num_stimts %d', - position=-6) + desc="number of stimulus timing files", argstr="-num_stimts %d", position=-6 + ) stim_times = traits.List( traits.Tuple( - traits.Int(desc='k-th response model'), - File(desc='stimulus timing file', exists=True), - Str(desc='model')), - desc='generate a response model from a set of stimulus times' - ' given in file.', - argstr='-stim_times %d %s \'%s\'...', - position=-5) + traits.Int(desc="k-th response model"), + File(desc="stimulus timing file", exists=True), + Str(desc="model"), + ), + desc="generate a response model from a set of stimulus times" " given in file.", + argstr="-stim_times %d %s '%s'...", + position=-5, + ) stim_label = traits.List( traits.Tuple( - traits.Int(desc='k-th input stimulus'), - Str(desc='stimulus label')), - desc='label for kth input stimulus (e.g., Label1)', - argstr='-stim_label %d %s...', - requires=['stim_times'], - position=-4) + traits.Int(desc="k-th input stimulus"), Str(desc="stimulus label") + ), + desc="label for kth input stimulus (e.g., Label1)", + argstr="-stim_label %d %s...", + requires=["stim_times"], + position=-4, + ) stim_times_subtract = traits.Float( - desc='this option means to subtract specified seconds from each time ' - 'encountered in any \'stim_times\' option. The purpose of this ' - 'option is to make it simple to adjust timing files for the ' - 'removal of images from the start of each imaging run.', - argstr='-stim_times_subtract %f') + desc="this option means to subtract specified seconds from each time " + "encountered in any 'stim_times' option. 
The purpose of this " + "option is to make it simple to adjust timing files for the " + "removal of images from the start of each imaging run.", + argstr="-stim_times_subtract %f", + ) num_glt = traits.Int( - desc='number of general linear tests (i.e., contrasts)', - argstr='-num_glt %d', - position=-3) + desc="number of general linear tests (i.e., contrasts)", + argstr="-num_glt %d", + position=-3, + ) gltsym = traits.List( - Str(desc='symbolic general linear test'), - desc='general linear tests (i.e., contrasts) using symbolic ' - 'conventions (e.g., \'+Label1 -Label2\')', - argstr='-gltsym \'SYM: %s\'...', - position=-2) + Str(desc="symbolic general linear test"), + desc="general linear tests (i.e., contrasts) using symbolic " + "conventions (e.g., '+Label1 -Label2')", + argstr="-gltsym 'SYM: %s'...", + position=-2, + ) glt_label = traits.List( traits.Tuple( - traits.Int(desc='k-th general linear test'), - Str(desc='GLT label')), - desc='general linear test (i.e., contrast) labels', - argstr='-glt_label %d %s...', - requires=['gltsym'], - position=-1) + traits.Int(desc="k-th general linear test"), Str(desc="GLT label") + ), + desc="general linear test (i.e., contrast) labels", + argstr="-glt_label %d %s...", + requires=["gltsym"], + position=-1, + ) class DeconvolveOutputSpec(TraitedSpec): - out_file = File(desc='output statistics file', exists=True) + out_file = File(desc="output statistics file", exists=True) reml_script = File( - desc='automatical generated script to run 3dREMLfit', exists=True) - x1D = File(desc='save out X matrix', exists=True) - cbucket = File(desc='output regression coefficients file (if generated)') + desc="automatical generated script to run 3dREMLfit", exists=True + ) + x1D = File(desc="save out X matrix", exists=True) + cbucket = File(desc="output regression coefficients file (if generated)") class Deconvolve(AFNICommand): @@ -252,28 +295,27 @@ class Deconvolve(AFNICommand): >>> res = deconvolve.run() # doctest: +SKIP """ - _cmd = '3dDeconvolve' + _cmd = "3dDeconvolve" input_spec = DeconvolveInputSpec output_spec = DeconvolveOutputSpec def _format_arg(self, name, trait_spec, value): - if name == 'gltsym': + if name == "gltsym": for n, val in enumerate(value): - if val.startswith('SYM: '): - value[n] = val.lstrip('SYM: ') + if val.startswith("SYM: "): + value[n] = val.lstrip("SYM: ") return super(Deconvolve, self)._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): if skip is None: skip = [] - if len(self.inputs.stim_times) and not isdefined( - self.inputs.num_stimts): + if len(self.inputs.stim_times) and not isdefined(self.inputs.num_stimts): self.inputs.num_stimts = len(self.inputs.stim_times) if len(self.inputs.gltsym) and not isdefined(self.inputs.num_glt): self.inputs.num_glt = len(self.inputs.gltsym) if not isdefined(self.inputs.out_file): - self.inputs.out_file = 'Decon.nii' + self.inputs.out_file = "Decon.nii" return super(Deconvolve, self)._parse_inputs(skip) @@ -281,28 +323,26 @@ def _list_outputs(self): outputs = self.output_spec().get() _gen_fname_opts = {} - _gen_fname_opts['basename'] = self.inputs.out_file - _gen_fname_opts['cwd'] = os.getcwd() + _gen_fname_opts["basename"] = self.inputs.out_file + _gen_fname_opts["cwd"] = os.getcwd() if isdefined(self.inputs.x1D): - if not self.inputs.x1D.endswith('.xmat.1D'): - outputs['x1D'] = os.path.abspath(self.inputs.x1D + '.xmat.1D') + if not self.inputs.x1D.endswith(".xmat.1D"): + outputs["x1D"] = os.path.abspath(self.inputs.x1D + ".xmat.1D") else: - outputs['x1D'] = 
os.path.abspath(self.inputs.x1D)
+                outputs["x1D"] = os.path.abspath(self.inputs.x1D)
         else:
-            outputs['x1D'] = self._gen_fname(
-                suffix='.xmat.1D', **_gen_fname_opts)
+            outputs["x1D"] = self._gen_fname(suffix=".xmat.1D", **_gen_fname_opts)

         if isdefined(self.inputs.cbucket):
-            outputs['cbucket'] = os.path.abspath(self.inputs.cbucket)
+            outputs["cbucket"] = os.path.abspath(self.inputs.cbucket)

-        outputs['reml_script'] = self._gen_fname(
-            suffix='.REML_cmd', **_gen_fname_opts)
+        outputs["reml_script"] = self._gen_fname(suffix=".REML_cmd", **_gen_fname_opts)

         # remove out_file from outputs if x1d_stop set to True
         if self.inputs.x1D_stop:
-            del outputs['out_file'], outputs['cbucket']
+            del outputs["out_file"], outputs["cbucket"]
         else:
-            outputs['out_file'] = os.path.abspath(self.inputs.out_file)
+            outputs["out_file"] = os.path.abspath(self.inputs.out_file)

         return outputs

@@ -311,239 +351,261 @@ class RemlfitInputSpec(AFNICommandInputSpec):
     # mandatory files
     in_files = InputMultiPath(
         File(exists=True),
-        desc='Read time series dataset',
+        desc="Read time series dataset",
         argstr='-input "%s"',
         mandatory=True,
         copyfile=False,
-        sep=" ")
+        sep=" ",
+    )
     matrix = File(
-        desc='the design matrix file, which should have been output from '
-        'Deconvolve via the \'x1D\' option',
-        argstr='-matrix %s',
-        mandatory=True)
+        desc="the design matrix file, which should have been output from "
+        "Deconvolve via the 'x1D' option",
+        argstr="-matrix %s",
+        mandatory=True,
+    )
     # "Semi-Hidden Alternative Ways to Define the Matrix"
     polort = traits.Int(
-        desc='if no \'matrix\' option is given, AND no \'matim\' option, '
-        'create a matrix with Legendre polynomial regressors'
-        'up to the specified order. The default value is 0, which'
-        'produces a matrix with a single column of all ones',
-        argstr='-polort %d',
-        xor=['matrix'])
+        desc="if no 'matrix' option is given, AND no 'matim' option, "
+        "create a matrix with Legendre polynomial regressors "
+        "up to the specified order. The default value is 0, which "
+        "produces a matrix with a single column of all ones",
+        argstr="-polort %d",
+        xor=["matrix"],
+    )
     matim = File(
-        desc='read a standard file as the matrix. You can use only Col as '
-        'a name in GLTs with these nonstandard matrix input methods, '
-        'since the other names come from the \'matrix\' file. '
-        'These mutually exclusive options are ignored if \'matrix\' '
-        'is used.',
-        argstr='-matim %s',
-        xor=['matrix'])
+        desc="read a standard file as the matrix. You can use only Col as "
+        "a name in GLTs with these nonstandard matrix input methods, "
+        "since the other names come from the 'matrix' file. "
+        "These mutually exclusive options are ignored if 'matrix' "
+        "is used.",
+        argstr="-matim %s",
+        xor=["matrix"],
+    )
     # Other arguments
     mask = File(
-        desc='filename of 3D mask dataset; only data time series from within '
-        'the mask will be analyzed; results for voxels outside the mask '
-        'will be set to zero.',
-        argstr='-mask %s',
-        exists=True)
+        desc="filename of 3D mask dataset; only data time series from within "
+        "the mask will be analyzed; results for voxels outside the mask "
+        "will be set to zero.",
+        argstr="-mask %s",
+        exists=True,
+    )
     automask = traits.Bool(
         usedefault=True,
-        argstr='-automask',
-        desc='build a mask automatically from input data (will be slow for '
-        'long time series datasets)')
+        argstr="-automask",
+        desc="build a mask automatically from input data (will be slow for "
+        "long time series datasets)",
+    )
     STATmask = File(
-        desc='filename of 3D mask dataset to be used for the purpose '
-        'of reporting truncation-to float issues AND for computing the '
-        'FDR curves. The actual results ARE not masked with this option '
-        '(only with \'mask\' or \'automask\' options).',
-        argstr='-STATmask %s',
-        exists=True)
+        desc="filename of 3D mask dataset to be used for the purpose "
+        "of reporting truncation-to-float issues AND for computing the "
+        "FDR curves. The actual results are NOT masked with this option "
+        "(only with 'mask' or 'automask' options).",
+        argstr="-STATmask %s",
+        exists=True,
+    )
     addbase = InputMultiPath(
-        File(
-            exists=True,
-            desc='file containing columns to add to regression matrix'),
-        desc='file(s) to add baseline model columns to the matrix with this '
-        'option. Each column in the specified file(s) will be appended '
-        'to the matrix. File(s) must have at least as many rows as the '
-        'matrix does.',
+        File(exists=True, desc="file containing columns to add to regression matrix"),
+        desc="file(s) to add baseline model columns to the matrix with this "
+        "option. Each column in the specified file(s) will be appended "
+        "to the matrix. File(s) must have at least as many rows as the "
+        "matrix does.",
         copyfile=False,
         sep=" ",
-        argstr='-addbase %s')
+        argstr="-addbase %s",
+    )
     slibase = InputMultiPath(
-        File(
-            exists=True,
-            desc='file containing columns to add to regression matrix'),
-        desc='similar to \'addbase\' in concept, BUT each specified file '
-        'must have an integer multiple of the number of slices '
-        'in the input dataset(s); then, separate regression '
-        'matrices are generated for each slice, with the '
-        'first column of the file appended to the matrix for '
-        'the first slice of the dataset, the second column of the file '
-        'appended to the matrix for the first slice of the dataset, '
-        'and so on. Intended to help model physiological noise in FMRI, '
-        'or other effects you want to regress out that might '
-        'change significantly in the inter-slice time intervals. This '
-        'will slow the program down, and make it use a lot more memory '
-        '(to hold all the matrix stuff).',
-        argstr='-slibase %s')
+        File(exists=True, desc="file containing columns to add to regression matrix"),
+        desc="similar to 'addbase' in concept, BUT each specified file "
+        "must have an integer multiple of the number of slices "
+        "in the input dataset(s); then, separate regression "
+        "matrices are generated for each slice, with the "
+        "first column of the file appended to the matrix for "
+        "the first slice of the dataset, the second column of the file "
+        "appended to the matrix for the second slice of the dataset, "
+        "and so on. 
Intended to help model physiological noise in FMRI, "
+        "or other effects you want to regress out that might "
+        "change significantly in the inter-slice time intervals. This "
+        "will slow the program down, and make it use a lot more memory "
+        "(to hold all the matrix stuff).",
+        argstr="-slibase %s",
+    )
     slibase_sm = InputMultiPath(
-        File(
-            exists=True,
-            desc='file containing columns to add to regression matrix'),
-        desc='similar to \'slibase\', BUT each file much be in slice major '
-        'order (i.e. all slice0 columns come first, then all slice1 '
-        'columns, etc).',
-        argstr='-slibase_sm %s')
+        File(exists=True, desc="file containing columns to add to regression matrix"),
+        desc="similar to 'slibase', BUT each file must be in slice-major "
+        "order (i.e. all slice0 columns come first, then all slice1 "
+        "columns, etc).",
+        argstr="-slibase_sm %s",
+    )
     usetemp = traits.Bool(
-        desc='write intermediate stuff to disk, to economize on RAM. '
-        'Using this option might be necessary to run with '
-        '\'slibase\' and with \'Grid\' values above the default, '
-        'since the program has to store a large number of '
-        'matrices for such a problem: two for every slice and '
-        'for every (a,b) pair in the ARMA parameter grid. Temporary '
-        'files are written to the directory given in environment '
-        'variable TMPDIR, or in /tmp, or in ./ (preference is in that '
-        'order)',
-        argstr='-usetemp')
+        desc="write intermediate stuff to disk, to economize on RAM. "
+        "Using this option might be necessary to run with "
+        "'slibase' and with 'Grid' values above the default, "
+        "since the program has to store a large number of "
+        "matrices for such a problem: two for every slice and "
+        "for every (a,b) pair in the ARMA parameter grid. Temporary "
+        "files are written to the directory given in environment "
+        "variable TMPDIR, or in /tmp, or in ./ (preference is in that "
+        "order)",
+        argstr="-usetemp",
+    )
     nodmbase = traits.Bool(
-        desc='by default, baseline columns added to the matrix via '
-        '\'addbase\' or \'slibase\' or \'dsort\' will each have their '
-        'mean removed (as is done in Deconvolve); this option turns this '
-        'centering off',
-        argstr='-nodmbase',
-        requires=['addbase', 'dsort'])
+        desc="by default, baseline columns added to the matrix via "
+        "'addbase' or 'slibase' or 'dsort' will each have their "
+        "mean removed (as is done in Deconvolve); this option turns this "
+        "centering off",
+        argstr="-nodmbase",
+        requires=["addbase", "dsort"],
+    )
     dsort = File(
-        desc='4D dataset to be used as voxelwise baseline regressor',
+        desc="4D dataset to be used as voxelwise baseline regressor",
         exists=True,
         copyfile=False,
-        argstr='-dsort %s')
+        argstr="-dsort %s",
+    )
     dsort_nods = traits.Bool(
-        desc='if \'dsort\' option is used, this command will output '
-        'additional results files excluding the \'dsort\' file',
-        argstr='-dsort_nods',
-        requires=['dsort'])
-    fout = traits.Bool(
-        desc='output F-statistic for each stimulus', argstr='-fout')
+        desc="if 'dsort' option is used, this command will output "
+        "additional results files excluding the 'dsort' file",
+        argstr="-dsort_nods",
+        requires=["dsort"],
+    )
+    fout = traits.Bool(desc="output F-statistic for each stimulus", argstr="-fout")
     rout = traits.Bool(
-        desc='output the R^2 statistic for each stimulus', argstr='-rout')
+        desc="output the R^2 statistic for each stimulus", argstr="-rout"
+    )
     tout = traits.Bool(
-        desc='output the T-statistic for each stimulus; if you use '
-        '\'out_file\' and do not give any of \'fout\', \'tout\','
-        'or \'rout\', then the program assumes \'fout\' is activated.',
-        argstr='-tout')
+        desc="output the T-statistic for each stimulus; if you use "
+        "'out_file' and do not give any of 'fout', 'tout', "
+        "or 'rout', then the program assumes 'fout' is activated.",
+        argstr="-tout",
+    )
     nofdr = traits.Bool(
-        desc='do NOT add FDR curve data to bucket datasets; FDR curves can '
-        'take a long time if \'tout\' is used',
-        argstr='-noFDR')
+        desc="do NOT add FDR curve data to bucket datasets; FDR curves can "
+        "take a long time if 'tout' is used",
+        argstr="-noFDR",
+    )
     nobout = traits.Bool(
-        desc='do NOT add baseline (null hypothesis) regressor betas '
-        'to the \'rbeta_file\' and/or \'obeta_file\' output datasets.',
-        argstr='-nobout')
+        desc="do NOT add baseline (null hypothesis) regressor betas "
+        "to the 'rbeta_file' and/or 'obeta_file' output datasets.",
+        argstr="-nobout",
+    )
     gltsym = traits.List(
         traits.Either(
-            traits.Tuple(File(exists=True), Str()), traits.Tuple(Str(),
-                                                                 Str())),
-        desc='read a symbolic GLT from input file and associate it with a '
-        'label. As in Deconvolve, you can also use the \'SYM:\' method '
-        'to provide the definition of the GLT directly as a string '
-        '(e.g., with \'SYM: +Label1 -Label2\'). Unlike Deconvolve, you '
-        'MUST specify \'SYM: \' if providing the GLT directly as a '
-        'string instead of from a file',
-        argstr='-gltsym "%s" %s...')
+            traits.Tuple(File(exists=True), Str()), traits.Tuple(Str(), Str())
+        ),
+        desc="read a symbolic GLT from input file and associate it with a "
+        "label. As in Deconvolve, you can also use the 'SYM:' method "
+        "to provide the definition of the GLT directly as a string "
+        "(e.g., with 'SYM: +Label1 -Label2'). Unlike Deconvolve, you "
+        "MUST specify 'SYM: ' if providing the GLT directly as a "
+        "string instead of from a file",
+        argstr='-gltsym "%s" %s...',
+    )
     out_file = File(
-        desc='output dataset for beta + statistics from the REML estimation; '
-        'also contains the results of any GLT analysis requested '
-        'in the Deconvolve setup, similar to the \'bucket\' output '
-        'from Deconvolve. This dataset does NOT get the betas '
-        '(or statistics) of those regressors marked as \'baseline\' '
-        'in the matrix file.',
-        argstr='-Rbuck %s')
+        desc="output dataset for beta + statistics from the REML estimation; "
+        "also contains the results of any GLT analysis requested "
+        "in the Deconvolve setup, similar to the 'bucket' output "
+        "from Deconvolve. This dataset does NOT get the betas "
+        "(or statistics) of those regressors marked as 'baseline' "
+        "in the matrix file.",
+        argstr="-Rbuck %s",
+    )
     var_file = File(
-        desc='output dataset for REML variance parameters', argstr='-Rvar %s')
+        desc="output dataset for REML variance parameters", argstr="-Rvar %s"
+    )
     rbeta_file = File(
-        desc='output dataset for beta weights from the REML estimation, '
-        'similar to the \'cbucket\' output from Deconvolve. This dataset '
-        'will contain all the beta weights, for baseline and stimulus '
-        'regressors alike, unless the \'-nobout\' option is given -- '
-        'in that case, this dataset will only get the betas for the '
-        'stimulus regressors.',
-        argstr='-Rbeta %s')
+        desc="output dataset for beta weights from the REML estimation, "
+        "similar to the 'cbucket' output from Deconvolve. This dataset "
+        "will contain all the beta weights, for baseline and stimulus "
+        "regressors alike, unless the '-nobout' option is given -- "
+        "in that case, this dataset will only get the betas for the "
+        "stimulus regressors.",
+        argstr="-Rbeta %s",
+    )
     glt_file = File(
-        desc='output dataset for beta + statistics from the REML estimation, '
-        'but ONLY for the GLTs added on the REMLfit command line itself '
-        'via \'gltsym\'; GLTs from Deconvolve\'s command line will NOT '
-        'be included.',
-        argstr='-Rglt %s')
-    fitts_file = File(
-        desc='ouput dataset for REML fitted model', argstr='-Rfitts %s')
+        desc="output dataset for beta + statistics from the REML estimation, "
+        "but ONLY for the GLTs added on the REMLfit command line itself "
+        "via 'gltsym'; GLTs from Deconvolve's command line will NOT "
+        "be included.",
+        argstr="-Rglt %s",
+    )
+    fitts_file = File(desc="output dataset for REML fitted model", argstr="-Rfitts %s")
     errts_file = File(
-        desc='output dataset for REML residuals = data - fitted model',
-        argstr='-Rerrts %s')
+        desc="output dataset for REML residuals = data - fitted model",
+        argstr="-Rerrts %s",
+    )
     wherr_file = File(
-        desc='dataset for REML residual, whitened using the estimated '
-        'ARMA(1,1) correlation matrix of the noise',
-        argstr='-Rwherr %s')
-    quiet = traits.Bool(
-        desc='turn off most progress messages', argstr='-quiet')
+        desc="dataset for REML residual, whitened using the estimated "
+        "ARMA(1,1) correlation matrix of the noise",
+        argstr="-Rwherr %s",
+    )
+    quiet = traits.Bool(desc="turn off most progress messages", argstr="-quiet")
     verb = traits.Bool(
-        desc='turns on more progress messages, including memory usage '
-        'progress reports at various stages',
-        argstr='-verb')
+        desc="turns on more progress messages, including memory usage "
+        "progress reports at various stages",
+        argstr="-verb",
+    )
     goforit = traits.Bool(
-        desc='With potential issues flagged in the design matrix, an attempt '
-        'will nevertheless be made to fit the model',
-        argstr='-GOFORIT')
+        desc="With potential issues flagged in the design matrix, an attempt "
+        "will nevertheless be made to fit the model",
+        argstr="-GOFORIT",
+    )
     ovar = File(
-        desc='dataset for OLSQ st.dev. parameter (kind of boring)',
-        argstr='-Ovar %s')
+        desc="dataset for OLSQ st.dev. parameter (kind of boring)", argstr="-Ovar %s"
+    )
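# --- Editor's aside --------------------------------------------------------
# A hedged sketch of driving Remlfit from Deconvolve's outputs, based on the
# interface doctest; paths are placeholders and 3dREMLfit must be installed.
from nipype.interfaces import afni

remlfit = afni.Remlfit()
remlfit.inputs.in_files = ["functional.nii", "functional2.nii"]
remlfit.inputs.matrix = "output.1D"     # the design matrix saved via Deconvolve's x1D
remlfit.inputs.out_file = "output.nii"  # -Rbuck beta + statistics bucket
remlfit.inputs.gltsym = [("SYM: +Lab1 -Lab2", "TestSYM"), ("timeseries.txt", "TestFile")]
# res = remlfit.run()  # doctest: +SKIP
# ----------------------------------------------------------------------------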
    obeta = File(
-        desc='dataset for beta weights from the OLSQ estimation',
-        argstr='-Obeta %s')
+        desc="dataset for beta weights from the OLSQ estimation", argstr="-Obeta %s"
+    )
     obuck = File(
-        desc='dataset for beta + statistics from the OLSQ estimation',
-        argstr='-Obuck %s')
+        desc="dataset for beta + statistics from the OLSQ estimation",
+        argstr="-Obuck %s",
+    )
     oglt = File(
-        desc='dataset for beta + statistics from \'gltsym\' options',
-        argstr='-Oglt %s')
-    ofitts = File(desc='dataset for OLSQ fitted model', argstr='-Ofitts %s')
+        desc="dataset for beta + statistics from 'gltsym' options", argstr="-Oglt %s"
+    )
+    ofitts = File(desc="dataset for OLSQ fitted model", argstr="-Ofitts %s")
     oerrts = File(
-        desc='dataset for OLSQ residuals (data - fitted model)',
-        argstr='-Oerrts %s')
+        desc="dataset for OLSQ residuals (data - fitted model)", argstr="-Oerrts %s"
+    )


 class RemlfitOutputSpec(AFNICommandOutputSpec):
     out_file = File(
-        desc='dataset for beta + statistics from the REML estimation (if '
-        'generated')
-    var_file = File(desc='dataset for REML variance parameters (if generated)')
+        desc="dataset for beta + statistics from the REML estimation (if generated)"
+    )
+    var_file = File(desc="dataset for REML variance parameters (if generated)")
-    rbeta_file = File(
-        desc='dataset for beta weights from the REML estimation (if '
-        'generated)')
-    rbeta_file = File(
-        desc='output dataset for beta weights from the REML estimation (if '
-        'generated')
+    # NOTE: a duplicate rbeta_file definition was dropped here; the second
+    # assignment silently overrode the first.
+    rbeta_file = File(
+        desc="output dataset for beta weights from the REML estimation (if generated)"
+    )
     glt_file = File(
-        desc='output dataset for beta + statistics from the REML estimation, '
-        'but ONLY for the GLTs added on the REMLfit command '
-        'line itself via \'gltsym\' (if generated)')
-    fitts_file = File(
-        desc='ouput dataset for REML fitted model (if generated)')
+        desc="output dataset for beta + statistics from the REML estimation, "
+        "but ONLY for the GLTs added on the REMLfit command "
+        "line itself via 'gltsym' (if generated)"
+    )
+    fitts_file = File(desc="output dataset for REML fitted model (if generated)")
     errts_file = File(
-        desc='output dataset for REML residuals = data - fitted model (if '
-        'generated')
+        desc="output dataset for REML residuals = data - fitted model (if generated)"
+    )
     wherr_file = File(
-        desc='dataset for REML residual, whitened using the estimated '
-        'ARMA(1,1) correlation matrix of the noise (if generated)')
-    ovar = File(desc='dataset for OLSQ st.dev. parameter (if generated)')
-    obeta = File(desc='dataset for beta weights from the OLSQ estimation (if '
-                 'generated)')
+        desc="dataset for REML residual, whitened using the estimated "
+        "ARMA(1,1) correlation matrix of the noise (if generated)"
+    )
+    ovar = File(desc="dataset for OLSQ st.dev. parameter (if generated)")
+    obeta = File(
+        desc="dataset for beta weights from the OLSQ estimation (if generated)"
+    )
     obuck = File(
-        desc='dataset for beta + statistics from the OLSQ estimation (if '
-        'generated)')
+        desc="dataset for beta + statistics from the OLSQ estimation (if generated)"
+    )
     oglt = File(
-        desc='dataset for beta + statistics from \'gltsym\' options (if '
-        'generated')
-    ofitts = File(desc='dataset for OLSQ fitted model (if generated)')
-    oerrts = File(desc='dataset for OLSQ residuals = data - fitted model (if '
-                  'generated')
+        desc="dataset for beta + statistics from 'gltsym' options (if generated)"
+    )
+    ofitts = File(desc="dataset for OLSQ fitted model (if generated)")
+    oerrts = File(
+        desc="dataset for OLSQ residuals = data - fitted model (if generated)"
+    )


 class Remlfit(AFNICommand):
@@ -568,7 +630,7 @@ class Remlfit(AFNICommand):
     >>> res = remlfit.run()  # doctest: +SKIP
     """

-    _cmd = '3dREMLfit'
+    _cmd = "3dREMLfit"
     input_spec = RemlfitInputSpec
     output_spec = RemlfitOutputSpec

@@ -589,50 +651,53 @@ def _list_outputs(self):

 class SynthesizeInputSpec(AFNICommandInputSpec):
     cbucket = File(
-        desc='Read the dataset output from '
-        '3dDeconvolve via the \'-cbucket\' option.',
-        argstr='-cbucket %s',
+        desc="Read the dataset output from "
+        "3dDeconvolve via the '-cbucket' option.",
+        argstr="-cbucket %s",
         copyfile=False,
-        mandatory=True)
+        mandatory=True,
+    )
     matrix = File(
-        desc='Read the matrix output from '
-        '3dDeconvolve via the \'-x1D\' option.',
-        argstr='-matrix %s',
+        desc="Read the matrix output from "
+        "3dDeconvolve via the '-x1D' option.",
+        argstr="-matrix %s",
         copyfile=False,
-        mandatory=True)
+        mandatory=True,
+    )
     select = traits.List(
-        Str(desc='selected columns to synthesize'),
-        argstr='-select %s',
-        desc='A list of selected columns from the matrix (and the '
-        'corresponding coefficient sub-bricks from the '
-        'cbucket). Valid types include \'baseline\', '
-        ' \'polort\', \'allfunc\', \'allstim\', \'all\', '
-        'Can also provide \'something\' where something matches '
-        'a stim_label from 3dDeconvolve, and \'digits\' where digits '
-        'are the numbers of the select matrix columns by '
-        'numbers (starting at 0), or number ranges of the form '
-        '\'3..7\' and \'3-7\'.',
-        mandatory=True)
+        Str(desc="selected columns to synthesize"),
+        argstr="-select %s",
+        desc="A list of selected columns from the matrix (and the "
+        "corresponding coefficient sub-bricks from the "
+        "cbucket). Valid types include 'baseline', "
+        "'polort', 'allfunc', 'allstim', 'all'. "
+        "Can also provide 'something' where something matches "
+        "a stim_label from 3dDeconvolve, and 'digits' where digits "
+        "are the numbers of the selected matrix columns "
+        "(starting at 0), or number ranges of the form "
+        "'3..7' and '3-7'.",
+        mandatory=True,
+    )
     out_file = File(
-        name_template='syn',
-        desc='output dataset prefix name (default \'syn\')',
-        argstr='-prefix %s')
+        name_template="syn",
+        desc="output dataset prefix name (default 'syn')",
+        argstr="-prefix %s",
+    )
     dry_run = traits.Bool(
-        desc='Don\'t compute the output, just '
-        'check the inputs.',
-        argstr='-dry')
+        desc="Don't compute the output, just check the inputs.", argstr="-dry"
+    )
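# --- Editor's aside --------------------------------------------------------
# Illustrative only (values from the interface doctest): rebuilding the
# baseline model from 3dDeconvolve's coefficient bucket and design matrix.
from nipype.interfaces import afni

synthesize = afni.Synthesize()
synthesize.inputs.cbucket = "functional.nii"  # -cbucket coefficients from 3dDeconvolve
synthesize.inputs.matrix = "output.1D"        # -x1D matrix from 3dDeconvolve
synthesize.inputs.select = ["baseline"]       # columns/coefficients to synthesize
# syn = synthesize.run()  # doctest: +SKIP
# ----------------------------------------------------------------------------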
     TR = traits.Float(
-        desc='TR to set in the output. The default value of '
-        'TR is read from the header of the matrix file.',
-        argstr='-TR %f')
+        desc="TR to set in the output. The default value of "
+        "TR is read from the header of the matrix file.",
+        argstr="-TR %f",
+    )
     cenfill = traits.Enum(
-        'zero',
-        'nbhr',
-        'none',
-        argstr='-cenfill %s',
-        desc='Determines how censored time points from the '
-        '3dDeconvolve run will be filled. Valid types '
-        'are \'zero\', \'nbhr\' and \'none\'.')
+        "zero",
+        "nbhr",
+        "none",
+        argstr="-cenfill %s",
+        desc="Determines how censored time points from the "
+        "3dDeconvolve run will be filled. Valid types "
+        "are 'zero', 'nbhr' and 'none'.",
+    )


 class Synthesize(AFNICommand):
@@ -656,7 +721,7 @@ class Synthesize(AFNICommand):
     >>> syn = synthesize.run()  # doctest: +SKIP
     """

-    _cmd = '3dSynthesize'
+    _cmd = "3dSynthesize"
     input_spec = SynthesizeInputSpec
     output_spec = AFNICommandOutputSpec

diff --git a/nipype/interfaces/afni/preprocess.py b/nipype/interfaces/afni/preprocess.py
index 17b23a2491..3e0c02eda7 100644
--- a/nipype/interfaces/afni/preprocess.py
+++ b/nipype/interfaces/afni/preprocess.py
@@ -6,108 +6,134 @@
 import os
 import os.path as op

-from ...utils.filemanip import (load_json, save_json, split_filename,
-                                fname_presuffix)
-from ..base import (CommandLineInputSpec, CommandLine, TraitedSpec, traits,
-                    isdefined, File, InputMultiPath, Undefined, Str,
-                    InputMultiObject)
-
-from .base import (AFNICommandBase, AFNICommand, AFNICommandInputSpec,
-                   AFNICommandOutputSpec, AFNIPythonCommandInputSpec,
-                   AFNIPythonCommand, Info, no_afni)
-
-from ...import logging
-iflogger = logging.getLogger('nipype.interface')
+from ...utils.filemanip import load_json, save_json, split_filename, fname_presuffix
+from ..base import (
+    CommandLineInputSpec,
+    CommandLine,
+    TraitedSpec,
+    traits,
+    isdefined,
+    File,
+    InputMultiPath,
+    Undefined,
+    Str,
+    InputMultiObject,
+)
+
+from .base import (
+    AFNICommandBase,
+    AFNICommand,
+    AFNICommandInputSpec,
+    AFNICommandOutputSpec,
+    AFNIPythonCommandInputSpec,
+    AFNIPythonCommand,
+    Info,
+    no_afni,
+)
+
+from ... import logging
+
+iflogger = logging.getLogger("nipype.interface")


 class CentralityInputSpec(AFNICommandInputSpec):
     """Common input spec class for all centrality-related commands """

-    mask = File(
-        desc='mask file to mask input data', argstr='-mask %s', exists=True)
+    mask = File(desc="mask file to mask input data", argstr="-mask %s", exists=True)
     thresh = traits.Float(
-        desc='threshold to exclude connections where corr <= thresh',
-        argstr='-thresh %f')
-    polort = traits.Int(desc='', argstr='-polort %d')
+        desc="threshold to exclude connections where corr <= thresh",
+        argstr="-thresh %f",
+    )
+    polort = traits.Int(desc="", argstr="-polort %d")
     autoclip = traits.Bool(
-        desc='Clip off low-intensity regions in the dataset',
-        argstr='-autoclip')
+        desc="Clip off low-intensity regions in the dataset", argstr="-autoclip"
+    )
     automask = traits.Bool(
-        desc='Mask the dataset to target brain-only voxels',
-        argstr='-automask')
+        desc="Mask the dataset to target brain-only voxels", argstr="-automask"
+    )


 class AlignEpiAnatPyInputSpec(AFNIPythonCommandInputSpec):
     in_file = File(
-        desc='EPI dataset to align',
-        argstr='-epi %s',
+        desc="EPI dataset to align",
+        argstr="-epi %s",
         mandatory=True,
         exists=True,
-        copyfile=False)
+        copyfile=False,
+    )
     anat = File(
-        desc='name of structural dataset',
-        argstr='-anat %s',
+        desc="name of structural dataset",
+        argstr="-anat %s",
         mandatory=True,
         exists=True,
-        copyfile=False)
+        copyfile=False,
+    )
     epi_base = traits.Either(
         traits.Range(low=0),
-        traits.Enum('mean', 'median', 'max'),
-        desc='the epi base used in alignment'
-        'should be one of (0/mean/median/max/subbrick#)',
+        traits.Enum("mean", "median", "max"),
+        desc="the epi base used in alignment; "
+        "should be one of (0/mean/median/max/subbrick#)",
         mandatory=True,
-        argstr='-epi_base %s')
+        argstr="-epi_base %s",
+    )
     anat2epi = traits.Bool(
-        desc='align anatomical to EPI dataset (default)', argstr='-anat2epi')
-    epi2anat = traits.Bool(
-        desc='align EPI to anatomical dataset', argstr='-epi2anat')
+        desc="align anatomical to EPI dataset (default)", argstr="-anat2epi"
+    )
+    epi2anat = traits.Bool(desc="align EPI to anatomical dataset", argstr="-epi2anat")
     save_skullstrip = traits.Bool(
-        desc='save skull-stripped (not aligned)', argstr='-save_skullstrip')
+        desc="save skull-stripped (not aligned)", argstr="-save_skullstrip"
+    )
     suffix = traits.Str(
-        '_al',
-        desc='append suffix to the original anat/epi dataset to use'
+        "_al",
+        desc="append suffix to the original anat/epi dataset to use "
         'in the resulting dataset names (default is "_al")',
         usedefault=True,
-        argstr='-suffix %s')
+        argstr="-suffix %s",
+    )
     epi_strip = traits.Enum(
-        ('3dSkullStrip', '3dAutomask', 'None'),
-        desc='method to mask brain in EPI data'
-        'should be one of[3dSkullStrip]/3dAutomask/None)',
-        argstr='-epi_strip %s')
+        ("3dSkullStrip", "3dAutomask", "None"),
+        desc="method to mask brain in EPI data; "
+        "should be one of 3dSkullStrip (default), 3dAutomask or None",
+        argstr="-epi_strip %s",
+    )
     volreg = traits.Enum(
-        'on',
-        'off',
+        "on",
+        "off",
         usedefault=True,
-        desc='do volume registration on EPI dataset before alignment'
-        'should be \'on\' or \'off\', defaults to \'on\'',
-        argstr='-volreg %s')
+        desc="do volume registration on EPI dataset before alignment; "
+        "should be 'on' or 'off', defaults to 'on'",
+        argstr="-volreg %s",
+    )
     tshift = traits.Enum(
-        'on',
-        'off',
+        "on",
+        "off",
         usedefault=True,
-        desc='do time shifting of EPI dataset before alignment'
-        'should be \'on\' or \'off\', defaults to \'on\'',
-        argstr='-tshift %s')
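# --- Editor's aside --------------------------------------------------------
# A sketch matching the AlignEpiAnatPy class doctest below; assumes AFNI's
# align_epi_anat.py is on PATH. File names are placeholders.
from nipype.interfaces import afni

al_ea = afni.AlignEpiAnatPy()
al_ea.inputs.anat = "structural.nii"
al_ea.inputs.in_file = "functional.nii"
al_ea.inputs.epi_base = 0              # align to the first EPI sub-brick
al_ea.inputs.epi_strip = "3dAutomask"
al_ea.inputs.volreg = "off"            # skip volume registration
al_ea.inputs.tshift = "off"            # skip slice-timing correction
al_ea.inputs.save_skullstrip = True
# res = al_ea.run()  # doctest: +SKIP
# ----------------------------------------------------------------------------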
+        desc="do time shifting of EPI dataset before alignment; "
+        "should be 'on' or 'off', defaults to 'on'",
+        argstr="-tshift %s",
+    )


 class AlignEpiAnatPyOutputSpec(TraitedSpec):
-    anat_al_orig = File(
-        desc="A version of the anatomy that is aligned to the EPI")
-    epi_al_orig = File(
-        desc="A version of the EPI dataset aligned to the anatomy")
+    anat_al_orig = File(desc="A version of the anatomy that is aligned to the EPI")
+    epi_al_orig = File(desc="A version of the EPI dataset aligned to the anatomy")
     epi_tlrc_al = File(
-        desc="A version of the EPI dataset aligned to a standard template")
+        desc="A version of the EPI dataset aligned to a standard template"
+    )
     anat_al_mat = File(desc="matrix to align anatomy to the EPI")
     epi_al_mat = File(desc="matrix to align EPI to anatomy")
     epi_vr_al_mat = File(desc="matrix to volume register EPI")
-    epi_reg_al_mat = File(
-        desc="matrix to volume register and align epi to anatomy")
-    epi_al_tlrc_mat = File(desc="matrix to volume register and align epi"
-                           "to anatomy and put into standard space")
-    epi_vr_motion = File(desc="motion parameters from EPI time-series"
-                         "registration (tsh included in name if slice"
-                         "timing correction is also included).")
+    epi_reg_al_mat = File(desc="matrix to volume register and align epi to anatomy")
+    epi_al_tlrc_mat = File(
+        desc="matrix to volume register and align epi "
+        "to anatomy and put into standard space"
+    )
+    epi_vr_motion = File(
+        desc="motion parameters from EPI time-series "
+        "registration (tsh included in name if slice "
+        "timing correction is also included)."
+    )
     skullstrip = File(desc="skull-stripped (not aligned) volume")

@@ -158,7 +184,8 @@ class AlignEpiAnatPy(AFNIPythonCommand):
     'python2 ...align_epi_anat.py -anat structural.nii -epi_base 0 -epi_strip 3dAutomask -epi functional.nii -save_skullstrip -suffix _al -tshift off -volreg off'
     >>> res = allineate.run()  # doctest: +SKIP
     """

-    _cmd = 'align_epi_anat.py'
+    _cmd = "align_epi_anat.py"
     input_spec = AlignEpiAnatPyInputSpec
     output_spec = AlignEpiAnatPyOutputSpec

@@ -166,288 +193,350 @@ def _list_outputs(self):
         outputs = self.output_spec().get()
         anat_prefix = self._gen_fname(self.inputs.anat)
         epi_prefix = self._gen_fname(self.inputs.in_file)
-        if '+' in anat_prefix:
-            anat_prefix = ''.join(anat_prefix.split('+')[:-1])
-        if '+' in epi_prefix:
-            epi_prefix = ''.join(epi_prefix.split('+')[:-1])
+        if "+" in anat_prefix:
+            anat_prefix = "".join(anat_prefix.split("+")[:-1])
+        if "+" in epi_prefix:
+            epi_prefix = "".join(epi_prefix.split("+")[:-1])
         outputtype = self.inputs.outputtype
-        if outputtype == 'AFNI':
-            ext = '.HEAD'
+        if outputtype == "AFNI":
+            ext = ".HEAD"
         else:
             ext = Info.output_type_to_ext(outputtype)
-        matext = '.1D'
+        matext = ".1D"
         suffix = self.inputs.suffix
         if self.inputs.anat2epi:
-            outputs['anat_al_orig'] = self._gen_fname(
-                anat_prefix, suffix=suffix + '+orig', ext=ext)
-            outputs['anat_al_mat'] = self._gen_fname(
-                anat_prefix, suffix=suffix + '_mat.aff12', ext=matext)
+            outputs["anat_al_orig"] = self._gen_fname(
+                anat_prefix, suffix=suffix + "+orig", ext=ext
+            )
+            outputs["anat_al_mat"] = self._gen_fname(
+                anat_prefix, suffix=suffix + "_mat.aff12", ext=matext
+            )
         if self.inputs.epi2anat:
-            outputs['epi_al_orig'] = self._gen_fname(
-                epi_prefix, suffix=suffix + '+orig', ext=ext)
-            outputs['epi_al_mat'] = self._gen_fname(
-                epi_prefix, suffix=suffix + '_mat.aff12', ext=matext)
-        if self.inputs.volreg == 'on':
-            outputs['epi_vr_al_mat'] = self._gen_fname(
-                epi_prefix, suffix='_vr' + suffix + '_mat.aff12', ext=matext)
-            if self.inputs.tshift == 'on':
-                outputs['epi_vr_motion'] = self._gen_fname(
-                    epi_prefix, suffix='tsh_vr_motion', ext=matext)
-            elif self.inputs.tshift == 'off':
-                outputs['epi_vr_motion'] = self._gen_fname(
-                    epi_prefix, suffix='vr_motion', ext=matext)
-        if self.inputs.volreg == 'on' and self.inputs.epi2anat:
-            outputs['epi_reg_al_mat'] = self._gen_fname(
-                epi_prefix, suffix='_reg' + suffix + '_mat.aff12', ext=matext)
+            outputs["epi_al_orig"] = self._gen_fname(
+                epi_prefix, suffix=suffix + "+orig", ext=ext
+            )
+            outputs["epi_al_mat"] = self._gen_fname(
+                epi_prefix, suffix=suffix + "_mat.aff12", ext=matext
+            )
+        if self.inputs.volreg == "on":
+            outputs["epi_vr_al_mat"] = self._gen_fname(
+                epi_prefix, suffix="_vr" + suffix + "_mat.aff12", ext=matext
+            )
+            if self.inputs.tshift == "on":
+                outputs["epi_vr_motion"] = self._gen_fname(
+                    epi_prefix, suffix="tsh_vr_motion", ext=matext
+                )
+            elif self.inputs.tshift == "off":
+                outputs["epi_vr_motion"] = self._gen_fname(
+                    epi_prefix, suffix="vr_motion", ext=matext
+                )
+        if self.inputs.volreg == "on" and self.inputs.epi2anat:
+            outputs["epi_reg_al_mat"] = self._gen_fname(
+                epi_prefix, suffix="_reg" + suffix + "_mat.aff12", ext=matext
+            )
         if self.inputs.save_skullstrip:
             outputs.skullstrip = self._gen_fname(
-                anat_prefix, suffix='_ns' + '+orig', ext=ext)
+                anat_prefix, suffix="_ns" + "+orig", ext=ext
+            )
         return outputs


 class AllineateInputSpec(AFNICommandInputSpec):
     in_file = File(
-        desc='input file to 3dAllineate',
-        argstr='-source %s',
+        desc="input file to 3dAllineate",
+        argstr="-source %s",
         mandatory=True,
         exists=True,
-        copyfile=False)
+        copyfile=False,
+    )
     reference = File(
         exists=True,
-        argstr='-base %s',
-        desc='file to be used as reference, the first volume will be used if '
-        'not given the reference will be the first volume of in_file.')
+        argstr="-base %s",
+        desc="file to be used as reference; if not given, the first volume "
+        "of in_file will be used as the reference.",
+    )
     out_file = File(
-        desc='output file from 3dAllineate',
-        argstr='-prefix %s',
-        name_template='%s_allineate',
-        name_source='in_file',
+        desc="output file from 3dAllineate",
+        argstr="-prefix %s",
+        name_template="%s_allineate",
+        name_source="in_file",
         hash_files=False,
-        xor=['allcostx'])
+        xor=["allcostx"],
+    )
     out_param_file = File(
-        argstr='-1Dparam_save %s',
-        desc='Save the warp parameters in ASCII (.1D) format.',
-        xor=['in_param_file', 'allcostx'])
+        argstr="-1Dparam_save %s",
+        desc="Save the warp parameters in ASCII (.1D) format.",
+        xor=["in_param_file", "allcostx"],
+    )
     in_param_file = File(
         exists=True,
-        argstr='-1Dparam_apply %s',
-        desc='Read warp parameters from file and apply them to '
-        'the source dataset, and produce a new dataset',
-        xor=['out_param_file'])
+        argstr="-1Dparam_apply %s",
+        desc="Read warp parameters from file and apply them to "
+        "the source dataset, and produce a new dataset",
+        xor=["out_param_file"],
+    )
     out_matrix = File(
-        argstr='-1Dmatrix_save %s',
-        desc='Save the transformation matrix for each volume.',
-        xor=['in_matrix', 'allcostx'])
+        argstr="-1Dmatrix_save %s",
+        desc="Save the transformation matrix for each volume.",
+        xor=["in_matrix", "allcostx"],
+    )
     in_matrix = File(
-        desc='matrix to align input file',
-        argstr='-1Dmatrix_apply %s',
+        desc="matrix to align input file",
+        argstr="-1Dmatrix_apply %s",
         position=-3,
-        xor=['out_matrix'])
+        xor=["out_matrix"],
+    )
     overwrite = traits.Bool(
-        desc='overwrite output file if it already exists', argstr='-overwrite')
+        desc="overwrite output file if it already exists", argstr="-overwrite"
+    )
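# --- Editor's aside --------------------------------------------------------
# Minimal affine-application sketch from the Allineate class doctest:
# in_matrix applies a precomputed transform (xor with out_matrix).
# File names are placeholders.
from nipype.interfaces import afni

allineate = afni.Allineate()
allineate.inputs.in_file = "functional.nii"
allineate.inputs.out_file = "functional_allineate.nii"
allineate.inputs.in_matrix = "cmatrix.mat"  # -1Dmatrix_apply
# res = allineate.run()  # doctest: +SKIP
# ----------------------------------------------------------------------------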
     allcostx = File(
-        desc=
-        'Compute and print ALL available cost functionals for the un-warped inputs'
-        'AND THEN QUIT. If you use this option none of the other expected outputs will be produced',
-        argstr='-allcostx |& tee %s',
+        desc="Compute and print ALL available cost functionals for the un-warped inputs "
+        "AND THEN QUIT. If you use this option none of the other expected outputs will be produced",
+        argstr="-allcostx |& tee %s",
         position=-1,
-        xor=['out_file', 'out_matrix', 'out_param_file', 'out_weight_file'])
+        xor=["out_file", "out_matrix", "out_param_file", "out_weight_file"],
+    )
     _cost_funcs = [
-        'leastsq', 'ls', 'mutualinfo', 'mi', 'corratio_mul', 'crM',
-        'norm_mutualinfo', 'nmi', 'hellinger', 'hel', 'corratio_add', 'crA',
-        'corratio_uns', 'crU'
+        "leastsq",
+        "ls",
+        "mutualinfo",
+        "mi",
+        "corratio_mul",
+        "crM",
+        "norm_mutualinfo",
+        "nmi",
+        "hellinger",
+        "hel",
+        "corratio_add",
+        "crA",
+        "corratio_uns",
+        "crU",
     ]
     cost = traits.Enum(
         *_cost_funcs,
-        argstr='-cost %s',
-        desc='Defines the \'cost\' function that defines the matching between '
-        'the source and the base')
-    _interp_funcs = [
-        'nearestneighbour', 'linear', 'cubic', 'quintic', 'wsinc5'
-    ]
+        argstr="-cost %s",
+        desc="Defines the 'cost' function that defines the matching between "
+        "the source and the base"
+    )
+    _interp_funcs = ["nearestneighbour", "linear", "cubic", "quintic", "wsinc5"]
     interpolation = traits.Enum(
         *_interp_funcs[:-1],
-        argstr='-interp %s',
-        desc='Defines interpolation method to use during matching')
+        argstr="-interp %s",
+        desc="Defines interpolation method to use during matching"
+    )
     final_interpolation = traits.Enum(
         *_interp_funcs,
-        argstr='-final %s',
-        desc='Defines interpolation method used to create the output dataset')
+        argstr="-final %s",
+        desc="Defines interpolation method used to create the output dataset"
+    )

     # TECHNICAL OPTIONS (used for fine control of the program):
     nmatch = traits.Int(
-        argstr='-nmatch %d',
-        desc='Use at most n scattered points to match the datasets.')
+        argstr="-nmatch %d",
+        desc="Use at most n scattered points to match the datasets.",
+    )
     no_pad = traits.Bool(
-        argstr='-nopad', desc='Do not use zero-padding on the base image.')
+        argstr="-nopad", desc="Do not use zero-padding on the base image."
+    )
     zclip = traits.Bool(
-        argstr='-zclip',
-        desc='Replace negative values in the input datasets (source & base) '
-        'with zero.')
+        argstr="-zclip",
+        desc="Replace negative values in the input datasets (source & base) "
+        "with zero.",
+    )
     convergence = traits.Float(
-        argstr='-conv %f',
-        desc='Convergence test in millimeters (default 0.05mm).')
-    usetemp = traits.Bool(argstr='-usetemp', desc='temporary file use')
+        argstr="-conv %f", desc="Convergence test in millimeters (default 0.05mm)."
+    )
+    usetemp = traits.Bool(argstr="-usetemp", desc="temporary file use")
     check = traits.List(
         traits.Enum(*_cost_funcs),
-        argstr='-check %s',
-        desc='After cost functional optimization is done, start at the final '
-        'parameters and RE-optimize using this new cost functions. If '
-        'the results are too different, a warning message will be '
-        'printed. However, the final parameters from the original '
-        'optimization will be used to create the output dataset.')
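# --- Editor's aside --------------------------------------------------------
# Sketch of a cost-only evaluation via allcostx (values from the class
# doctest). Because allcostx is xor'd with out_file/out_matrix/
# out_param_file/out_weight_file, no aligned dataset is produced in this mode.
from nipype.interfaces import afni

costs = afni.Allineate()
costs.inputs.in_file = "functional.nii"
costs.inputs.reference = "structural.nii"
costs.inputs.allcostx = "out.allcostX.txt"
# res = costs.run()  # doctest: +SKIP
# ----------------------------------------------------------------------------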
+        argstr="-check %s",
+        desc="After cost functional optimization is done, start at the final "
+        "parameters and RE-optimize using these new cost functions. If "
+        "the results are too different, a warning message will be "
+        "printed. However, the final parameters from the original "
+        "optimization will be used to create the output dataset.",
+    )

     # ** PARAMETERS THAT AFFECT THE COST OPTIMIZATION STRATEGY **
     one_pass = traits.Bool(
-        argstr='-onepass',
-        desc='Use only the refining pass -- do not try a coarse resolution '
-        'pass first. Useful if you know that only small amounts of '
-        'image alignment are needed.')
+        argstr="-onepass",
+        desc="Use only the refining pass -- do not try a coarse resolution "
+        "pass first. Useful if you know that only small amounts of "
+        "image alignment are needed.",
+    )
     two_pass = traits.Bool(
-        argstr='-twopass',
-        desc='Use a two pass alignment strategy for all volumes, searching '
-        'for a large rotation+shift and then refining the alignment.')
+        argstr="-twopass",
+        desc="Use a two pass alignment strategy for all volumes, searching "
+        "for a large rotation+shift and then refining the alignment.",
+    )
     two_blur = traits.Float(
-        argstr='-twoblur %f',
-        desc='Set the blurring radius for the first pass in mm.')
+        argstr="-twoblur %f", desc="Set the blurring radius for the first pass in mm."
+    )
     two_first = traits.Bool(
-        argstr='-twofirst',
-        desc='Use -twopass on the first image to be registered, and '
-        'then on all subsequent images from the source dataset, '
-        'use results from the first image\'s coarse pass to start '
-        'the fine pass.')
+        argstr="-twofirst",
+        desc="Use -twopass on the first image to be registered, and "
+        "then on all subsequent images from the source dataset, "
+        "use results from the first image's coarse pass to start "
+        "the fine pass.",
+    )
     two_best = traits.Int(
-        argstr='-twobest %d',
-        desc='In the coarse pass, use the best \'bb\' set of initial'
-        'points to search for the starting point for the fine'
-        'pass. If bb==0, then no search is made for the best'
-        'starting point, and the identity transformation is'
-        'used as the starting point. [Default=5; min=0 max=11]')
+        argstr="-twobest %d",
+        desc="In the coarse pass, use the best 'bb' set of initial "
+        "points to search for the starting point for the fine "
+        "pass. If bb==0, then no search is made for the best "
+        "starting point, and the identity transformation is "
+        "used as the starting point. [Default=5; min=0 max=11]",
+    )
     fine_blur = traits.Float(
-        argstr='-fineblur %f',
-        desc='Set the blurring radius to use in the fine resolution '
-        'pass to \'x\' mm. A small amount (1-2 mm?) of blurring at '
-        'the fine step may help with convergence, if there is '
-        'some problem, especially if the base volume is very noisy. '
-        '[Default == 0 mm = no blurring at the final alignment pass]')
+        argstr="-fineblur %f",
+        desc="Set the blurring radius to use in the fine resolution "
+        "pass to 'x' mm. A small amount (1-2 mm?) of blurring at "
+        "the fine step may help with convergence, if there is "
+        "some problem, especially if the base volume is very noisy. "
+        "[Default == 0 mm = no blurring at the final alignment pass]",
+    )
     center_of_mass = Str(
-        argstr='-cmass%s',
-        desc='Use the center-of-mass calculation to bracket the shifts.')
+        argstr="-cmass%s",
+        desc="Use the center-of-mass calculation to bracket the shifts.",
+    )
     autoweight = Str(
-        argstr='-autoweight%s',
-        desc='Compute a weight function using the 3dAutomask '
-        'algorithm plus some blurring of the base image.')
+        argstr="-autoweight%s",
+        desc="Compute a weight function using the 3dAutomask "
+        "algorithm plus some blurring of the base image.",
+    )
     automask = traits.Int(
-        argstr='-automask+%d',
-        desc='Compute a mask function, set a value for dilation or 0.')
+        argstr="-automask+%d",
+        desc="Compute a mask function, set a value for dilation or 0.",
+    )
     autobox = traits.Bool(
-        argstr='-autobox',
-        desc='Expand the -automask function to enclose a rectangular '
-        'box that holds the irregular mask.')
+        argstr="-autobox",
+        desc="Expand the -automask function to enclose a rectangular "
+        "box that holds the irregular mask.",
+    )
     nomask = traits.Bool(
-        argstr='-nomask',
-        desc='Don\'t compute the autoweight/mask; if -weight is not '
-        'also used, then every voxel will be counted equally.')
+        argstr="-nomask",
+        desc="Don't compute the autoweight/mask; if -weight is not "
+        "also used, then every voxel will be counted equally.",
+    )
     weight_file = File(
-        argstr='-weight %s',
+        argstr="-weight %s",
         exists=True,
-        deprecated='1.0.0',
-        new_name='weight',
-        desc='Set the weighting for each voxel in the base dataset; '
-        'larger weights mean that voxel count more in the cost function. '
-        'Must be defined on the same grid as the base dataset')
+        deprecated="1.0.0",
+        new_name="weight",
+        desc="Set the weighting for each voxel in the base dataset; "
+        "larger weights mean that voxels count more in the cost function. "
+        "Must be defined on the same grid as the base dataset",
+    )
     weight = traits.Either(
         File(exists=True),
         traits.Float(),
-        argstr='-weight %s',
-        desc='Set the weighting for each voxel in the base dataset; '
-        'larger weights mean that voxel count more in the cost function. '
-        'If an image file is given, the volume must be defined on the '
-        'same grid as the base dataset')
+        argstr="-weight %s",
+        desc="Set the weighting for each voxel in the base dataset; "
+        "larger weights mean that voxels count more in the cost function. 
" + "If an image file is given, the volume must be defined on the " + "same grid as the base dataset", + ) out_weight_file = File( - argstr='-wtprefix %s', - desc='Write the weight volume to disk as a dataset', - xor=['allcostx']) + argstr="-wtprefix %s", + desc="Write the weight volume to disk as a dataset", + xor=["allcostx"], + ) source_mask = File( - exists=True, argstr='-source_mask %s', desc='mask the input dataset') + exists=True, argstr="-source_mask %s", desc="mask the input dataset" + ) source_automask = traits.Int( - argstr='-source_automask+%d', - desc='Automatically mask the source dataset with dilation or 0.') + argstr="-source_automask+%d", + desc="Automatically mask the source dataset with dilation or 0.", + ) warp_type = traits.Enum( - 'shift_only', - 'shift_rotate', - 'shift_rotate_scale', - 'affine_general', - argstr='-warp %s', - desc='Set the warp type.') + "shift_only", + "shift_rotate", + "shift_rotate_scale", + "affine_general", + argstr="-warp %s", + desc="Set the warp type.", + ) warpfreeze = traits.Bool( - argstr='-warpfreeze', - desc='Freeze the non-rigid body parameters after first volume.') + argstr="-warpfreeze", + desc="Freeze the non-rigid body parameters after first volume.", + ) replacebase = traits.Bool( - argstr='-replacebase', - desc='If the source has more than one volume, then after the first ' - 'volume is aligned to the base.') + argstr="-replacebase", + desc="If the source has more than one volume, then after the first " + "volume is aligned to the base.", + ) replacemeth = traits.Enum( *_cost_funcs, - argstr='-replacemeth %s', - desc='After first volume is aligned, switch method for later volumes. ' - 'For use with \'-replacebase\'.') + argstr="-replacemeth %s", + desc="After first volume is aligned, switch method for later volumes. " + "For use with '-replacebase'." + ) epi = traits.Bool( - argstr='-EPI', - desc='Treat the source dataset as being composed of warped ' - 'EPI slices, and the base as comprising anatomically ' - '\'true\' images. Only phase-encoding direction image ' - 'shearing and scaling will be allowed with this option.') + argstr="-EPI", + desc="Treat the source dataset as being composed of warped " + "EPI slices, and the base as comprising anatomically " + "'true' images. Only phase-encoding direction image " + "shearing and scaling will be allowed with this option.", + ) maxrot = traits.Float( - argstr='-maxrot %f', desc='Maximum allowed rotation in degrees.') - maxshf = traits.Float( - argstr='-maxshf %f', desc='Maximum allowed shift in mm.') - maxscl = traits.Float( - argstr='-maxscl %f', desc='Maximum allowed scaling factor.') - maxshr = traits.Float( - argstr='-maxshr %f', desc='Maximum allowed shearing factor.') + argstr="-maxrot %f", desc="Maximum allowed rotation in degrees." 
+ ) + maxshf = traits.Float(argstr="-maxshf %f", desc="Maximum allowed shift in mm.") + maxscl = traits.Float(argstr="-maxscl %f", desc="Maximum allowed scaling factor.") + maxshr = traits.Float(argstr="-maxshr %f", desc="Maximum allowed shearing factor.") master = File( exists=True, - argstr='-master %s', - desc='Write the output dataset on the same grid as this file.') + argstr="-master %s", + desc="Write the output dataset on the same grid as this file.", + ) newgrid = traits.Float( - argstr='-newgrid %f', - desc='Write the output dataset using isotropic grid spacing in mm.') + argstr="-newgrid %f", + desc="Write the output dataset using isotropic grid spacing in mm.", + ) # Non-linear experimental _nwarp_types = [ - 'bilinear', 'cubic', 'quintic', 'heptic', 'nonic', 'poly3', 'poly5', - 'poly7', 'poly9' + "bilinear", + "cubic", + "quintic", + "heptic", + "nonic", + "poly3", + "poly5", + "poly7", + "poly9", ] # same non-hellenistic nwarp = traits.Enum( *_nwarp_types, - argstr='-nwarp %s', - desc='Experimental nonlinear warping: bilinear or legendre poly.') - _dirs = ['X', 'Y', 'Z', 'I', 'J', 'K'] + argstr="-nwarp %s", + desc="Experimental nonlinear warping: bilinear or legendre poly." + ) + _dirs = ["X", "Y", "Z", "I", "J", "K"] nwarp_fixmot = traits.List( traits.Enum(*_dirs), - argstr='-nwarp_fixmot%s...', - desc='To fix motion along directions.') + argstr="-nwarp_fixmot%s...", + desc="To fix motion along directions.", + ) nwarp_fixdep = traits.List( traits.Enum(*_dirs), - argstr='-nwarp_fixdep%s...', - desc='To fix non-linear warp dependency along directions.') - verbose = traits.Bool( - argstr='-verb', desc='Print out verbose progress reports.') + argstr="-nwarp_fixdep%s...", + desc="To fix non-linear warp dependency along directions.", + ) + verbose = traits.Bool(argstr="-verb", desc="Print out verbose progress reports.") quiet = traits.Bool( - argstr='-quiet', desc="Don't print out verbose progress reports.") + argstr="-quiet", desc="Don't print out verbose progress reports." 
+    )


 class AllineateOutputSpec(TraitedSpec):
-    out_file = File(exists=True, desc='output image file name')
-    out_matrix = File(exists=True, desc='matrix to align input file')
-    out_param_file = File(exists=True, desc='warp parameters')
-    out_weight_file = File(exists=True, desc='weight volume')
+    out_file = File(exists=True, desc="output image file name")
+    out_matrix = File(exists=True, desc="matrix to align input file")
+    out_param_file = File(exists=True, desc="warp parameters")
+    out_weight_file = File(exists=True, desc="weight volume")
     allcostx = File(
-        desc=
-        'Compute and print ALL available cost functionals for the un-warped inputs'
+        desc="Compute and print ALL available cost functionals for the un-warped inputs"
     )


 class Allineate(AFNICommand):
@@ -486,7 +575,7 @@ class Allineate(AFNICommand):
     >>> res = allineate.run()  # doctest: +SKIP
     """

-    _cmd = '3dAllineate'
+    _cmd = "3dAllineate"
     input_spec = AllineateInputSpec
     output_spec = AllineateOutputSpec

@@ -494,58 +583,63 @@ def _list_outputs(self):
         outputs = super(Allineate, self)._list_outputs()

         if self.inputs.out_weight_file:
-            outputs['out_weight_file'] = op.abspath(
-                self.inputs.out_weight_file)
+            outputs["out_weight_file"] = op.abspath(self.inputs.out_weight_file)

         if self.inputs.out_matrix:
             path, base, ext = split_filename(self.inputs.out_matrix)
-            if ext.lower() not in ['.1d', '.1D']:
-                outputs['out_matrix'] = self._gen_fname(
-                    self.inputs.out_matrix, suffix='.aff12.1D')
+            if ext.lower() not in [".1d", ".1D"]:
+                outputs["out_matrix"] = self._gen_fname(
+                    self.inputs.out_matrix, suffix=".aff12.1D"
+                )
             else:
-                outputs['out_matrix'] = op.abspath(self.inputs.out_matrix)
+                outputs["out_matrix"] = op.abspath(self.inputs.out_matrix)

         if self.inputs.out_param_file:
             path, base, ext = split_filename(self.inputs.out_param_file)
-            if ext.lower() not in ['.1d', '.1D']:
-                outputs['out_param_file'] = self._gen_fname(
-                    self.inputs.out_param_file, suffix='.param.1D')
+            if ext.lower() not in [".1d", ".1D"]:
+                outputs["out_param_file"] = self._gen_fname(
+                    self.inputs.out_param_file, suffix=".param.1D"
+                )
             else:
-                outputs['out_param_file'] = op.abspath(
-                    self.inputs.out_param_file)
+                outputs["out_param_file"] = op.abspath(self.inputs.out_param_file)

         if self.inputs.allcostx:
-            outputs['allcostX'] = os.path.abspath(self.inputs.allcostx)
+            # key must match the output trait, which is named 'allcostx'
+            outputs["allcostx"] = os.path.abspath(self.inputs.allcostx)
         return outputs


 class AutoTcorrelateInputSpec(AFNICommandInputSpec):
     in_file = File(
-        desc='timeseries x space (volume or surface) file',
-        argstr='%s',
+        desc="timeseries x space (volume or surface) file",
+        argstr="%s",
         position=-1,
         mandatory=True,
         exists=True,
-        copyfile=False)
+        copyfile=False,
+    )
     polort = traits.Int(
-        desc='Remove polynomical trend of order m or -1 for no detrending',
-        argstr='-polort %d')
-    eta2 = traits.Bool(desc='eta^2 similarity', argstr='-eta2')
-    mask = File(exists=True, desc='mask of voxels', argstr='-mask %s')
+        desc="Remove polynomial trend of order m or -1 for no detrending",
+        argstr="-polort %d",
+    )
+    eta2 = traits.Bool(desc="eta^2 similarity", argstr="-eta2")
+    mask = File(exists=True, desc="mask of voxels", argstr="-mask %s")
     mask_only_targets = traits.Bool(
-        desc='use mask only on targets voxels',
-        argstr='-mask_only_targets',
-        xor=['mask_source'])
+        desc="use mask only on targets voxels",
+        argstr="-mask_only_targets",
+        xor=["mask_source"],
+    )
     mask_source = File(
         exists=True,
-        desc='mask for source voxels',
-        argstr='-mask_source %s',
-        xor=['mask_only_targets'])
+        desc="mask for source voxels",
+        argstr="-mask_source %s",
+        xor=["mask_only_targets"],
+    )
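# --- Editor's aside --------------------------------------------------------
# Usage sketch for AutoTcorrelate (values from the interface doctest);
# mask.nii is a placeholder brain mask.
from nipype.interfaces import afni

corr = afni.AutoTcorrelate()
corr.inputs.in_file = "functional.nii"
corr.inputs.polort = -1            # no detrending
corr.inputs.eta2 = True            # eta^2 similarity instead of correlation
corr.inputs.mask = "mask.nii"
corr.inputs.mask_only_targets = True
# res = corr.run()  # doctest: +SKIP
# ----------------------------------------------------------------------------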
     out_file = File(
-        name_template='%s_similarity_matrix.1D',
-        desc='output image file name',
-        argstr='-prefix %s',
-        name_source='in_file')
+        name_template="%s_similarity_matrix.1D",
+        desc="output image file name",
+        argstr="-prefix %s",
+        name_source="in_file",
+    )


 class AutoTcorrelate(AFNICommand):
@@ -573,44 +667,48 @@ class AutoTcorrelate(AFNICommand):
     input_spec = AutoTcorrelateInputSpec
     output_spec = AFNICommandOutputSpec
-    _cmd = '3dAutoTcorrelate'
+    _cmd = "3dAutoTcorrelate"

     def _overload_extension(self, value, name=None):
         path, base, ext = split_filename(value)
-        if ext.lower() not in ['.1d', '.1D', '.nii.gz', '.nii']:
-            ext = ext + '.1D'
+        if ext.lower() not in [".1d", ".1D", ".nii.gz", ".nii"]:
+            ext = ext + ".1D"
         return os.path.join(path, base + ext)


 class AutomaskInputSpec(AFNICommandInputSpec):
     in_file = File(
-        desc='input file to 3dAutomask',
-        argstr='%s',
+        desc="input file to 3dAutomask",
+        argstr="%s",
         position=-1,
         mandatory=True,
         exists=True,
-        copyfile=False)
+        copyfile=False,
+    )
     out_file = File(
-        name_template='%s_mask',
-        desc='output image file name',
-        argstr='-prefix %s',
-        name_source='in_file')
+        name_template="%s_mask",
+        desc="output image file name",
+        argstr="-prefix %s",
+        name_source="in_file",
+    )
     brain_file = File(
-        name_template='%s_masked',
-        desc='output file from 3dAutomask',
-        argstr='-apply_prefix %s',
-        name_source='in_file')
+        name_template="%s_masked",
+        desc="output file from 3dAutomask",
+        argstr="-apply_prefix %s",
+        name_source="in_file",
+    )
     clfrac = traits.Float(
-        desc='sets the clip level fraction (must be 0.1-0.9). A small value '
-        'will tend to make the mask larger [default = 0.5].',
-        argstr='-clfrac %s')
-    dilate = traits.Int(desc='dilate the mask outwards', argstr='-dilate %s')
-    erode = traits.Int(desc='erode the mask inwards', argstr='-erode %s')
+        desc="sets the clip level fraction (must be 0.1-0.9). A small value "
+        "will tend to make the mask larger [default = 0.5].",
+        argstr="-clfrac %s",
+    )
+    dilate = traits.Int(desc="dilate the mask outwards", argstr="-dilate %s")
+    erode = traits.Int(desc="erode the mask inwards", argstr="-erode %s")


 class AutomaskOutputSpec(TraitedSpec):
-    out_file = File(desc='mask file', exists=True)
-    brain_file = File(desc='brain file (skull stripped)', exists=True)
+    out_file = File(desc="mask file", exists=True)
+    brain_file = File(desc="brain file (skull stripped)", exists=True)


 class Automask(AFNICommand):
@@ -633,81 +731,85 @@ class Automask(AFNICommand):
     """

-    _cmd = '3dAutomask'
+    _cmd = "3dAutomask"
     input_spec = AutomaskInputSpec
     output_spec = AutomaskOutputSpec


 class AutoTLRCInputSpec(CommandLineInputSpec):
     outputtype = traits.Enum(
-        'AFNI', list(Info.ftypes.keys()), desc='AFNI output filetype')
+        "AFNI", list(Info.ftypes.keys()), desc="AFNI output filetype"
+    )
     in_file = File(
-        desc='Original anatomical volume (+orig).'
-        'The skull is removed by this script'
-        'unless instructed otherwise (-no_ss).',
-        argstr='-input %s',
+        desc="Original anatomical volume (+orig). "
+        "The skull is removed by this script "
+        "unless instructed otherwise (-no_ss).",
+        argstr="-input %s",
         mandatory=True,
         exists=True,
-        copyfile=False)
+        copyfile=False,
+    )
     base = traits.Str(
-        desc=' Reference anatomical volume'
-        ' Usually this volume is in some standard space like'
-        ' TLRC or MNI space and with afni dataset view of'
-        ' (+tlrc).'
-        ' Preferably, this reference volume should have had'
-        ' the skull removed but that is not mandatory. 
- ' AFNI\'s distribution contains several templates.' + desc=" Reference anatomical volume" + " Usually this volume is in some standard space like" + " TLRC or MNI space and with afni dataset view of" + " (+tlrc)." + " Preferably, this reference volume should have had" + " the skull removed but that is not mandatory." + " AFNI's distribution contains several templates." ' For a longer list, use "whereami -show_templates"' - 'TT_N27+tlrc --> Single subject, skull stripped volume.' - ' This volume is also known as ' - ' N27_SurfVol_NoSkull+tlrc elsewhere in ' - ' AFNI and SUMA land.' - ' (www.loni.ucla.edu, www.bic.mni.mcgill.ca)' - ' This template has a full set of FreeSurfer' - ' (surfer.nmr.mgh.harvard.edu)' - ' surface models that can be used in SUMA. ' - ' For details, see Talairach-related link:' - ' https://afni.nimh.nih.gov/afni/suma' - 'TT_icbm452+tlrc --> Average volume of 452 normal brains.' - ' Skull Stripped. (www.loni.ucla.edu)' - 'TT_avg152T1+tlrc --> Average volume of 152 normal brains.' - ' Skull Stripped.(www.bic.mni.mcgill.ca)' - 'TT_EPI+tlrc --> EPI template from spm2, masked as TT_avg152T1' - ' TT_avg152 and TT_EPI volume sources are from' - ' SPM\'s distribution. (www.fil.ion.ucl.ac.uk/spm/)' - 'If you do not specify a path for the template, the script' - 'will attempt to locate the template AFNI\'s binaries directory.' - 'NOTE: These datasets have been slightly modified from' - ' their original size to match the standard TLRC' - ' dimensions (Jean Talairach and Pierre Tournoux' - ' Co-Planar Stereotaxic Atlas of the Human Brain' - ' Thieme Medical Publishers, New York, 1988). ' - ' That was done for internal consistency in AFNI.' - ' You may use the original form of these' - ' volumes if you choose but your TLRC coordinates' - ' will not be consistent with AFNI\'s TLRC database' - ' (San Antonio Talairach Daemon database), for example.', + "TT_N27+tlrc --> Single subject, skull stripped volume." + " This volume is also known as " + " N27_SurfVol_NoSkull+tlrc elsewhere in " + " AFNI and SUMA land." + " (www.loni.ucla.edu, www.bic.mni.mcgill.ca)" + " This template has a full set of FreeSurfer" + " (surfer.nmr.mgh.harvard.edu)" + " surface models that can be used in SUMA. " + " For details, see Talairach-related link:" + " https://afni.nimh.nih.gov/afni/suma" + "TT_icbm452+tlrc --> Average volume of 452 normal brains." + " Skull Stripped. (www.loni.ucla.edu)" + "TT_avg152T1+tlrc --> Average volume of 152 normal brains." + " Skull Stripped.(www.bic.mni.mcgill.ca)" + "TT_EPI+tlrc --> EPI template from spm2, masked as TT_avg152T1" + " TT_avg152 and TT_EPI volume sources are from" + " SPM's distribution. (www.fil.ion.ucl.ac.uk/spm/)" + "If you do not specify a path for the template, the script" + "will attempt to locate the template AFNI's binaries directory." + "NOTE: These datasets have been slightly modified from" + " their original size to match the standard TLRC" + " dimensions (Jean Talairach and Pierre Tournoux" + " Co-Planar Stereotaxic Atlas of the Human Brain" + " Thieme Medical Publishers, New York, 1988). " + " That was done for internal consistency in AFNI." 
+ " You may use the original form of these" + " volumes if you choose but your TLRC coordinates" + " will not be consistent with AFNI's TLRC database" + " (San Antonio Talairach Daemon database), for example.", mandatory=True, - argstr='-base %s') + argstr="-base %s", + ) no_ss = traits.Bool( - desc='Do not strip skull of input data set' - '(because skull has already been removed' - 'or because template still has the skull)' - 'NOTE: The -no_ss option is not all that optional.' - ' Here is a table of when you should and should not use -no_ss' - ' Template Template' - ' WITH skull WITHOUT skull' - ' Dset.' - ' WITH skull -no_ss xxx ' - ' ' - ' WITHOUT skull No Cigar -no_ss' - ' ' - ' Template means: Your template of choice' - ' Dset. means: Your anatomical dataset' - ' -no_ss means: Skull stripping should not be attempted on Dset' - ' xxx means: Don\'t put anything, the script will strip Dset' - ' No Cigar means: Don\'t try that combination, it makes no sense.', - argstr='-no_ss') + desc="Do not strip skull of input data set" + "(because skull has already been removed" + "or because template still has the skull)" + "NOTE: The -no_ss option is not all that optional." + " Here is a table of when you should and should not use -no_ss" + " Template Template" + " WITH skull WITHOUT skull" + " Dset." + " WITH skull -no_ss xxx " + " " + " WITHOUT skull No Cigar -no_ss" + " " + " Template means: Your template of choice" + " Dset. means: Your anatomical dataset" + " -no_ss means: Skull stripping should not be attempted on Dset" + " xxx means: Don't put anything, the script will strip Dset" + " No Cigar means: Don't try that combination, it makes no sense.", + argstr="-no_ss", + ) class AutoTLRC(AFNICommand): @@ -728,85 +830,97 @@ class AutoTLRC(AFNICommand): >>> res = autoTLRC.run() # doctest: +SKIP """ - _cmd = '@auto_tlrc' + + _cmd = "@auto_tlrc" input_spec = AutoTLRCInputSpec output_spec = AFNICommandOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - ext = '.HEAD' - outputs['out_file'] = os.path.abspath( - self._gen_fname(self.inputs.in_file, suffix='+tlrc') + ext) + ext = ".HEAD" + outputs["out_file"] = os.path.abspath( + self._gen_fname(self.inputs.in_file, suffix="+tlrc") + ext + ) return outputs class BandpassInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dBandpass', - argstr='%s', + desc="input file to 3dBandpass", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_bp', - desc='output file from 3dBandpass', - argstr='-prefix %s', + name_template="%s_bp", + desc="output file from 3dBandpass", + argstr="-prefix %s", position=1, - name_source='in_file') - lowpass = traits.Float( - desc='lowpass', argstr='%f', position=-2, mandatory=True) - highpass = traits.Float( - desc='highpass', argstr='%f', position=-3, mandatory=True) - mask = File(desc='mask file', position=2, argstr='-mask %s', exists=True) + name_source="in_file", + ) + lowpass = traits.Float(desc="lowpass", argstr="%f", position=-2, mandatory=True) + highpass = traits.Float(desc="highpass", argstr="%f", position=-3, mandatory=True) + mask = File(desc="mask file", position=2, argstr="-mask %s", exists=True) despike = traits.Bool( - argstr='-despike', - desc='Despike each time series before other processing. Hopefully, ' - 'you don\'t actually need to do this, which is why it is ' - 'optional.') + argstr="-despike", + desc="Despike each time series before other processing. 
Hopefully, " + "you don't actually need to do this, which is why it is " + "optional.", + ) orthogonalize_file = InputMultiPath( File(exists=True), - argstr='-ort %s', - desc='Also orthogonalize input to columns in f.1D. Multiple \'-ort\' ' - 'options are allowed.') + argstr="-ort %s", + desc="Also orthogonalize input to columns in f.1D. Multiple '-ort' " + "options are allowed.", + ) orthogonalize_dset = File( exists=True, - argstr='-dsort %s', - desc='Orthogonalize each voxel to the corresponding voxel time series ' - 'in dataset \'fset\', which must have the same spatial and ' - 'temporal grid structure as the main input dataset. At present, ' - 'only one \'-dsort\' option is allowed.') + argstr="-dsort %s", + desc="Orthogonalize each voxel to the corresponding voxel time series " + "in dataset 'fset', which must have the same spatial and " + "temporal grid structure as the main input dataset. At present, " + "only one '-dsort' option is allowed.", + ) no_detrend = traits.Bool( - argstr='-nodetrend', - desc='Skip the quadratic detrending of the input that occurs before ' - 'the FFT-based bandpassing. You would only want to do this if ' - 'the dataset had been detrended already in some other program.') + argstr="-nodetrend", + desc="Skip the quadratic detrending of the input that occurs before " + "the FFT-based bandpassing. You would only want to do this if " + "the dataset had been detrended already in some other program.", + ) tr = traits.Float( - argstr='-dt %f', - desc='Set time step (TR) in sec [default=from dataset header].') + argstr="-dt %f", desc="Set time step (TR) in sec [default=from dataset header]." + ) nfft = traits.Int( - argstr='-nfft %d', desc='Set the FFT length [must be a legal value].') + argstr="-nfft %d", desc="Set the FFT length [must be a legal value]." + ) normalize = traits.Bool( - argstr='-norm', - desc='Make all output time series have L2 norm = 1 (i.e., sum of ' - 'squares = 1).') + argstr="-norm", + desc="Make all output time series have L2 norm = 1 (i.e., sum of " + "squares = 1).", + ) automask = traits.Bool( - argstr='-automask', desc='Create a mask from the input dataset.') + argstr="-automask", desc="Create a mask from the input dataset." + ) blur = traits.Float( - argstr='-blur %f', - desc='Blur (inside the mask only) with a filter width (FWHM) of ' - '\'fff\' millimeters.') + argstr="-blur %f", + desc="Blur (inside the mask only) with a filter width (FWHM) of " + "'fff' millimeters.", + ) localPV = traits.Float( - argstr='-localPV %f', - desc='Replace each vector by the local Principal Vector (AKA first ' - 'singular vector) from a neighborhood of radius \'rrr\' ' - 'millimeters. Note that the PV time series is L2 normalized. ' - 'This option is mostly for Bob Cox to have fun with.') + argstr="-localPV %f", + desc="Replace each vector by the local Principal Vector (AKA first " + "singular vector) from a neighborhood of radius 'rrr' " + "millimeters. Note that the PV time series is L2 normalized. " + "This option is mostly for Bob Cox to have fun with.", + ) notrans = traits.Bool( - argstr='-notrans', - desc='Don\'t check for initial positive transients in the data. ' - 'The test is a little slow, so skipping it is OK, if you KNOW ' - 'the data time series are transient-free.') + argstr="-notrans", + desc="Don't check for initial positive transients in the data. 
" + "The test is a little slow, so skipping it is OK, if you KNOW " + "the data time series are transient-free.", + ) class Bandpass(AFNICommand): @@ -831,46 +945,52 @@ class Bandpass(AFNICommand): """ - _cmd = '3dBandpass' + _cmd = "3dBandpass" input_spec = BandpassInputSpec output_spec = AFNICommandOutputSpec class BlurInMaskInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dSkullStrip', - argstr='-input %s', + desc="input file to 3dSkullStrip", + argstr="-input %s", position=1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_blur', - desc='output to the file', - argstr='-prefix %s', - name_source='in_file', - position=-1) + name_template="%s_blur", + desc="output to the file", + argstr="-prefix %s", + name_source="in_file", + position=-1, + ) mask = File( - desc='Mask dataset, if desired. Blurring will occur only within the ' - 'mask. Voxels NOT in the mask will be set to zero in the output.', - argstr='-mask %s') + desc="Mask dataset, if desired. Blurring will occur only within the " + "mask. Voxels NOT in the mask will be set to zero in the output.", + argstr="-mask %s", + ) multimask = File( - desc='Multi-mask dataset -- each distinct nonzero value in dataset ' - 'will be treated as a separate mask for blurring purposes.', - argstr='-Mmask %s') + desc="Multi-mask dataset -- each distinct nonzero value in dataset " + "will be treated as a separate mask for blurring purposes.", + argstr="-Mmask %s", + ) automask = traits.Bool( - desc='Create an automask from the input dataset.', argstr='-automask') - fwhm = traits.Float( - desc='fwhm kernel size', argstr='-FWHM %f', mandatory=True) + desc="Create an automask from the input dataset.", argstr="-automask" + ) + fwhm = traits.Float(desc="fwhm kernel size", argstr="-FWHM %f", mandatory=True) preserve = traits.Bool( - desc='Normally, voxels not in the mask will be set to zero in the ' - 'output. If you want the original values in the dataset to be ' - 'preserved in the output, use this option.', - argstr='-preserve') + desc="Normally, voxels not in the mask will be set to zero in the " + "output. 
If you want the original values in the dataset to be " + "preserved in the output, use this option.", + argstr="-preserve", + ) float_out = traits.Bool( - desc='Save dataset as floats, no matter what the input data type is.', - argstr='-float') - options = Str(desc='options', argstr='%s', position=2) + desc="Save dataset as floats, no matter what the input data type is.", + argstr="-float", + ) + options = Str(desc="options", argstr="%s", position=2) class BlurInMask(AFNICommand): @@ -893,34 +1013,39 @@ class BlurInMask(AFNICommand): """ - _cmd = '3dBlurInMask' + _cmd = "3dBlurInMask" input_spec = BlurInMaskInputSpec output_spec = AFNICommandOutputSpec class BlurToFWHMInputSpec(AFNICommandInputSpec): in_file = File( - desc='The dataset that will be smoothed', - argstr='-input %s', + desc="The dataset that will be smoothed", + argstr="-input %s", mandatory=True, - exists=True) + exists=True, + ) automask = traits.Bool( - desc='Create an automask from the input dataset.', argstr='-automask') + desc="Create an automask from the input dataset.", argstr="-automask" + ) fwhm = traits.Float( - desc='Blur until the 3D FWHM reaches this value (in mm)', - argstr='-FWHM %f') + desc="Blur until the 3D FWHM reaches this value (in mm)", argstr="-FWHM %f" + ) fwhmxy = traits.Float( - desc='Blur until the 2D (x,y)-plane FWHM reaches this value (in mm)', - argstr='-FWHMxy %f') + desc="Blur until the 2D (x,y)-plane FWHM reaches this value (in mm)", + argstr="-FWHMxy %f", + ) blurmaster = File( - desc='The dataset whose smoothness controls the process.', - argstr='-blurmaster %s', - exists=True) + desc="The dataset whose smoothness controls the process.", + argstr="-blurmaster %s", + exists=True, + ) mask = File( - desc='Mask dataset, if desired. Voxels NOT in mask will be set to zero ' - 'in output.', - argstr='-mask %s', - exists=True) + desc="Mask dataset, if desired. 
Voxels NOT in mask will be set to zero " + "in output.", + argstr="-mask %s", + exists=True, + ) class BlurToFWHM(AFNICommand): @@ -942,37 +1067,42 @@ class BlurToFWHM(AFNICommand): >>> res = blur.run() # doctest: +SKIP """ - _cmd = '3dBlurToFWHM' + + _cmd = "3dBlurToFWHM" input_spec = BlurToFWHMInputSpec output_spec = AFNICommandOutputSpec class ClipLevelInputSpec(CommandLineInputSpec): in_file = File( - desc='input file to 3dClipLevel', - argstr='%s', + desc="input file to 3dClipLevel", + argstr="%s", position=-1, mandatory=True, - exists=True) + exists=True, + ) mfrac = traits.Float( - desc='Use the number ff instead of 0.50 in the algorithm', - argstr='-mfrac %s', - position=2) + desc="Use the number ff instead of 0.50 in the algorithm", + argstr="-mfrac %s", + position=2, + ) doall = traits.Bool( - desc='Apply the algorithm to each sub-brick separately.', - argstr='-doall', + desc="Apply the algorithm to each sub-brick separately.", + argstr="-doall", position=3, - xor=('grad')) + xor=("grad"), + ) grad = File( - desc='Also compute a \'gradual\' clip level as a function of voxel ' - 'position, and output that to a dataset.', - argstr='-grad %s', + desc="Also compute a 'gradual' clip level as a function of voxel " + "position, and output that to a dataset.", + argstr="-grad %s", position=3, - xor=('doall')) + xor=("doall"), + ) class ClipLevelOutputSpec(TraitedSpec): - clip_val = traits.Float(desc='output') + clip_val = traits.Float(desc="output") class ClipLevel(AFNICommandBase): @@ -993,7 +1123,8 @@ class ClipLevel(AFNICommandBase): >>> res = cliplevel.run() # doctest: +SKIP """ - _cmd = '3dClipLevel' + + _cmd = "3dClipLevel" input_spec = ClipLevelInputSpec output_spec = ClipLevelOutputSpec @@ -1001,16 +1132,16 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() - outfile = os.path.join(os.getcwd(), 'stat_result.json') + outfile = os.path.join(os.getcwd(), "stat_result.json") if runtime is None: try: - clip_val = load_json(outfile)['stat'] + clip_val = load_json(outfile)["stat"] except IOError: return self.run().outputs else: clip_val = [] - for line in runtime.stdout.split('\n'): + for line in runtime.stdout.split("\n"): if line: values = line.split() if len(values) > 1: @@ -1031,17 +1162,19 @@ class DegreeCentralityInputSpec(CentralityInputSpec): """ in_file = File( - desc='input file to 3dDegreeCentrality', - argstr='%s', + desc="input file to 3dDegreeCentrality", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) sparsity = traits.Float( - desc='only take the top percent of connections', argstr='-sparsity %f') + desc="only take the top percent of connections", argstr="-sparsity %f" + ) oned_file = Str( - desc='output filepath to text dump of correlation matrix', - argstr='-out1D %s') + desc="output filepath to text dump of correlation matrix", argstr="-out1D %s" + ) class DegreeCentralityOutputSpec(AFNICommandOutputSpec): @@ -1049,9 +1182,10 @@ class DegreeCentralityOutputSpec(AFNICommandOutputSpec): """ oned_file = File( - desc='The text output of the similarity matrix computed after ' - 'thresholding with one-dimensional and ijk voxel indices, ' - 'correlations, image extents, and affine matrix.') + desc="The text output of the similarity matrix computed after " + "thresholding with one-dimensional and ijk voxel indices, " + "correlations, image extents, and affine matrix." 
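ClipLevel, reformatted above, is unusual among these interfaces in that it returns a scalar rather than an image; a sketch with a placeholder filename showing how the clip_val output (parsed from 3dClipLevel's stdout in aggregate_outputs) is consumed:

>>> from nipype.interfaces import afni
>>> cliplevel = afni.ClipLevel()
>>> cliplevel.inputs.in_file = 'anatomical.nii'  # placeholder
>>> res = cliplevel.run()  # doctest: +SKIP
>>> threshold = res.outputs.clip_val  # doctest: +SKIP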
+    )


 class DegreeCentrality(AFNICommand):
@@ -1076,7 +1210,7 @@ class DegreeCentrality(AFNICommand):
 
     """
 
-    _cmd = '3dDegreeCentrality'
+    _cmd = "3dDegreeCentrality"
     input_spec = DegreeCentralityInputSpec
     output_spec = DegreeCentralityOutputSpec
 
@@ -1088,24 +1222,26 @@ def _list_outputs(self):
         # Update outputs dictionary if oned file is defined
         outputs = super(DegreeCentrality, self)._list_outputs()
         if self.inputs.oned_file:
-            outputs['oned_file'] = os.path.abspath(self.inputs.oned_file)
+            outputs["oned_file"] = os.path.abspath(self.inputs.oned_file)
         return outputs
 
 
 class DespikeInputSpec(AFNICommandInputSpec):
     in_file = File(
-        desc='input file to 3dDespike',
-        argstr='%s',
+        desc="input file to 3dDespike",
+        argstr="%s",
         position=-1,
         mandatory=True,
         exists=True,
-        copyfile=False)
+        copyfile=False,
+    )
     out_file = File(
-        name_template='%s_despike',
-        desc='output image file name',
-        argstr='-prefix %s',
-        name_source='in_file')
+        name_template="%s_despike",
+        desc="output image file name",
+        argstr="-prefix %s",
+        name_source="in_file",
+    )
 
 
 class Despike(AFNICommand):
@@ -1126,24 +1262,26 @@ class Despike(AFNICommand):
 
     """
 
-    _cmd = '3dDespike'
+    _cmd = "3dDespike"
     input_spec = DespikeInputSpec
     output_spec = AFNICommandOutputSpec
 
 
 class DetrendInputSpec(AFNICommandInputSpec):
     in_file = File(
-        desc='input file to 3dDetrend',
-        argstr='%s',
+        desc="input file to 3dDetrend",
+        argstr="%s",
         position=-1,
         mandatory=True,
         exists=True,
-        copyfile=False)
+        copyfile=False,
+    )
     out_file = File(
-        name_template='%s_detrend',
-        desc='output image file name',
-        argstr='-prefix %s',
-        name_source='in_file')
+        name_template="%s_detrend",
+        desc="output image file name",
+        argstr="-prefix %s",
+        name_source="in_file",
+    )
 
 
 class Detrend(AFNICommand):
@@ -1167,7 +1305,7 @@ class Detrend(AFNICommand):
 
     """
 
-    _cmd = '3dDetrend'
+    _cmd = "3dDetrend"
     input_spec = DetrendInputSpec
     output_spec = AFNICommandOutputSpec
 
@@ -1177,43 +1315,52 @@ class ECMInputSpec(CentralityInputSpec):
     """
 
     in_file = File(
-        desc='input file to 3dECM',
-        argstr='%s',
+        desc="input file to 3dECM",
+        argstr="%s",
         position=-1,
         mandatory=True,
         exists=True,
-        copyfile=False)
+        copyfile=False,
+    )
     sparsity = traits.Float(
-        desc='only take the top percent of connections', argstr='-sparsity %f')
+        desc="only take the top percent of connections", argstr="-sparsity %f"
+    )
     full = traits.Bool(
-        desc='Full power method; enables thresholding; automatically selected '
-        'if -thresh or -sparsity are set',
-        argstr='-full')
+        desc="Full power method; enables thresholding; automatically selected "
+        "if -thresh or -sparsity are set",
+        argstr="-full",
+    )
     fecm = traits.Bool(
-        desc='Fast centrality method; substantial speed increase but cannot '
-        'accomodate thresholding; automatically selected if -thresh or '
-        '-sparsity are not set',
-        argstr='-fecm')
+        desc="Fast centrality method; substantial speed increase but cannot "
+        "accommodate thresholding; automatically selected if -thresh or "
+        "-sparsity are not set",
+        argstr="-fecm",
+    )
     shift = traits.Float(
-        desc='shift correlation coefficients in similarity matrix to enforce '
-        'non-negativity, s >= 0.0; default = 0.0 for -full, 1.0 for -fecm',
-        argstr='-shift %f')
+        desc="shift correlation coefficients in similarity matrix to enforce "
+        "non-negativity, s >= 0.0; default = 0.0 for -full, 1.0 for -fecm",
+        argstr="-shift %f",
+    )
     scale = traits.Float(
-        desc='scale correlation coefficients in similarity matrix to after '
-        'shifting, x >= 0.0; default = 1.0 for -full, 0.5 for -fecm',
-
argstr='-scale %f') + desc="scale correlation coefficients in similarity matrix to after " + "shifting, x >= 0.0; default = 1.0 for -full, 0.5 for -fecm", + argstr="-scale %f", + ) eps = traits.Float( - desc='sets the stopping criterion for the power iteration; ' - 'l2|v_old - v_new| < eps*|v_old|; default = 0.001', - argstr='-eps %f') + desc="sets the stopping criterion for the power iteration; " + "l2|v_old - v_new| < eps*|v_old|; default = 0.001", + argstr="-eps %f", + ) max_iter = traits.Int( - desc='sets the maximum number of iterations to use in the power ' - 'iteration; default = 1000', - argstr='-max_iter %d') + desc="sets the maximum number of iterations to use in the power " + "iteration; default = 1000", + argstr="-max_iter %d", + ) memory = traits.Float( - desc='Limit memory consumption on system by setting the amount of GB ' - 'to limit the algorithm to; default = 2GB', - argstr='-memory %f') + desc="Limit memory consumption on system by setting the amount of GB " + "to limit the algorithm to; default = 2GB", + argstr="-memory %f", + ) class ECM(AFNICommand): @@ -1238,38 +1385,39 @@ class ECM(AFNICommand): """ - _cmd = '3dECM' + _cmd = "3dECM" input_spec = ECMInputSpec output_spec = AFNICommandOutputSpec class FimInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dfim+', - argstr='-input %s', + desc="input file to 3dfim+", + argstr="-input %s", position=1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_fim', - desc='output image file name', - argstr='-bucket %s', - name_source='in_file') + name_template="%s_fim", + desc="output image file name", + argstr="-bucket %s", + name_source="in_file", + ) ideal_file = File( - desc='ideal time series file name', - argstr='-ideal_file %s', + desc="ideal time series file name", + argstr="-ideal_file %s", position=2, mandatory=True, - exists=True) + exists=True, + ) fim_thr = traits.Float( - desc='fim internal mask threshold value', - argstr='-fim_thr %f', - position=3) + desc="fim internal mask threshold value", argstr="-fim_thr %f", position=3 + ) out = Str( - desc='Flag to output the specified parameter', - argstr='-out %s', - position=4) + desc="Flag to output the specified parameter", argstr="-out %s", position=4 + ) class Fim(AFNICommand): @@ -1295,32 +1443,33 @@ class Fim(AFNICommand): """ - _cmd = '3dfim+' + _cmd = "3dfim+" input_spec = FimInputSpec output_spec = AFNICommandOutputSpec class FourierInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dFourier', - argstr='%s', + desc="input file to 3dFourier", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_fourier', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') - lowpass = traits.Float( - desc='lowpass', argstr='-lowpass %f', mandatory=True) - highpass = traits.Float( - desc='highpass', argstr='-highpass %f', mandatory=True) + name_template="%s_fourier", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) + lowpass = traits.Float(desc="lowpass", argstr="-lowpass %f", mandatory=True) + highpass = traits.Float(desc="highpass", argstr="-highpass %f", mandatory=True) retrend = traits.Bool( - desc='Any mean and linear trend are removed before filtering. This ' - 'will restore the trend after filtering.', - argstr='-retrend') + desc="Any mean and linear trend are removed before filtering. 
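A usage sketch for the ECM interface above; note that mask is inherited from CentralityInputSpec (defined earlier in this file), and, per the desc text, setting sparsity implies the full power method:

>>> from nipype.interfaces import afni
>>> ecm = afni.ECM()
>>> ecm.inputs.in_file = 'functional.nii'  # placeholders
>>> ecm.inputs.mask = 'mask.nii'
>>> ecm.inputs.sparsity = 0.1  # keep top 0.1% of connections
>>> res = ecm.run()  # doctest: +SKIP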
This " + "will restore the trend after filtering.", + argstr="-retrend", + ) class Fourier(AFNICommand): @@ -1345,48 +1494,48 @@ class Fourier(AFNICommand): """ - _cmd = '3dFourier' + _cmd = "3dFourier" input_spec = FourierInputSpec output_spec = AFNICommandOutputSpec class HistInputSpec(CommandLineInputSpec): in_file = File( - desc='input file to 3dHist', - argstr='-input %s', + desc="input file to 3dHist", + argstr="-input %s", position=1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - desc='Write histogram to niml file with this prefix', - name_template='%s_hist', + desc="Write histogram to niml file with this prefix", + name_template="%s_hist", keep_extension=False, - argstr='-prefix %s', - name_source=['in_file']) + argstr="-prefix %s", + name_source=["in_file"], + ) showhist = traits.Bool( - False, - usedefault=True, - desc='write a text visual histogram', - argstr='-showhist') + False, usedefault=True, desc="write a text visual histogram", argstr="-showhist" + ) out_show = File( - name_template='%s_hist.out', - desc='output image file name', + name_template="%s_hist.out", + desc="output image file name", keep_extension=False, - argstr='> %s', - name_source='in_file', - position=-1) - mask = File( - desc='matrix to align input file', argstr='-mask %s', exists=True) - nbin = traits.Int(desc='number of bins', argstr='-nbin %d') - max_value = traits.Float(argstr='-max %f', desc='maximum intensity value') - min_value = traits.Float(argstr='-min %f', desc='minimum intensity value') - bin_width = traits.Float(argstr='-binwidth %f', desc='bin width') + argstr="> %s", + name_source="in_file", + position=-1, + ) + mask = File(desc="matrix to align input file", argstr="-mask %s", exists=True) + nbin = traits.Int(desc="number of bins", argstr="-nbin %d") + max_value = traits.Float(argstr="-max %f", desc="maximum intensity value") + min_value = traits.Float(argstr="-min %f", desc="minimum intensity value") + bin_width = traits.Float(argstr="-binwidth %f", desc="bin width") class HistOutputSpec(TraitedSpec): - out_file = File(desc='output file', exists=True) - out_show = File(desc='output visual histogram') + out_file = File(desc="output file", exists=True) + out_show = File(desc="output visual histogram") class Hist(AFNICommandBase): @@ -1408,7 +1557,7 @@ class Hist(AFNICommandBase): """ - _cmd = '3dHist' + _cmd = "3dHist" input_spec = HistInputSpec output_spec = HistOutputSpec _redirect_x = True @@ -1426,14 +1575,14 @@ def _parse_inputs(self, skip=None): if not self.inputs.showhist: if skip is None: skip = [] - skip += ['out_show'] + skip += ["out_show"] return super(Hist, self)._parse_inputs(skip=skip) def _list_outputs(self): outputs = super(Hist, self)._list_outputs() - outputs['out_file'] += '.niml.hist' + outputs["out_file"] += ".niml.hist" if not self.inputs.showhist: - outputs['out_show'] = Undefined + outputs["out_show"] = Undefined return outputs @@ -1442,12 +1591,13 @@ class LFCDInputSpec(CentralityInputSpec): """ in_file = File( - desc='input file to 3dLFCD', - argstr='%s', + desc="input file to 3dLFCD", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) class LFCD(AFNICommand): @@ -1471,33 +1621,32 @@ class LFCD(AFNICommand): >>> res = lfcd.run() # doctest: +SKIP """ - _cmd = '3dLFCD' + _cmd = "3dLFCD" input_spec = LFCDInputSpec output_spec = AFNICommandOutputSpec class MaskaveInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dmaskave', - argstr='%s', + desc="input file to 
3dmaskave", + argstr="%s", position=-2, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_maskave.1D', - desc='output image file name', + name_template="%s_maskave.1D", + desc="output image file name", keep_extension=True, - argstr='> %s', - name_source='in_file', - position=-1) + argstr="> %s", + name_source="in_file", + position=-1, + ) mask = File( - desc='matrix to align input file', - argstr='-mask %s', - position=1, - exists=True) - quiet = traits.Bool( - desc='matrix to align input file', argstr='-quiet', position=2) + desc="matrix to align input file", argstr="-mask %s", position=1, exists=True + ) + quiet = traits.Bool(desc="matrix to align input file", argstr="-quiet", position=2) class Maskave(AFNICommand): @@ -1521,40 +1670,39 @@ class Maskave(AFNICommand): """ - _cmd = '3dmaskave' + _cmd = "3dmaskave" input_spec = MaskaveInputSpec output_spec = AFNICommandOutputSpec class MeansInputSpec(AFNICommandInputSpec): in_file_a = File( - desc='input file to 3dMean', - argstr='%s', + desc="input file to 3dMean", + argstr="%s", position=-2, mandatory=True, - exists=True) + exists=True, + ) in_file_b = File( - desc='another input file to 3dMean', - argstr='%s', - position=-1, - exists=True) + desc="another input file to 3dMean", argstr="%s", position=-1, exists=True + ) datum = traits.Str( - desc='Sets the data type of the output dataset', argstr='-datum %s') + desc="Sets the data type of the output dataset", argstr="-datum %s" + ) out_file = File( - name_template='%s_mean', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file_a') - scale = Str(desc='scaling of output', argstr='-%sscale') - non_zero = traits.Bool(desc='use only non-zero values', argstr='-non_zero') - std_dev = traits.Bool(desc='calculate std dev', argstr='-stdev') - sqr = traits.Bool(desc='mean square instead of value', argstr='-sqr') - summ = traits.Bool(desc='take sum, (not average)', argstr='-sum') - count = traits.Bool( - desc='compute count of non-zero voxels', argstr='-count') - mask_inter = traits.Bool( - desc='create intersection mask', argstr='-mask_inter') - mask_union = traits.Bool(desc='create union mask', argstr='-mask_union') + name_template="%s_mean", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file_a", + ) + scale = Str(desc="scaling of output", argstr="-%sscale") + non_zero = traits.Bool(desc="use only non-zero values", argstr="-non_zero") + std_dev = traits.Bool(desc="calculate std dev", argstr="-stdev") + sqr = traits.Bool(desc="mean square instead of value", argstr="-sqr") + summ = traits.Bool(desc="take sum, (not average)", argstr="-sum") + count = traits.Bool(desc="compute count of non-zero voxels", argstr="-count") + mask_inter = traits.Bool(desc="create intersection mask", argstr="-mask_inter") + mask_union = traits.Bool(desc="create union mask", argstr="-mask_union") class Means(AFNICommand): @@ -1586,81 +1734,82 @@ class Means(AFNICommand): """ - _cmd = '3dMean' + _cmd = "3dMean" input_spec = MeansInputSpec output_spec = AFNICommandOutputSpec class OutlierCountInputSpec(CommandLineInputSpec): in_file = File( - argstr='%s', - mandatory=True, - exists=True, - position=-2, - desc='input dataset') + argstr="%s", mandatory=True, exists=True, position=-2, desc="input dataset" + ) mask = File( exists=True, - argstr='-mask %s', - xor=['autoclip', 'automask'], - desc='only count voxels within the given mask') + argstr="-mask %s", + xor=["autoclip", "automask"], + desc="only count voxels within 
the given mask", + ) qthr = traits.Range( value=1e-3, low=0.0, high=1.0, usedefault=True, - argstr='-qthr %.5f', - desc='indicate a value for q to compute alpha') + argstr="-qthr %.5f", + desc="indicate a value for q to compute alpha", + ) autoclip = traits.Bool( False, usedefault=True, - argstr='-autoclip', - xor=['mask'], - desc='clip off small voxels') + argstr="-autoclip", + xor=["mask"], + desc="clip off small voxels", + ) automask = traits.Bool( False, usedefault=True, - argstr='-automask', - xor=['mask'], - desc='clip off small voxels') + argstr="-automask", + xor=["mask"], + desc="clip off small voxels", + ) fraction = traits.Bool( False, usedefault=True, - argstr='-fraction', - desc='write out the fraction of masked voxels which are outliers at ' - 'each timepoint') + argstr="-fraction", + desc="write out the fraction of masked voxels which are outliers at " + "each timepoint", + ) interval = traits.Bool( False, usedefault=True, - argstr='-range', - desc='write out the median + 3.5 MAD of outlier count with each ' - 'timepoint') - save_outliers = traits.Bool( - False, usedefault=True, desc='enables out_file option') + argstr="-range", + desc="write out the median + 3.5 MAD of outlier count with each " "timepoint", + ) + save_outliers = traits.Bool(False, usedefault=True, desc="enables out_file option") outliers_file = File( - name_template='%s_outliers', - argstr='-save %s', - name_source=['in_file'], - output_name='out_outliers', + name_template="%s_outliers", + argstr="-save %s", + name_source=["in_file"], + output_name="out_outliers", keep_extension=True, - desc='output image file name') + desc="output image file name", + ) polort = traits.Int( - argstr='-polort %d', - desc='detrend each voxel timeseries with polynomials') + argstr="-polort %d", desc="detrend each voxel timeseries with polynomials" + ) legendre = traits.Bool( - False, - usedefault=True, - argstr='-legendre', - desc='use Legendre polynomials') + False, usedefault=True, argstr="-legendre", desc="use Legendre polynomials" + ) out_file = File( - name_template='%s_outliers', - name_source=['in_file'], + name_template="%s_outliers", + name_source=["in_file"], keep_extension=False, - desc='capture standard output') + desc="capture standard output", + ) class OutlierCountOutputSpec(TraitedSpec): - out_outliers = File(exists=True, desc='output image file name') - out_file = File(desc='capture standard output') + out_outliers = File(exists=True, desc="output image file name") + out_file = File(desc="capture standard output") class OutlierCount(CommandLine): @@ -1682,10 +1831,10 @@ class OutlierCount(CommandLine): """ - _cmd = '3dToutcount' + _cmd = "3dToutcount" input_spec = OutlierCountInputSpec output_spec = OutlierCountOutputSpec - _terminal_output = 'file_split' + _terminal_output = "file_split" def _parse_inputs(self, skip=None): if skip is None: @@ -1693,84 +1842,87 @@ def _parse_inputs(self, skip=None): # This is not strictly an input, but needs be # set before run() is called. 
- if self.terminal_output == 'none': - self.terminal_output = 'file_split' + if self.terminal_output == "none": + self.terminal_output = "file_split" if not self.inputs.save_outliers: - skip += ['outliers_file'] + skip += ["outliers_file"] return super(OutlierCount, self)._parse_inputs(skip) def _run_interface(self, runtime): runtime = super(OutlierCount, self)._run_interface(runtime) # Read from runtime.stdout or runtime.merged - with open(op.abspath(self.inputs.out_file), 'w') as outfh: + with open(op.abspath(self.inputs.out_file), "w") as outfh: outfh.write(runtime.stdout or runtime.merged) return runtime def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) if self.inputs.save_outliers: - outputs['out_outliers'] = op.abspath(self.inputs.outliers_file) + outputs["out_outliers"] = op.abspath(self.inputs.outliers_file) return outputs class QualityIndexInputSpec(CommandLineInputSpec): in_file = File( - argstr='%s', - mandatory=True, - exists=True, - position=-2, - desc='input dataset') + argstr="%s", mandatory=True, exists=True, position=-2, desc="input dataset" + ) mask = File( exists=True, - argstr='-mask %s', - xor=['autoclip', 'automask'], - desc='compute correlation only across masked voxels') + argstr="-mask %s", + xor=["autoclip", "automask"], + desc="compute correlation only across masked voxels", + ) spearman = traits.Bool( False, usedefault=True, - argstr='-spearman', - desc='Quality index is 1 minus the Spearman (rank) correlation ' - 'coefficient of each sub-brick with the median sub-brick. ' - '(default).') + argstr="-spearman", + desc="Quality index is 1 minus the Spearman (rank) correlation " + "coefficient of each sub-brick with the median sub-brick. 
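OutlierCount, whose _parse_inputs/_run_interface/_list_outputs plumbing is reformatted above, captures 3dToutcount's stdout into out_file; a sketch with a placeholder input:

>>> from nipype.interfaces import afni
>>> toutcount = afni.OutlierCount()
>>> toutcount.inputs.in_file = 'functional.nii'  # placeholder
>>> toutcount.inputs.fraction = True  # report outlier fraction per timepoint
>>> res = toutcount.run()  # doctest: +SKIP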
" + "(default).", + ) quadrant = traits.Bool( False, usedefault=True, - argstr='-quadrant', - desc='Similar to -spearman, but using 1 minus the quadrant correlation ' - 'coefficient as the quality index.') + argstr="-quadrant", + desc="Similar to -spearman, but using 1 minus the quadrant correlation " + "coefficient as the quality index.", + ) autoclip = traits.Bool( False, usedefault=True, - argstr='-autoclip', - xor=['mask'], - desc='clip off small voxels') + argstr="-autoclip", + xor=["mask"], + desc="clip off small voxels", + ) automask = traits.Bool( False, usedefault=True, - argstr='-automask', - xor=['mask'], - desc='clip off small voxels') - clip = traits.Float(argstr='-clip %f', desc='clip off values below') + argstr="-automask", + xor=["mask"], + desc="clip off small voxels", + ) + clip = traits.Float(argstr="-clip %f", desc="clip off values below") interval = traits.Bool( False, usedefault=True, - argstr='-range', - desc='write out the median + 3.5 MAD of outlier count with each ' - 'timepoint') + argstr="-range", + desc="write out the median + 3.5 MAD of outlier count with each " "timepoint", + ) out_file = File( - name_template='%s_tqual', - name_source=['in_file'], - argstr='> %s', + name_template="%s_tqual", + name_source=["in_file"], + argstr="> %s", keep_extension=False, position=-1, - desc='capture standard output') + desc="capture standard output", + ) class QualityIndexOutputSpec(TraitedSpec): - out_file = File(desc='file containing the captured standard output') + out_file = File(desc="file containing the captured standard output") class QualityIndex(CommandLine): @@ -1792,110 +1944,130 @@ class QualityIndex(CommandLine): >>> res = tqual.run() # doctest: +SKIP """ - _cmd = '3dTqual' + + _cmd = "3dTqual" input_spec = QualityIndexInputSpec output_spec = QualityIndexOutputSpec class ROIStatsInputSpec(CommandLineInputSpec): in_file = File( - desc='input dataset', - argstr='%s', - position=-2, - mandatory=True, - exists=True) - mask = File(desc='input mask', argstr='-mask %s', position=3, exists=True, - deprecated='1.1.4', new_name='mask_file') - mask_file = File(desc='input mask', argstr='-mask %s', exists=True) + desc="input dataset", argstr="%s", position=-2, mandatory=True, exists=True + ) + mask = File( + desc="input mask", + argstr="-mask %s", + position=3, + exists=True, + deprecated="1.1.4", + new_name="mask_file", + ) + mask_file = File(desc="input mask", argstr="-mask %s", exists=True) mask_f2short = traits.Bool( - desc='Tells the program to convert a float mask to short integers, ' - 'by simple rounding.', - argstr='-mask_f2short') + desc="Tells the program to convert a float mask to short integers, " + "by simple rounding.", + argstr="-mask_f2short", + ) num_roi = traits.Int( - desc='Forces the assumption that the mask dataset\'s ROIs are ' - 'denoted by 1 to n inclusive. Normally, the program ' - 'figures out the ROIs on its own. This option is ' - 'useful if a) you are certain that the mask dataset ' - 'has no values outside the range [0 n], b) there may ' - 'be some ROIs missing between [1 n] in the mask data-' - 'set and c) you want those columns in the output any-' - 'way so the output lines up with the output from other ' - 'invocations of 3dROIstats.', - argstr='-numroi %s') + desc="Forces the assumption that the mask dataset's ROIs are " + "denoted by 1 to n inclusive. Normally, the program " + "figures out the ROIs on its own. 
This option is " + "useful if a) you are certain that the mask dataset " + "has no values outside the range [0 n], b) there may " + "be some ROIs missing between [1 n] in the mask data-" + "set and c) you want those columns in the output any-" + "way so the output lines up with the output from other " + "invocations of 3dROIstats.", + argstr="-numroi %s", + ) zerofill = traits.Str( - requires=['num_roi'], - desc='For ROI labels not found, use the provided string instead of ' - 'a \'0\' in the output file. Only active if `num_roi` is ' - 'enabled.', - argstr='-zerofill %s') + requires=["num_roi"], + desc="For ROI labels not found, use the provided string instead of " + "a '0' in the output file. Only active if `num_roi` is " + "enabled.", + argstr="-zerofill %s", + ) roisel = File( exists=True, - desc='Only considers ROIs denoted by values found in the specified ' - 'file. Note that the order of the ROIs as specified in the file ' - 'is not preserved. So an SEL.1D of \'2 8 20\' produces the same ' - 'output as \'8 20 2\'', - argstr='-roisel %s') - debug = traits.Bool( - desc='print debug information', - argstr='-debug') - quiet = traits.Bool( - desc='execute quietly', - argstr='-quiet') + desc="Only considers ROIs denoted by values found in the specified " + "file. Note that the order of the ROIs as specified in the file " + "is not preserved. So an SEL.1D of '2 8 20' produces the same " + "output as '8 20 2'", + argstr="-roisel %s", + ) + debug = traits.Bool(desc="print debug information", argstr="-debug") + quiet = traits.Bool(desc="execute quietly", argstr="-quiet") nomeanout = traits.Bool( - desc='Do not include the (zero-inclusive) mean among computed stats', - argstr='-nomeanout') + desc="Do not include the (zero-inclusive) mean among computed stats", + argstr="-nomeanout", + ) nobriklab = traits.Bool( - desc='Do not print the sub-brick label next to its index', - argstr='-nobriklab') + desc="Do not print the sub-brick label next to its index", argstr="-nobriklab" + ) format1D = traits.Bool( - xor=['format1DR'], - desc='Output results in a 1D format that includes commented labels', - argstr='-1Dformat') + xor=["format1DR"], + desc="Output results in a 1D format that includes commented labels", + argstr="-1Dformat", + ) format1DR = traits.Bool( - xor=['format1D'], - desc='Output results in a 1D format that includes uncommented ' - 'labels. May not work optimally with typical 1D functions, ' - 'but is useful for R functions.', - argstr='-1DRformat') - _stat_names = ['mean', 'sum', 'voxels', 'minmax', 'sigma', 'median', - 'mode', 'summary', 'zerominmax', 'zerosigma', 'zeromedian', - 'zeromode'] + xor=["format1D"], + desc="Output results in a 1D format that includes uncommented " + "labels. May not work optimally with typical 1D functions, " + "but is useful for R functions.", + argstr="-1DRformat", + ) + _stat_names = [ + "mean", + "sum", + "voxels", + "minmax", + "sigma", + "median", + "mode", + "summary", + "zerominmax", + "zerosigma", + "zeromedian", + "zeromode", + ] stat = InputMultiObject( traits.Enum(_stat_names), - desc='statistics to compute. Options include: ' - ' * mean = Compute the mean using only non_zero voxels.' - ' Implies the opposite for the mean computed ' - ' by default.\n' - ' * median = Compute the median of nonzero voxels\n' - ' * mode = Compute the mode of nonzero voxels.' 
-        ' (integral valued sets only)\n'
-        ' * minmax = Compute the min/max of nonzero voxels\n'
-        ' * sum = Compute the sum using only nonzero voxels.\n'
-        ' * voxels = Compute the number of nonzero voxels\n'
-        ' * sigma = Compute the standard deviation of nonzero'
-        ' voxels\n'
-        'Statistics that include zero-valued voxels:\n'
-        ' * zerominmax = Compute the min/max of all voxels.\n'
-        ' * zerosigma = Compute the standard deviation of all'
-        ' voxels.\n'
-        ' * zeromedian = Compute the median of all voxels.\n'
-        ' * zeromode = Compute the mode of all voxels.\n'
-        ' * summary = Only output a summary line with the grand '
-        ' mean across all briks in the input dataset.'
-        ' This option cannot be used with nomeanout.\n'
-        'More that one option can be specified.',
-        argstr='%s...')
+        desc="statistics to compute. Options include: "
+        " * mean = Compute the mean using only non_zero voxels."
+        " Implies the opposite for the mean computed "
+        " by default.\n"
+        " * median = Compute the median of nonzero voxels\n"
+        " * mode = Compute the mode of nonzero voxels."
+        " (integral valued sets only)\n"
+        " * minmax = Compute the min/max of nonzero voxels\n"
+        " * sum = Compute the sum using only nonzero voxels.\n"
+        " * voxels = Compute the number of nonzero voxels\n"
+        " * sigma = Compute the standard deviation of nonzero"
+        " voxels\n"
+        "Statistics that include zero-valued voxels:\n"
+        " * zerominmax = Compute the min/max of all voxels.\n"
+        " * zerosigma = Compute the standard deviation of all"
+        " voxels.\n"
+        " * zeromedian = Compute the median of all voxels.\n"
+        " * zeromode = Compute the mode of all voxels.\n"
+        " * summary = Only output a summary line with the grand "
+        " mean across all briks in the input dataset."
+        " This option cannot be used with nomeanout.\n"
+        "More than one option can be specified.",
+        argstr="%s...",
+    )
     out_file = File(
-        name_template='%s_roistat.1D',
-        desc='output file',
+        name_template="%s_roistat.1D",
+        desc="output file",
         keep_extension=False,
-        argstr='> %s',
-        name_source='in_file',
-        position=-1)
+        argstr="> %s",
+        name_source="in_file",
+        position=-1,
+    )
 
 
 class ROIStatsOutputSpec(TraitedSpec):
-    out_file = File(desc='output tab-separated values file', exists=True)
+    out_file = File(desc="output tab-separated values file", exists=True)
 
 
 class ROIStats(AFNICommandBase):
@@ -1918,75 +2090,84 @@ class ROIStats(AFNICommandBase):
     >>> res = roistats.run()  # doctest: +SKIP
 
     """
-    _cmd = '3dROIstats'
-    _terminal_output = 'allatonce'
+
+    _cmd = "3dROIstats"
+    _terminal_output = "allatonce"
     input_spec = ROIStatsInputSpec
     output_spec = ROIStatsOutputSpec
 
     def _format_arg(self, name, spec, value):
         _stat_dict = {
-            'mean': '-nzmean',
-            'median': '-nzmedian',
-            'mode': '-nzmode',
-            'minmax': '-nzminmax',
-            'sigma': '-nzsigma',
-            'voxels': '-nzvoxels',
-            'sum': '-nzsum',
-            'summary': '-summary',
-            'zerominmax': '-minmax',
-            'zeromedian': '-median',
-            'zerosigma': '-sigma',
-            'zeromode': '-mode'
-        }
-        if name == 'stat':
+            "mean": "-nzmean",
+            "median": "-nzmedian",
+            "mode": "-nzmode",
+            "minmax": "-nzminmax",
+            "sigma": "-nzsigma",
+            "voxels": "-nzvoxels",
+            "sum": "-nzsum",
+            "summary": "-summary",
+            "zerominmax": "-minmax",
+            "zeromedian": "-median",
+            "zerosigma": "-sigma",
+            "zeromode": "-mode",
+        }
+        if name == "stat":
             value = [_stat_dict[v] for v in value]
         return super(ROIStats, self)._format_arg(name, spec, value)
 
 
 class RetroicorInputSpec(AFNICommandInputSpec):
     in_file = File(
-        desc='input file to 3dretroicor',
-        argstr='%s',
+        desc="input file to 3dretroicor",
+
argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_retroicor', - name_source=['in_file'], - desc='output image file name', - argstr='-prefix %s', - position=1) + name_template="%s_retroicor", + name_source=["in_file"], + desc="output image file name", + argstr="-prefix %s", + position=1, + ) card = File( - desc='1D cardiac data file for cardiac correction', - argstr='-card %s', + desc="1D cardiac data file for cardiac correction", + argstr="-card %s", position=-2, - exists=True) + exists=True, + ) resp = File( - desc='1D respiratory waveform data for correction', - argstr='-resp %s', + desc="1D respiratory waveform data for correction", + argstr="-resp %s", position=-3, - exists=True) + exists=True, + ) threshold = traits.Int( - desc='Threshold for detection of R-wave peaks in input (Make sure it ' - 'is above the background noise level, Try 3/4 or 4/5 times range ' - 'plus minimum)', - argstr='-threshold %d', - position=-4) + desc="Threshold for detection of R-wave peaks in input (Make sure it " + "is above the background noise level, Try 3/4 or 4/5 times range " + "plus minimum)", + argstr="-threshold %d", + position=-4, + ) order = traits.Int( - desc='The order of the correction (2 is typical)', - argstr='-order %s', - position=-5) + desc="The order of the correction (2 is typical)", + argstr="-order %s", + position=-5, + ) cardphase = File( - desc='Filename for 1D cardiac phase output', - argstr='-cardphase %s', + desc="Filename for 1D cardiac phase output", + argstr="-cardphase %s", position=-6, - hash_files=False) + hash_files=False, + ) respphase = File( - desc='Filename for 1D resp phase output', - argstr='-respphase %s', + desc="Filename for 1D resp phase output", + argstr="-respphase %s", position=-7, - hash_files=False) + hash_files=False, + ) class Retroicor(AFNICommand): @@ -2024,67 +2205,75 @@ class Retroicor(AFNICommand): """ - _cmd = '3dretroicor' + _cmd = "3dretroicor" input_spec = RetroicorInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, trait_spec, value): - if name == 'in_file': - if not isdefined(self.inputs.card) and not isdefined( - self.inputs.resp): + if name == "in_file": + if not isdefined(self.inputs.card) and not isdefined(self.inputs.resp): return None return super(Retroicor, self)._format_arg(name, trait_spec, value) class SegInputSpec(CommandLineInputSpec): in_file = File( - desc='ANAT is the volume to segment', - argstr='-anat %s', + desc="ANAT is the volume to segment", + argstr="-anat %s", position=-1, mandatory=True, exists=True, - copyfile=True) + copyfile=True, + ) mask = traits.Either( - traits.Enum('AUTO'), + traits.Enum("AUTO"), File(exists=True), - desc='only non-zero voxels in mask are analyzed. mask can either be a ' + desc="only non-zero voxels in mask are analyzed. 
mask can either be a " 'dataset or the string "AUTO" which would use AFNI\'s automask ' - 'function to create the mask.', - argstr='-mask %s', + "function to create the mask.", + argstr="-mask %s", position=-2, - mandatory=True) + mandatory=True, + ) blur_meth = traits.Enum( - 'BFT', - 'BIM', - argstr='-blur_meth %s', - desc='set the blurring method for bias field estimation') + "BFT", + "BIM", + argstr="-blur_meth %s", + desc="set the blurring method for bias field estimation", + ) bias_fwhm = traits.Float( - desc='The amount of blurring used when estimating the field bias with ' - 'the Wells method', - argstr='-bias_fwhm %f') + desc="The amount of blurring used when estimating the field bias with " + "the Wells method", + argstr="-bias_fwhm %f", + ) classes = Str( - desc='CLASS_STRING is a semicolon delimited string of class labels', - argstr='-classes %s') + desc="CLASS_STRING is a semicolon delimited string of class labels", + argstr="-classes %s", + ) bmrf = traits.Float( - desc='Weighting factor controlling spatial homogeneity of the ' - 'classifications', - argstr='-bmrf %f') + desc="Weighting factor controlling spatial homogeneity of the " + "classifications", + argstr="-bmrf %f", + ) bias_classes = Str( - desc='A semicolon delimited string of classes that contribute to the ' - 'estimation of the bias field', - argstr='-bias_classes %s') + desc="A semicolon delimited string of classes that contribute to the " + "estimation of the bias field", + argstr="-bias_classes %s", + ) prefix = Str( - desc='the prefix for the output folder containing all output volumes', - argstr='-prefix %s') + desc="the prefix for the output folder containing all output volumes", + argstr="-prefix %s", + ) mixfrac = Str( - desc='MIXFRAC sets up the volume-wide (within mask) tissue fractions ' - 'while initializing the segmentation (see IGNORE for exception)', - argstr='-mixfrac %s') + desc="MIXFRAC sets up the volume-wide (within mask) tissue fractions " + "while initializing the segmentation (see IGNORE for exception)", + argstr="-mixfrac %s", + ) mixfloor = traits.Float( - desc='Set the minimum value for any class\'s mixing fraction', - argstr='-mixfloor %f') - main_N = traits.Int( - desc='Number of iterations to perform.', argstr='-main_N %d') + desc="Set the minimum value for any class's mixing fraction", + argstr="-mixfloor %f", + ) + main_N = traits.Int(desc="Number of iterations to perform.", argstr="-main_N %d") class Seg(AFNICommandBase): @@ -2108,7 +2297,7 @@ class Seg(AFNICommandBase): """ - _cmd = '3dSeg' + _cmd = "3dSeg" input_spec = SegInputSpec output_spec = AFNICommandOutputSpec @@ -2119,10 +2308,9 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() if isdefined(self.inputs.prefix): - outfile = os.path.join(os.getcwd(), self.inputs.prefix, - 'Classes+*.BRIK') + outfile = os.path.join(os.getcwd(), self.inputs.prefix, "Classes+*.BRIK") else: - outfile = os.path.join(os.getcwd(), 'Segsy', 'Classes+*.BRIK') + outfile = os.path.join(os.getcwd(), "Segsy", "Classes+*.BRIK") outputs.out_file = glob.glob(outfile)[0] @@ -2131,17 +2319,19 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): class SkullStripInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dSkullStrip', - argstr='-input %s', + desc="input file to 3dSkullStrip", + argstr="-input %s", position=1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_skullstrip', - desc='output image file name', - argstr='-prefix 
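A sketch of the Seg interface above; per the mask desc, the string 'AUTO' asks 3dSeg to build its own automask instead of taking a mask file (the anatomical filename is a placeholder):

>>> from nipype.interfaces import afni
>>> seg = afni.Seg()
>>> seg.inputs.in_file = 'structural.nii'  # placeholder anatomical
>>> seg.inputs.mask = 'AUTO'
>>> res = seg.run()  # doctest: +SKIP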
%s', - name_source='in_file') + name_template="%s_skullstrip", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) class SkullStrip(AFNICommand): @@ -2164,7 +2354,8 @@ class SkullStrip(AFNICommand): >>> res = skullstrip.run() # doctest: +SKIP """ - _cmd = '3dSkullStrip' + + _cmd = "3dSkullStrip" _redirect_x = True input_spec = SkullStripInputSpec output_spec = AFNICommandOutputSpec @@ -2182,48 +2373,55 @@ def __init__(self, **inputs): class TCorr1DInputSpec(AFNICommandInputSpec): xset = File( - desc='3d+time dataset input', - argstr=' %s', + desc="3d+time dataset input", + argstr=" %s", position=-2, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) y_1d = File( - desc='1D time series file input', - argstr=' %s', + desc="1D time series file input", + argstr=" %s", position=-1, mandatory=True, - exists=True) + exists=True, + ) out_file = File( - desc='output filename prefix', - name_template='%s_correlation.nii.gz', - argstr='-prefix %s', - name_source='xset', - keep_extension=True) + desc="output filename prefix", + name_template="%s_correlation.nii.gz", + argstr="-prefix %s", + name_source="xset", + keep_extension=True, + ) pearson = traits.Bool( - desc='Correlation is the normal Pearson correlation coefficient', - argstr=' -pearson', - xor=['spearman', 'quadrant', 'ktaub'], - position=1) + desc="Correlation is the normal Pearson correlation coefficient", + argstr=" -pearson", + xor=["spearman", "quadrant", "ktaub"], + position=1, + ) spearman = traits.Bool( - desc='Correlation is the Spearman (rank) correlation coefficient', - argstr=' -spearman', - xor=['pearson', 'quadrant', 'ktaub'], - position=1) + desc="Correlation is the Spearman (rank) correlation coefficient", + argstr=" -spearman", + xor=["pearson", "quadrant", "ktaub"], + position=1, + ) quadrant = traits.Bool( - desc='Correlation is the quadrant correlation coefficient', - argstr=' -quadrant', - xor=['pearson', 'spearman', 'ktaub'], - position=1) + desc="Correlation is the quadrant correlation coefficient", + argstr=" -quadrant", + xor=["pearson", "spearman", "ktaub"], + position=1, + ) ktaub = traits.Bool( - desc='Correlation is the Kendall\'s tau_b correlation coefficient', - argstr=' -ktaub', - xor=['pearson', 'spearman', 'quadrant'], - position=1) + desc="Correlation is the Kendall's tau_b correlation coefficient", + argstr=" -ktaub", + xor=["pearson", "spearman", "quadrant"], + position=1, + ) class TCorr1DOutputSpec(TraitedSpec): - out_file = File(desc='output file containing correlations', exists=True) + out_file = File(desc="output file containing correlations", exists=True) class TCorr1D(AFNICommand): @@ -2243,73 +2441,69 @@ class TCorr1D(AFNICommand): """ - _cmd = '3dTcorr1D' + _cmd = "3dTcorr1D" input_spec = TCorr1DInputSpec output_spec = TCorr1DOutputSpec class TCorrMapInputSpec(AFNICommandInputSpec): - in_file = File( - exists=True, argstr='-input %s', mandatory=True, copyfile=False) - seeds = File(exists=True, argstr='-seed %s', xor=('seeds_width')) - mask = File(exists=True, argstr='-mask %s') - automask = traits.Bool(argstr='-automask') - polort = traits.Int(argstr='-polort %d') - bandpass = traits.Tuple( - (traits.Float(), traits.Float()), argstr='-bpass %f %f') - regress_out_timeseries = File(exists=True, argstr='-ort %s') - blur_fwhm = traits.Float(argstr='-Gblur %f') - seeds_width = traits.Float(argstr='-Mseed %f', xor=('seeds')) + in_file = File(exists=True, argstr="-input %s", mandatory=True, copyfile=False) + seeds = File(exists=True, argstr="-seed %s", 
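A sketch of TCorr1D as reformatted above, correlating a placeholder 3d+time dataset against a placeholder 1D time series (with none of the mutually exclusive method flags set, 3dTcorr1D falls back to its own default):

>>> from nipype.interfaces import afni
>>> tcorr1D = afni.TCorr1D()
>>> tcorr1D.inputs.xset = 'u_rc1s1_Template.nii'  # placeholder 3d+time input
>>> tcorr1D.inputs.y_1d = 'seed.1D'  # placeholder 1D time series
>>> res = tcorr1D.run()  # doctest: +SKIP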
xor=("seeds_width")) + mask = File(exists=True, argstr="-mask %s") + automask = traits.Bool(argstr="-automask") + polort = traits.Int(argstr="-polort %d") + bandpass = traits.Tuple((traits.Float(), traits.Float()), argstr="-bpass %f %f") + regress_out_timeseries = File(exists=True, argstr="-ort %s") + blur_fwhm = traits.Float(argstr="-Gblur %f") + seeds_width = traits.Float(argstr="-Mseed %f", xor=("seeds")) # outputs - mean_file = File(argstr='-Mean %s', suffix='_mean', name_source='in_file') - zmean = File(argstr='-Zmean %s', suffix='_zmean', name_source='in_file') - qmean = File(argstr='-Qmean %s', suffix='_qmean', name_source='in_file') - pmean = File(argstr='-Pmean %s', suffix='_pmean', name_source='in_file') - - _thresh_opts = ('absolute_threshold', 'var_absolute_threshold', - 'var_absolute_threshold_normalize') + mean_file = File(argstr="-Mean %s", suffix="_mean", name_source="in_file") + zmean = File(argstr="-Zmean %s", suffix="_zmean", name_source="in_file") + qmean = File(argstr="-Qmean %s", suffix="_qmean", name_source="in_file") + pmean = File(argstr="-Pmean %s", suffix="_pmean", name_source="in_file") + + _thresh_opts = ( + "absolute_threshold", + "var_absolute_threshold", + "var_absolute_threshold_normalize", + ) thresholds = traits.List(traits.Int()) absolute_threshold = File( - argstr='-Thresh %f %s', - suffix='_thresh', - name_source='in_file', - xor=_thresh_opts) + argstr="-Thresh %f %s", + suffix="_thresh", + name_source="in_file", + xor=_thresh_opts, + ) var_absolute_threshold = File( - argstr='-VarThresh %f %f %f %s', - suffix='_varthresh', - name_source='in_file', - xor=_thresh_opts) + argstr="-VarThresh %f %f %f %s", + suffix="_varthresh", + name_source="in_file", + xor=_thresh_opts, + ) var_absolute_threshold_normalize = File( - argstr='-VarThreshN %f %f %f %s', - suffix='_varthreshn', - name_source='in_file', - xor=_thresh_opts) + argstr="-VarThreshN %f %f %f %s", + suffix="_varthreshn", + name_source="in_file", + xor=_thresh_opts, + ) - correlation_maps = File(argstr='-CorrMap %s', name_source='in_file') - correlation_maps_masked = File( - argstr='-CorrMask %s', name_source='in_file') + correlation_maps = File(argstr="-CorrMap %s", name_source="in_file") + correlation_maps_masked = File(argstr="-CorrMask %s", name_source="in_file") - _expr_opts = ('average_expr', 'average_expr_nonzero', 'sum_expr') + _expr_opts = ("average_expr", "average_expr_nonzero", "sum_expr") expr = Str() average_expr = File( - argstr='-Aexpr %s %s', - suffix='_aexpr', - name_source='in_file', - xor=_expr_opts) + argstr="-Aexpr %s %s", suffix="_aexpr", name_source="in_file", xor=_expr_opts + ) average_expr_nonzero = File( - argstr='-Cexpr %s %s', - suffix='_cexpr', - name_source='in_file', - xor=_expr_opts) + argstr="-Cexpr %s %s", suffix="_cexpr", name_source="in_file", xor=_expr_opts + ) sum_expr = File( - argstr='-Sexpr %s %s', - suffix='_sexpr', - name_source='in_file', - xor=_expr_opts) + argstr="-Sexpr %s %s", suffix="_sexpr", name_source="in_file", xor=_expr_opts + ) histogram_bin_numbers = traits.Int() - histogram = File( - name_source='in_file', argstr='-Hist %d %s', suffix='_hist') + histogram = File(name_source="in_file", argstr="-Hist %d %s", suffix="_hist") class TCorrMapOutputSpec(TraitedSpec): @@ -2350,48 +2544,50 @@ class TCorrMap(AFNICommand): """ - _cmd = '3dTcorrMap' + _cmd = "3dTcorrMap" input_spec = TCorrMapInputSpec output_spec = TCorrMapOutputSpec - _additional_metadata = ['suffix'] + _additional_metadata = ["suffix"] def _format_arg(self, name, trait_spec, value): if 
name in self.inputs._thresh_opts: return trait_spec.argstr % self.inputs.thresholds + [value] elif name in self.inputs._expr_opts: return trait_spec.argstr % (self.inputs.expr, value) - elif name == 'histogram': - return trait_spec.argstr % (self.inputs.histogram_bin_numbers, - value) + elif name == "histogram": + return trait_spec.argstr % (self.inputs.histogram_bin_numbers, value) else: return super(TCorrMap, self)._format_arg(name, trait_spec, value) class TCorrelateInputSpec(AFNICommandInputSpec): xset = File( - desc='input xset', - argstr='%s', + desc="input xset", + argstr="%s", position=-2, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) yset = File( - desc='input yset', - argstr='%s', + desc="input yset", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_tcorr', - desc='output image file name', - argstr='-prefix %s', - name_source='xset') + name_template="%s_tcorr", + desc="output image file name", + argstr="-prefix %s", + name_source="xset", + ) pearson = traits.Bool( - desc='Correlation is the normal Pearson correlation coefficient', - argstr='-pearson') - polort = traits.Int( - desc='Remove polynomical trend of order m', argstr='-polort %d') + desc="Correlation is the normal Pearson correlation coefficient", + argstr="-pearson", + ) + polort = traits.Int(desc="Remove polynomical trend of order m", argstr="-polort %d") class TCorrelate(AFNICommand): @@ -2417,44 +2613,50 @@ class TCorrelate(AFNICommand): """ - _cmd = '3dTcorrelate' + _cmd = "3dTcorrelate" input_spec = TCorrelateInputSpec output_spec = AFNICommandOutputSpec class TNormInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dTNorm', - argstr='%s', + desc="input file to 3dTNorm", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_tnorm', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') + name_template="%s_tnorm", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) norm2 = traits.Bool( - desc='L2 normalize (sum of squares = 1) [DEFAULT]', argstr='-norm2') + desc="L2 normalize (sum of squares = 1) [DEFAULT]", argstr="-norm2" + ) normR = traits.Bool( - desc= - 'normalize so sum of squares = number of time points * e.g., so RMS = 1.', - argstr='-normR') + desc="normalize so sum of squares = number of time points * e.g., so RMS = 1.", + argstr="-normR", + ) norm1 = traits.Bool( - desc='L1 normalize (sum of absolute values = 1)', argstr='-norm1') + desc="L1 normalize (sum of absolute values = 1)", argstr="-norm1" + ) normx = traits.Bool( - desc='Scale so max absolute value = 1 (L_infinity norm)', - argstr='-normx') + desc="Scale so max absolute value = 1 (L_infinity norm)", argstr="-normx" + ) polort = traits.Int( desc="""Detrend with polynomials of order p before normalizing [DEFAULT = don't do this] * Use '-polort 0' to remove the mean, for example""", - argstr='-polort %s') + argstr="-polort %s", + ) L1fit = traits.Bool( desc="""Detrend with L1 regression (L2 is the default) * This option is here just for the hell of it""", - argstr='-L1fit') + argstr="-L1fit", + ) class TNorm(AFNICommand): @@ -2477,32 +2679,36 @@ class TNorm(AFNICommand): >>> res = tshift.run() # doctest: +SKIP """ - _cmd = '3dTnorm' + + _cmd = "3dTnorm" input_spec = TNormInputSpec output_spec = AFNICommandOutputSpec class TProjectInputSpec(AFNICommandInputSpec): in_file = File( - 
desc='input file to 3dTproject', - argstr='-input %s', + desc="input file to 3dTproject", + argstr="-input %s", position=1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_tproject', - desc='output image file name', + name_template="%s_tproject", + desc="output image file name", position=-1, - argstr='-prefix %s', - name_source='in_file') + argstr="-prefix %s", + name_source="in_file", + ) censor = File( desc="""filename of censor .1D time series * This is a file of 1s and 0s, indicating which time points are to be included (1) and which are to be excluded (0).""", argstr="-censor %s", - exists=True) + exists=True, + ) censortr = traits.List( traits.Str(), desc="""list of strings that specify time indexes @@ -2519,10 +2725,13 @@ class TProjectInputSpec(AFNICommandInputSpec): +N.B.: 2:37,47 means index #37 in run #2 and global time index 47; it does NOT mean index #37 in run #2 AND index #47 in run #2.""", - argstr="-CENSORTR %s") + argstr="-CENSORTR %s", + ) cenmode = traits.Enum( - 'KILL', 'ZERO', 'NTRP', - desc="""specifies how censored time points are treated in + "KILL", + "ZERO", + "NTRP", + desc="""specifies how censored time points are treated in the output dataset: + mode = ZERO ==> put zero values in their place ==> output datset is same length as input @@ -2535,7 +2744,8 @@ class TProjectInputSpec(AFNICommandInputSpec): of any time points -- this feature is to keep the Spanish Inquisition happy. * The default mode is KILL !!!""", - argstr='-cenmode %s') + argstr="-cenmode %s", + ) concat = File( desc="""The catenation file, as in 3dDeconvolve, containing the TR indexes of the start points for each contiguous run @@ -2556,18 +2766,21 @@ class TProjectInputSpec(AFNICommandInputSpec): from the ort files via the '{...}' selector for the 1D files and the '[...]' selector for the datasets.""", exists=True, - argstr='-concat %s') + argstr="-concat %s", + ) noblock = traits.Bool( desc="""Also as in 3dDeconvolve, if you want the program to treat an auto-catenated dataset as one long run, use this option. ++ However, '-noblock' will not affect catenation if you use the '-concat' option.""", - argstr='-noblock') + argstr="-noblock", + ) ort = File( desc="""Remove each column in file ++ Each column will have its mean removed.""", exists=True, - argstr="-ort %s") + argstr="-ort %s", + ) polort = traits.Int( desc="""Remove polynomials up to and including degree pp. ++ Default value is 2. @@ -2578,49 +2791,56 @@ class TProjectInputSpec(AFNICommandInputSpec): ++ Use of -polort -1 is not advised (if data mean != 0), even if -ort contains constant terms, as all means are removed.""", - argstr="-polort %d") + argstr="-polort %d", + ) dsort = InputMultiObject( - File( - exists=True, - copyfile=False), + File(exists=True, copyfile=False), argstr="-dsort %s...", desc="""Remove the 3D+time time series in dataset fset. ++ That is, 'fset' contains a different nuisance time series for each voxel (e.g., from AnatICOR). 
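The censor, censortr, and cenmode inputs above map directly onto 3dTproject's censoring flags. A minimal usage sketch under those definitions (file names are hypothetical, AFNI's 3dTproject must be on PATH; this mirrors the doctest style used throughout this module and is not a doctest from the diff):

from nipype.interfaces import afni

tproject = afni.TProject()
tproject.inputs.in_file = "functional.nii"
tproject.inputs.censortr = ["2:37,47"]  # index 37 of run 2, plus global time index 47
tproject.inputs.cenmode = "NTRP"        # interpolate across censored points, keep length
tproject.inputs.polort = 2
res = tproject.run()  # doctest: +SKIP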
- ++ Multiple -dsort options are allowed.""") + ++ Multiple -dsort options are allowed.""", + ) bandpass = traits.Tuple( - traits.Float, traits.Float, + traits.Float, + traits.Float, desc="""Remove all frequencies EXCEPT those in the range""", - argstr='-bandpass %g %g') + argstr="-bandpass %g %g", + ) stopband = traits.Tuple( - traits.Float, traits.Float, + traits.Float, + traits.Float, desc="""Remove all frequencies in the range""", - argstr='-stopband %g %g') + argstr="-stopband %g %g", + ) TR = traits.Float( desc="""Use time step dd for the frequency calculations, rather than the value stored in the dataset header.""", - argstr='-TR %g') + argstr="-TR %g", + ) mask = File( exists=True, desc="""Only operate on voxels nonzero in the mset dataset. ++ Voxels outside the mask will be filled with zeros. ++ If no masking option is given, then all voxels will be processed.""", - argstr='-mask %s') + argstr="-mask %s", + ) automask = traits.Bool( - desc="""Generate a mask automatically""", - xor=['mask'], - argstr='-automask') + desc="""Generate a mask automatically""", xor=["mask"], argstr="-automask" + ) blur = traits.Float( desc="""Blur (inside the mask only) with a filter that has width (FWHM) of fff millimeters. ++ Spatial blurring (if done) is after the time series filtering.""", - argstr='-blur %g') + argstr="-blur %g", + ) norm = traits.Bool( desc="""Normalize each output time series to have sum of squares = 1. This is the LAST operation.""", - argstr='-norm') + argstr="-norm", + ) class TProject(AFNICommand): @@ -2652,76 +2872,92 @@ class TProject(AFNICommand): >>> res = tproject.run() # doctest: +SKIP """ - _cmd = '3dTproject' + + _cmd = "3dTproject" input_spec = TProjectInputSpec output_spec = AFNICommandOutputSpec - class TShiftInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dTshift', - argstr='%s', + desc="input file to 3dTshift", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_tshift', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') + name_template="%s_tshift", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) tr = Str( desc='manually set the TR. 
You can attach suffix "s" for seconds ' 'or "ms" for milliseconds.', - argstr='-TR %s') + argstr="-TR %s", + ) tzero = traits.Float( - desc='align each slice to given time offset', - argstr='-tzero %s', - xor=['tslice']) + desc="align each slice to given time offset", argstr="-tzero %s", xor=["tslice"] + ) tslice = traits.Int( - desc='align each slice to time offset of given slice', - argstr='-slice %s', - xor=['tzero']) + desc="align each slice to time offset of given slice", + argstr="-slice %s", + xor=["tzero"], + ) ignore = traits.Int( - desc='ignore the first set of points specified', argstr='-ignore %s') + desc="ignore the first set of points specified", argstr="-ignore %s" + ) interp = traits.Enum( - ('Fourier', 'linear', 'cubic', 'quintic', 'heptic'), - desc='different interpolation methods (see 3dTshift for details) ' - 'default = Fourier', - argstr='-%s') + ("Fourier", "linear", "cubic", "quintic", "heptic"), + desc="different interpolation methods (see 3dTshift for details) " + "default = Fourier", + argstr="-%s", + ) tpattern = traits.Either( - traits.Enum('alt+z', 'altplus', # Synonyms - 'alt+z2', - 'alt-z', 'altminus', # Synonyms - 'alt-z2', - 'seq+z', 'seqplus', # Synonyms - 'seq-z', 'seqminus'), # Synonyms + traits.Enum( + "alt+z", + "altplus", # Synonyms + "alt+z2", + "alt-z", + "altminus", # Synonyms + "alt-z2", + "seq+z", + "seqplus", # Synonyms + "seq-z", + "seqminus", + ), # Synonyms Str, # For backwards compatibility - desc='use specified slice time pattern rather than one in header', - argstr='-tpattern %s', - xor=['slice_timing']) + desc="use specified slice time pattern rather than one in header", + argstr="-tpattern %s", + xor=["slice_timing"], + ) slice_timing = traits.Either( File(exists=True), traits.List(traits.Float), - desc='time offsets from the volume acquisition onset for each slice', - argstr='-tpattern @%s', - xor=['tpattern']) + desc="time offsets from the volume acquisition onset for each slice", + argstr="-tpattern @%s", + xor=["tpattern"], + ) slice_encoding_direction = traits.Enum( - 'k', 'k-', + "k", + "k-", usedefault=True, - desc='Direction in which slice_timing is specified (default: k). If negative,' - 'slice_timing is defined in reverse order, that is, the first entry ' - 'corresponds to the slice with the largest index, and the final entry ' - 'corresponds to slice index zero. Only in effect when slice_timing is ' - 'passed as list, not when it is passed as file.',) + desc="Direction in which slice_timing is specified (default: k). If negative," + "slice_timing is defined in reverse order, that is, the first entry " + "corresponds to the slice with the largest index, and the final entry " + "corresponds to slice index zero. 
Only in effect when slice_timing is " + "passed as list, not when it is passed as file.", + ) rlt = traits.Bool( - desc='Before shifting, remove the mean and linear trend', - argstr='-rlt') + desc="Before shifting, remove the mean and linear trend", argstr="-rlt" + ) rltplus = traits.Bool( - desc='Before shifting, remove the mean and linear trend and later put ' - 'back the mean', - argstr='-rlt+') + desc="Before shifting, remove the mean and linear trend and later put " + "back the mean", + argstr="-rlt+", + ) class TShiftOutputSpec(AFNICommandOutputSpec): @@ -2818,15 +3054,18 @@ class TShift(AFNICommand): >>> res = tshift.run() # doctest: +SKIP """ - _cmd = '3dTshift' + + _cmd = "3dTshift" input_spec = TShiftInputSpec output_spec = TShiftOutputSpec def _format_arg(self, name, trait_spec, value): - if name == 'tpattern' and value.startswith('@'): - iflogger.warning('Passing a file prefixed by "@" will be deprecated' - '; please use the `slice_timing` input') - elif name == 'slice_timing' and isinstance(value, list): + if name == "tpattern" and value.startswith("@"): + iflogger.warning( + 'Passing a file prefixed by "@" will be deprecated' + "; please use the `slice_timing` input" + ) + elif name == "slice_timing" and isinstance(value, list): value = self._write_slice_timing() return super(TShift, self)._format_arg(name, trait_spec, value) @@ -2835,67 +3074,72 @@ def _write_slice_timing(self): if self.inputs.slice_encoding_direction.endswith("-"): slice_timing.reverse() - fname = 'slice_timing.1D' - with open(fname, 'w') as fobj: - fobj.write('\t'.join(map(str, slice_timing))) + fname = "slice_timing.1D" + with open(fname, "w") as fobj: + fobj.write("\t".join(map(str, slice_timing))) return fname def _list_outputs(self): outputs = super(TShift, self)._list_outputs() if isdefined(self.inputs.slice_timing): if isinstance(self.inputs.slice_timing, list): - outputs['timing_file'] = os.path.abspath('slice_timing.1D') + outputs["timing_file"] = os.path.abspath("slice_timing.1D") else: - outputs['timing_file'] = os.path.abspath(self.inputs.slice_timing) + outputs["timing_file"] = os.path.abspath(self.inputs.slice_timing) return outputs class TSmoothInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dTSmooth', - argstr='%s', + desc="input file to 3dTSmooth", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_smooth', - desc='output file from 3dTSmooth', - argstr='-prefix %s', - name_source='in_file') + name_template="%s_smooth", + desc="output file from 3dTSmooth", + argstr="-prefix %s", + name_source="in_file", + ) datum = traits.Str( - desc='Sets the data type of the output dataset', - argstr='-datum %s') + desc="Sets the data type of the output dataset", argstr="-datum %s" + ) lin = traits.Bool( - desc='3 point linear filter: 0.15*a + 0.70*b + 0.15*c' - '[This is the default smoother]', - argstr='-lin') - med = traits.Bool( - desc='3 point median filter: median(a,b,c)', - argstr='-med') + desc="3 point linear filter: 0.15*a + 0.70*b + 0.15*c" + "[This is the default smoother]", + argstr="-lin", + ) + med = traits.Bool(desc="3 point median filter: median(a,b,c)", argstr="-med") osf = traits.Bool( - desc='3 point order statistics filter:' - '0.15*min(a,b,c) + 0.70*median(a,b,c) + 0.15*max(a,b,c)', - argstr='-osf') + desc="3 point order statistics filter:" + "0.15*min(a,b,c) + 0.70*median(a,b,c) + 0.15*max(a,b,c)", + argstr="-osf", + ) lin3 = traits.Int( - desc='3 point linear filter: 
0.5*(1-m)*a + m*b + 0.5*(1-m)*c' + desc="3 point linear filter: 0.5*(1-m)*a + m*b + 0.5*(1-m)*c" "Here, 'm' is a number strictly between 0 and 1.", - argstr='-3lin %d') + argstr="-3lin %d", + ) hamming = traits.Int( - argstr='-hamming %d', - desc='Use N point Hamming windows.' - '(N must be odd and bigger than 1.)') + argstr="-hamming %d", + desc="Use N point Hamming windows." "(N must be odd and bigger than 1.)", + ) blackman = traits.Int( - argstr='-blackman %d', - desc='Use N point Blackman windows.' - '(N must be odd and bigger than 1.)') + argstr="-blackman %d", + desc="Use N point Blackman windows." "(N must be odd and bigger than 1.)", + ) custom = File( - argstr='-custom %s', - desc='odd # of coefficients must be in a single column in ASCII file') + argstr="-custom %s", + desc="odd # of coefficients must be in a single column in ASCII file", + ) adaptive = traits.Int( - argstr='-adaptive %d', - desc='use adaptive mean filtering of width N ' - '(where N must be odd and bigger than 3).') + argstr="-adaptive %d", + desc="use adaptive mean filtering of width N " + "(where N must be odd and bigger than 3).", + ) class TSmooth(AFNICommand): @@ -2918,76 +3162,87 @@ class TSmooth(AFNICommand): >>> res = smooth.run() # doctest: +SKIP """ - _cmd = '3dTsmooth' + + _cmd = "3dTsmooth" input_spec = TSmoothInputSpec output_spec = AFNICommandOutputSpec class VolregInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dvolreg', - argstr='%s', + desc="input file to 3dvolreg", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) in_weight_volume = traits.Either( traits.Tuple(File(exists=True), traits.Int), File(exists=True), - desc='weights for each voxel specified by a file with an ' - 'optional volume number (defaults to 0)', - argstr="-weight '%s[%d]'") + desc="weights for each voxel specified by a file with an " + "optional volume number (defaults to 0)", + argstr="-weight '%s[%d]'", + ) out_file = File( - name_template='%s_volreg', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') + name_template="%s_volreg", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) basefile = File( - desc='base file for registration', - argstr='-base %s', - position=-6, - exists=True) + desc="base file for registration", argstr="-base %s", position=-6, exists=True + ) zpad = traits.Int( - desc='Zeropad around the edges by \'n\' voxels during rotations', - argstr='-zpad %d', - position=-5) + desc="Zeropad around the edges by 'n' voxels during rotations", + argstr="-zpad %d", + position=-5, + ) md1d_file = File( - name_template='%s_md.1D', - desc='max displacement output file', - argstr='-maxdisp1D %s', - name_source='in_file', + name_template="%s_md.1D", + desc="max displacement output file", + argstr="-maxdisp1D %s", + name_source="in_file", keep_extension=True, - position=-4) + position=-4, + ) oned_file = File( - name_template='%s.1D', - desc='1D movement parameters output file', - argstr='-1Dfile %s', - name_source='in_file', - keep_extension=True) + name_template="%s.1D", + desc="1D movement parameters output file", + argstr="-1Dfile %s", + name_source="in_file", + keep_extension=True, + ) verbose = traits.Bool( - desc='more detailed description of the process', argstr='-verbose') + desc="more detailed description of the process", argstr="-verbose" + ) timeshift = traits.Bool( - desc='time shift to mean slice time offset', argstr='-tshift 0') + desc="time shift to mean slice time offset", 
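Tying together the TShift pieces shown above: when slice_timing is given as a list, _format_arg routes it through _write_slice_timing, which writes a tab-separated slice_timing.1D and _list_outputs then reports it as timing_file. A sketch of that path (hypothetical file names and offsets; not a doctest from this diff):

from nipype.interfaces import afni

tshift = afni.TShift()
tshift.inputs.in_file = "functional.nii"
tshift.inputs.tzero = 0.0
tshift.inputs.slice_timing = [0.0, 0.4, 0.8, 1.2]  # hypothetical slice offsets
res = tshift.run()  # doctest: +SKIP
# The list is written to slice_timing.1D and exposed as res.outputs.timing_file.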
argstr="-tshift 0" + ) copyorigin = traits.Bool( - desc='copy base file origin coords to output', argstr='-twodup') + desc="copy base file origin coords to output", argstr="-twodup" + ) oned_matrix_save = File( - name_template='%s.aff12.1D', - desc='Save the matrix transformation', - argstr='-1Dmatrix_save %s', + name_template="%s.aff12.1D", + desc="Save the matrix transformation", + argstr="-1Dmatrix_save %s", keep_extension=True, - name_source='in_file') + name_source="in_file", + ) interp = traits.Enum( - ('Fourier', 'cubic', 'heptic', 'quintic', 'linear'), - desc='spatial interpolation methods [default = heptic]', - argstr='-%s') + ("Fourier", "cubic", "heptic", "quintic", "linear"), + desc="spatial interpolation methods [default = heptic]", + argstr="-%s", + ) class VolregOutputSpec(TraitedSpec): - out_file = File(desc='registered file', exists=True) - md1d_file = File(desc='max displacement info file', exists=True) - oned_file = File(desc='movement parameters info file', exists=True) + out_file = File(desc="registered file", exists=True) + md1d_file = File(desc="max displacement info file", exists=True) + oned_file = File(desc="movement parameters info file", exists=True) oned_matrix_save = File( - desc='matrix transformation from base to input', exists=True) + desc="matrix transformation from base to input", exists=True + ) class Volreg(AFNICommand): @@ -3025,67 +3280,73 @@ class Volreg(AFNICommand): """ - _cmd = '3dvolreg' + _cmd = "3dvolreg" input_spec = VolregInputSpec output_spec = VolregOutputSpec def _format_arg(self, name, trait_spec, value): - if name == 'in_weight_volume' and not isinstance(value, tuple): + if name == "in_weight_volume" and not isinstance(value, tuple): value = (value, 0) return super(Volreg, self)._format_arg(name, trait_spec, value) class WarpInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dWarp', - argstr='%s', + desc="input file to 3dWarp", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_warp', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file', - keep_extension=True) + name_template="%s_warp", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + keep_extension=True, + ) tta2mni = traits.Bool( - desc='transform dataset from Talairach to MNI152', argstr='-tta2mni') + desc="transform dataset from Talairach to MNI152", argstr="-tta2mni" + ) mni2tta = traits.Bool( - desc='transform dataset from MNI152 to Talaraich', argstr='-mni2tta') + desc="transform dataset from MNI152 to Talaraich", argstr="-mni2tta" + ) matparent = File( - desc='apply transformation from 3dWarpDrive', - argstr='-matparent %s', - exists=True) + desc="apply transformation from 3dWarpDrive", + argstr="-matparent %s", + exists=True, + ) oblique_parent = File( - desc='Read in the oblique transformation matrix from an oblique ' - 'dataset and make cardinal dataset oblique to match', - argstr='-oblique_parent %s', - exists=True) + desc="Read in the oblique transformation matrix from an oblique " + "dataset and make cardinal dataset oblique to match", + argstr="-oblique_parent %s", + exists=True, + ) deoblique = traits.Bool( - desc='transform dataset from oblique to cardinal', argstr='-deoblique') + desc="transform dataset from oblique to cardinal", argstr="-deoblique" + ) interp = traits.Enum( - ('linear', 'cubic', 'NN', 'quintic'), - desc='spatial interpolation methods [default = linear]', - argstr='-%s') + ("linear", 
"cubic", "NN", "quintic"), + desc="spatial interpolation methods [default = linear]", + argstr="-%s", + ) gridset = File( - desc='copy grid of specified dataset', - argstr='-gridset %s', - exists=True) - newgrid = traits.Float( - desc='specify grid of this size (mm)', argstr='-newgrid %f') + desc="copy grid of specified dataset", argstr="-gridset %s", exists=True + ) + newgrid = traits.Float(desc="specify grid of this size (mm)", argstr="-newgrid %f") zpad = traits.Int( - desc='pad input dataset with N planes of zero on all sides.', - argstr='-zpad %d') + desc="pad input dataset with N planes of zero on all sides.", argstr="-zpad %d" + ) verbose = traits.Bool( - desc='Print out some information along the way.', argstr='-verb') - save_warp = traits.Bool( - desc='save warp as .mat file', requires=['verbose']) + desc="Print out some information along the way.", argstr="-verb" + ) + save_warp = traits.Bool(desc="save warp as .mat file", requires=["verbose"]) class WarpOutputSpec(TraitedSpec): - out_file = File(desc='Warped file.', exists=True) - warp_file = File(desc='warp transform .mat file') + out_file = File(desc="Warped file.", exists=True) + warp_file = File(desc="warp transform .mat file") class Warp(AFNICommand): @@ -3115,7 +3376,8 @@ class Warp(AFNICommand): >>> res = warp_2.run() # doctest: +SKIP """ - _cmd = '3dWarp' + + _cmd = "3dWarp" input_spec = WarpInputSpec output_spec = WarpOutputSpec @@ -3124,39 +3386,40 @@ def _run_interface(self, runtime): if self.inputs.save_warp: import numpy as np - warp_file = self._list_outputs()['warp_file'] - np.savetxt(warp_file, [runtime.stdout], fmt=str('%s')) + + warp_file = self._list_outputs()["warp_file"] + np.savetxt(warp_file, [runtime.stdout], fmt=str("%s")) return runtime def _list_outputs(self): outputs = super(Warp, self)._list_outputs() if self.inputs.save_warp: - outputs['warp_file'] = fname_presuffix(outputs['out_file'], - suffix='_transform.mat', - use_ext=False) + outputs["warp_file"] = fname_presuffix( + outputs["out_file"], suffix="_transform.mat", use_ext=False + ) return outputs class QwarpInputSpec(AFNICommandInputSpec): in_file = File( - desc= - 'Source image (opposite phase encoding direction than base image).', - argstr='-source %s', + desc="Source image (opposite phase encoding direction than base image).", + argstr="-source %s", mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) base_file = File( - desc= - 'Base image (opposite phase encoding direction than source image).', - argstr='-base %s', + desc="Base image (opposite phase encoding direction than source image).", + argstr="-base %s", mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - argstr='-prefix %s', - name_template='ppp_%s', - name_source=['in_file'], + argstr="-prefix %s", + name_template="ppp_%s", + name_source=["in_file"], desc="""\ Sets the prefix/suffix for the output datasets. * The source dataset is warped to match the base @@ -3190,482 +3453,514 @@ class QwarpInputSpec(AFNICommandInputSpec): * You can easily compute the inverse later, say by a command like 3dNwarpCat -prefix Z_WARPINV 'INV(Z_WARP+tlrc)' or the inverse can be computed as needed in 3dNwarpApply, like - 3dNwarpApply -nwarp 'INV(Z_WARP+tlrc)' -source Dataset.nii ...""") + 3dNwarpApply -nwarp 'INV(Z_WARP+tlrc)' -source Dataset.nii ...""", + ) resample = traits.Bool( - desc='This option simply resamples the source dataset to match the' - 'base dataset grid. 
You can use this if the two datasets'
-        'overlap well (as seen in the AFNI GUI), but are not on the'
-        'same 3D grid.'
-        '* If they don\'t overlap well, allineate them first'
-        '* The reampling here is done with the'
-        '\'wsinc5\' method, which has very little blurring artifact.'
-        '* If the base and source datasets ARE on the same 3D grid,'
-        'then the -resample option will be ignored.'
-        '* You CAN use -resample with these 3dQwarp options:'
-        '-plusminus -inilev -iniwarp -duplo',
-        argstr='-resample')
+        desc="This option simply resamples the source dataset to match the"
+        "base dataset grid. You can use this if the two datasets"
+        "overlap well (as seen in the AFNI GUI), but are not on the"
+        "same 3D grid."
+        "* If they don't overlap well, allineate them first"
+        "* The resampling here is done with the"
+        "'wsinc5' method, which has very little blurring artifact."
+        "* If the base and source datasets ARE on the same 3D grid,"
+        "then the -resample option will be ignored."
+        "* You CAN use -resample with these 3dQwarp options:"
+        "-plusminus -inilev -iniwarp -duplo",
+        argstr="-resample",
+    )
     allineate = traits.Bool(
-        desc='This option will make 3dQwarp run 3dAllineate first, to align '
-        'the source dataset to the base with an affine transformation. '
-        'It will then use that alignment as a starting point for the '
-        'nonlinear warping.',
-        argstr='-allineate')
+        desc="This option will make 3dQwarp run 3dAllineate first, to align "
+        "the source dataset to the base with an affine transformation. "
+        "It will then use that alignment as a starting point for the "
+        "nonlinear warping.",
+        argstr="-allineate",
+    )
     allineate_opts = traits.Str(
-        desc='add extra options to the 3dAllineate command to be run by '
-        '3dQwarp.',
-        argstr='-allineate_opts %s',
-        requires=['allineate'])
-    nowarp = traits.Bool(desc='Do not save the _WARP file.', argstr='-nowarp')
+        desc="add extra options to the 3dAllineate command to be run by " "3dQwarp.",
+        argstr="-allineate_opts %s",
+        requires=["allineate"],
+    )
+    nowarp = traits.Bool(desc="Do not save the _WARP file.", argstr="-nowarp")
     iwarp = traits.Bool(
-        desc='Do compute and save the _WARPINV file.',
-        argstr='-iwarp',
-        xor=['plusminus'])
+        desc="Do compute and save the _WARPINV file.",
+        argstr="-iwarp",
+        xor=["plusminus"],
+    )
     pear = traits.Bool(
-        desc='Use strict Pearson correlation for matching.'
-        '* Not usually recommended, since the \'clipped Pearson\' method'
-        'used by default will reduce the impact of outlier values.',
-        argstr='-pear')
+        desc="Use strict Pearson correlation for matching."
+        "* Not usually recommended, since the 'clipped Pearson' method"
+        "used by default will reduce the impact of outlier values.",
+        argstr="-pear",
+    )
     noneg = traits.Bool(
-        desc='Replace negative values in either input volume with 0.'
-        '* If there ARE negative input values, and you do NOT use -noneg,'
-        'then strict Pearson correlation will be used, since the \'clipped\''
-        'method only is implemented for non-negative volumes.'
-        '* \'-noneg\' is not the default, since there might be situations where'
-        'you want to align datasets with positive and negative values mixed.'
-        '* But, in many cases, the negative values in a dataset are just the'
-        'result of interpolation artifacts (or other peculiarities), and so'
-        'they should be ignored. That is what \'-noneg\' is for.',
-        argstr='-noneg')
+        desc="Replace negative values in either input volume with 0."
+        "* If there ARE negative input values, and you do NOT use -noneg,"
+        "then strict Pearson correlation will be used, since the 'clipped'"
+        "method only is implemented for non-negative volumes."
+        "* '-noneg' is not the default, since there might be situations where"
+        "you want to align datasets with positive and negative values mixed."
+        "* But, in many cases, the negative values in a dataset are just the"
+        "result of interpolation artifacts (or other peculiarities), and so"
+        "they should be ignored. That is what '-noneg' is for.",
+        argstr="-noneg",
+    )
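The core Qwarp inputs defined so far (in_file, base_file, out_file, resample) are enough for a basic nonlinear alignment. A minimal sketch (hypothetical paths; requires AFNI's 3dQwarp on PATH; not a doctest from this diff):

from nipype.interfaces import afni

qwarp = afni.Qwarp()
qwarp.inputs.in_file = "structural.nii"  # source image (hypothetical)
qwarp.inputs.base_file = "mni.nii"       # base image (hypothetical)
qwarp.inputs.resample = True             # put the source on the base grid first
res = qwarp.run()  # doctest: +SKIP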
+ "* If there ARE negative input values, and you do NOT use -noneg," + "then strict Pearson correlation will be used, since the 'clipped'" + "method only is implemented for non-negative volumes." + "* '-noneg' is not the default, since there might be situations where" + "you want to align datasets with positive and negative values mixed." + "* But, in many cases, the negative values in a dataset are just the" + "result of interpolation artifacts (or other peculiarities), and so" + "they should be ignored. That is what '-noneg' is for.", + argstr="-noneg", + ) nopenalty = traits.Bool( - desc='Replace negative values in either input volume with 0.' - '* If there ARE negative input values, and you do NOT use -noneg,' - 'then strict Pearson correlation will be used, since the \'clipped\'' - 'method only is implemented for non-negative volumes.' - '* \'-noneg\' is not the default, since there might be situations where' - 'you want to align datasets with positive and negative values mixed.' - '* But, in many cases, the negative values in a dataset are just the' - 'result of interpolation artifacts (or other peculiarities), and so' - 'they should be ignored. That is what \'-noneg\' is for.', - argstr='-nopenalty') + desc="Replace negative values in either input volume with 0." + "* If there ARE negative input values, and you do NOT use -noneg," + "then strict Pearson correlation will be used, since the 'clipped'" + "method only is implemented for non-negative volumes." + "* '-noneg' is not the default, since there might be situations where" + "you want to align datasets with positive and negative values mixed." + "* But, in many cases, the negative values in a dataset are just the" + "result of interpolation artifacts (or other peculiarities), and so" + "they should be ignored. That is what '-noneg' is for.", + argstr="-nopenalty", + ) penfac = traits.Float( - desc='Use this value to weight the penalty.' - 'The default value is 1.Larger values mean the' - 'penalty counts more, reducing grid distortions,' - 'insha\'Allah; \'-nopenalty\' is the same as \'-penfac 0\'.' - ' -->>* [23 Sep 2013] -- Zhark increased the default value of' - ' the penalty by a factor of 5, and also made it get' - ' progressively larger with each level of refinement.' - ' Thus, warping results will vary from earlier instances' - ' of 3dQwarp.' - ' * The progressive increase in the penalty at higher levels' - ' means that the \'cost function\' can actually look like the' - ' alignment is getting worse when the levels change.' - ' * IF you wish to turn off this progression, for whatever' - ' reason (e.g., to keep compatibility with older results),' - ' use the option \'-penold\'.To be completely compatible with' - ' the older 3dQwarp, you\'ll also have to use \'-penfac 0.2\'.', - argstr='-penfac %f') + desc="Use this value to weight the penalty." + "The default value is 1.Larger values mean the" + "penalty counts more, reducing grid distortions," + "insha'Allah; '-nopenalty' is the same as '-penfac 0'." + " -->>* [23 Sep 2013] -- Zhark increased the default value of" + " the penalty by a factor of 5, and also made it get" + " progressively larger with each level of refinement." + " Thus, warping results will vary from earlier instances" + " of 3dQwarp." + " * The progressive increase in the penalty at higher levels" + " means that the 'cost function' can actually look like the" + " alignment is getting worse when the levels change." 
+ " * IF you wish to turn off this progression, for whatever" + " reason (e.g., to keep compatibility with older results)," + " use the option '-penold'.To be completely compatible with" + " the older 3dQwarp, you'll also have to use '-penfac 0.2'.", + argstr="-penfac %f", + ) noweight = traits.Bool( - desc='If you want a binary weight (the old default), use this option.' - 'That is, each voxel in the base volume automask will be' - 'weighted the same in the computation of the cost functional.', - argstr='-noweight') + desc="If you want a binary weight (the old default), use this option." + "That is, each voxel in the base volume automask will be" + "weighted the same in the computation of the cost functional.", + argstr="-noweight", + ) weight = File( - desc='Instead of computing the weight from the base dataset,' - 'directly input the weight volume from dataset \'www\'.' - '* Useful if you know what over parts of the base image you' - 'want to emphasize or de-emphasize the matching functional.', - argstr='-weight %s', - exists=True) + desc="Instead of computing the weight from the base dataset," + "directly input the weight volume from dataset 'www'." + "* Useful if you know what over parts of the base image you" + "want to emphasize or de-emphasize the matching functional.", + argstr="-weight %s", + exists=True, + ) wball = traits.List( traits.Int(), - desc='-wball x y z r f' - 'Enhance automatic weight from \'-useweight\' by a factor' - 'of 1+f*Gaussian(FWHM=r) centered in the base image at' - 'DICOM coordinates (x,y,z) and with radius \'r\'. The' - 'goal of this option is to try and make the alignment' - 'better in a specific part of the brain.' - '* Example: -wball 0 14 6 30 40' - 'to emphasize the thalamic area (in MNI/Talairach space).' - '* The \'r\' parameter must be positive!' - '* The \'f\' parameter must be between 1 and 100 (inclusive).' - '* \'-wball\' does nothing if you input your own weight' - 'with the \'-weight\' option.' - '* \'-wball\' does change the binary weight created by' - 'the \'-noweight\' option.' - '* You can only use \'-wball\' once in a run of 3dQwarp.' - '*** The effect of \'-wball\' is not dramatic. The example' - 'above makes the average brain image across a collection' - 'of subjects a little sharper in the thalamic area, which' - 'might have some small value. If you care enough about' - 'alignment to use \'-wball\', then you should examine the' - 'results from 3dQwarp for each subject, to see if the' - 'alignments are good enough for your purposes.', - argstr='-wball %s', + desc="-wball x y z r f" + "Enhance automatic weight from '-useweight' by a factor" + "of 1+f*Gaussian(FWHM=r) centered in the base image at" + "DICOM coordinates (x,y,z) and with radius 'r'. The" + "goal of this option is to try and make the alignment" + "better in a specific part of the brain." + "* Example: -wball 0 14 6 30 40" + "to emphasize the thalamic area (in MNI/Talairach space)." + "* The 'r' parameter must be positive!" + "* The 'f' parameter must be between 1 and 100 (inclusive)." + "* '-wball' does nothing if you input your own weight" + "with the '-weight' option." + "* '-wball' does change the binary weight created by" + "the '-noweight' option." + "* You can only use '-wball' once in a run of 3dQwarp." + "*** The effect of '-wball' is not dramatic. The example" + "above makes the average brain image across a collection" + "of subjects a little sharper in the thalamic area, which" + "might have some small value. 
If you care enough about" + "alignment to use '-wball', then you should examine the" + "results from 3dQwarp for each subject, to see if the" + "alignments are good enough for your purposes.", + argstr="-wball %s", minlen=5, - maxlen=5) - traits.Tuple((traits.Float(), traits.Float()), argstr='-bpass %f %f') + maxlen=5, + ) + traits.Tuple((traits.Float(), traits.Float()), argstr="-bpass %f %f") wmask = traits.Tuple( (File(exists=True), traits.Float()), - desc='-wmask ws f' - 'Similar to \'-wball\', but here, you provide a dataset \'ws\'' - 'that indicates where to increase the weight.' - '* The \'ws\' dataset must be on the same 3D grid as the base dataset.' - '* \'ws\' is treated as a mask -- it only matters where it' - 'is nonzero -- otherwise, the values inside are not used.' - '* After \'ws\' comes the factor \'f\' by which to increase the' - 'automatically computed weight. Where \'ws\' is nonzero,' - 'the weighting will be multiplied by (1+f).' - '* As with \'-wball\', the factor \'f\' should be between 1 and 100.' - '* You cannot use \'-wball\' and \'-wmask\' together!', - argstr='-wpass %s %f') + desc="-wmask ws f" + "Similar to '-wball', but here, you provide a dataset 'ws'" + "that indicates where to increase the weight." + "* The 'ws' dataset must be on the same 3D grid as the base dataset." + "* 'ws' is treated as a mask -- it only matters where it" + "is nonzero -- otherwise, the values inside are not used." + "* After 'ws' comes the factor 'f' by which to increase the" + "automatically computed weight. Where 'ws' is nonzero," + "the weighting will be multiplied by (1+f)." + "* As with '-wball', the factor 'f' should be between 1 and 100." + "* You cannot use '-wball' and '-wmask' together!", + argstr="-wpass %s %f", + ) out_weight_file = File( - argstr='-wtprefix %s', - desc='Write the weight volume to disk as a dataset') + argstr="-wtprefix %s", desc="Write the weight volume to disk as a dataset" + ) blur = traits.List( traits.Float(), - desc='Gaussian blur the input images by \'bb\' (FWHM) voxels before' - 'doing the alignment (the output dataset will not be blurred).' - 'The default is 2.345 (for no good reason).' - '* Optionally, you can provide 2 values for \'bb\', and then' - 'the first one is applied to the base volume, the second' - 'to the source volume.' - '-->>* e.g., \'-blur 0 3\' to skip blurring the base image' - '(if the base is a blurry template, for example).' - '* A negative blur radius means to use 3D median filtering,' - 'rather than Gaussian blurring. This type of filtering will' - 'better preserve edges, which can be important in alignment.' - '* If the base is a template volume that is already blurry,' - 'you probably don\'t want to blur it again, but blurring' - 'the source volume a little is probably a good idea, to' - 'help the program avoid trying to match tiny features.' - '* Note that -duplo will blur the volumes some extra' - 'amount for the initial small-scale warping, to make' - 'that phase of the program converge more rapidly.', - argstr='-blur %s', + desc="Gaussian blur the input images by 'bb' (FWHM) voxels before" + "doing the alignment (the output dataset will not be blurred)." + "The default is 2.345 (for no good reason)." + "* Optionally, you can provide 2 values for 'bb', and then" + "the first one is applied to the base volume, the second" + "to the source volume." + "-->>* e.g., '-blur 0 3' to skip blurring the base image" + "(if the base is a blurry template, for example)." 
+ "* A negative blur radius means to use 3D median filtering," + "rather than Gaussian blurring. This type of filtering will" + "better preserve edges, which can be important in alignment." + "* If the base is a template volume that is already blurry," + "you probably don't want to blur it again, but blurring" + "the source volume a little is probably a good idea, to" + "help the program avoid trying to match tiny features." + "* Note that -duplo will blur the volumes some extra" + "amount for the initial small-scale warping, to make" + "that phase of the program converge more rapidly.", + argstr="-blur %s", minlen=1, - maxlen=2) + maxlen=2, + ) pblur = traits.List( traits.Float(), - desc='Use progressive blurring; that is, for larger patch sizes,' - 'the amount of blurring is larger. The general idea is to' - 'avoid trying to match finer details when the patch size' - 'and incremental warps are coarse. When \'-blur\' is used' - 'as well, it sets a minimum amount of blurring that will' - 'be used. [06 Aug 2014 -- \'-pblur\' may become the default someday].' - '* You can optionally give the fraction of the patch size that' - 'is used for the progressive blur by providing a value between' - '0 and 0.25 after \'-pblur\'. If you provide TWO values, the' - 'the first fraction is used for progressively blurring the' - 'base image and the second for the source image. The default' - 'parameters when just \'-pblur\' is given is the same as giving' - 'the options as \'-pblur 0.09 0.09\'.' - '* \'-pblur\' is useful when trying to match 2 volumes with high' - 'amounts of detail; e.g, warping one subject\'s brain image to' - 'match another\'s, or trying to warp to match a detailed template.' - '* Note that using negative values with \'-blur\' means that the' - 'progressive blurring will be done with median filters, rather' - 'than Gaussian linear blurring.' - '-->>*** The combination of the -allineate and -pblur options will make' - 'the results of using 3dQwarp to align to a template somewhat' - 'less sensitive to initial head position and scaling.', - argstr='-pblur %s', + desc="Use progressive blurring; that is, for larger patch sizes," + "the amount of blurring is larger. The general idea is to" + "avoid trying to match finer details when the patch size" + "and incremental warps are coarse. When '-blur' is used" + "as well, it sets a minimum amount of blurring that will" + "be used. [06 Aug 2014 -- '-pblur' may become the default someday]." + "* You can optionally give the fraction of the patch size that" + "is used for the progressive blur by providing a value between" + "0 and 0.25 after '-pblur'. If you provide TWO values, the" + "the first fraction is used for progressively blurring the" + "base image and the second for the source image. The default" + "parameters when just '-pblur' is given is the same as giving" + "the options as '-pblur 0.09 0.09'." + "* '-pblur' is useful when trying to match 2 volumes with high" + "amounts of detail; e.g, warping one subject's brain image to" + "match another's, or trying to warp to match a detailed template." + "* Note that using negative values with '-blur' means that the" + "progressive blurring will be done with median filters, rather" + "than Gaussian linear blurring." 
+ "-->>*** The combination of the -allineate and -pblur options will make" + "the results of using 3dQwarp to align to a template somewhat" + "less sensitive to initial head position and scaling.", + argstr="-pblur %s", minlen=1, - maxlen=2) + maxlen=2, + ) emask = File( - desc='Here, \'ee\' is a dataset to specify a mask of voxels' - 'to EXCLUDE from the analysis -- all voxels in \'ee\'' - 'that are NONZERO will not be used in the alignment.' - '* The base image always automasked -- the emask is' - 'extra, to indicate voxels you definitely DON\'T want' - 'included in the matching process, even if they are' - 'inside the brain.', - argstr='-emask %s', + desc="Here, 'ee' is a dataset to specify a mask of voxels" + "to EXCLUDE from the analysis -- all voxels in 'ee'" + "that are NONZERO will not be used in the alignment." + "* The base image always automasked -- the emask is" + "extra, to indicate voxels you definitely DON'T want" + "included in the matching process, even if they are" + "inside the brain.", + argstr="-emask %s", exists=True, - copyfile=False) - noXdis = traits.Bool( - desc='Warp will not displace in x direction', argstr='-noXdis') - noYdis = traits.Bool( - desc='Warp will not displace in y direction', argstr='-noYdis') - noZdis = traits.Bool( - desc='Warp will not displace in z direction', argstr='-noZdis') + copyfile=False, + ) + noXdis = traits.Bool(desc="Warp will not displace in x direction", argstr="-noXdis") + noYdis = traits.Bool(desc="Warp will not displace in y direction", argstr="-noYdis") + noZdis = traits.Bool(desc="Warp will not displace in z direction", argstr="-noZdis") iniwarp = traits.List( File(exists=True, copyfile=False), - desc='A dataset with an initial nonlinear warp to use.' - '* If this option is not used, the initial warp is the identity.' - '* You can specify a catenation of warps (in quotes) here, as in' - 'program 3dNwarpApply.' - '* As a special case, if you just input an affine matrix in a .1D' - 'file, that will work also -- it is treated as giving the initial' + desc="A dataset with an initial nonlinear warp to use." + "* If this option is not used, the initial warp is the identity." + "* You can specify a catenation of warps (in quotes) here, as in" + "program 3dNwarpApply." + "* As a special case, if you just input an affine matrix in a .1D" + "file, that will work also -- it is treated as giving the initial" 'warp via the string "IDENT(base_dataset) matrix_file.aff12.1D".' - '* You CANNOT use this option with -duplo !!' - '* -iniwarp is usually used with -inilev to re-start 3dQwarp from' - 'a previous stopping point.', - argstr='-iniwarp %s', - xor=['duplo']) + "* You CANNOT use this option with -duplo !!" + "* -iniwarp is usually used with -inilev to re-start 3dQwarp from" + "a previous stopping point.", + argstr="-iniwarp %s", + xor=["duplo"], + ) inilev = traits.Int( - desc='The initial refinement \'level\' at which to start.' - '* Usually used with -iniwarp; CANNOT be used with -duplo.' - '* The combination of -inilev and -iniwarp lets you take the' - 'results of a previous 3dQwarp run and refine them further:' - 'Note that the source dataset in the second run is the SAME as' - 'in the first run. If you don\'t see why this is necessary,' - 'then you probably need to seek help from an AFNI guru.', - argstr='-inilev %d', - xor=['duplo']) + desc="The initial refinement 'level' at which to start." + "* Usually used with -iniwarp; CANNOT be used with -duplo." 
+ "* The combination of -inilev and -iniwarp lets you take the" + "results of a previous 3dQwarp run and refine them further:" + "Note that the source dataset in the second run is the SAME as" + "in the first run. If you don't see why this is necessary," + "then you probably need to seek help from an AFNI guru.", + argstr="-inilev %d", + xor=["duplo"], + ) minpatch = traits.Int( - desc='* The value of mm should be an odd integer.' - '* The default value of mm is 25.' - '* For more accurate results than mm=25, try 19 or 13.' - '* The smallest allowed patch size is 5.' - '* You may want stop at a larger patch size (say 7 or 9) and use' - 'the -Qfinal option to run that final level with quintic warps,' - 'which might run faster and provide the same degree of warp detail.' - '* Trying to make two different brain volumes match in fine detail' - 'is usually a waste of time, especially in humans. There is too' - 'much variability in anatomy to match gyrus to gyrus accurately.' - 'For this reason, the default minimum patch size is 25 voxels.' - 'Using a smaller \'-minpatch\' might try to force the warp to' - 'match features that do not match, and the result can be useless' - 'image distortions -- another reason to LOOK AT THE RESULTS.', - argstr='-minpatch %d') + desc="* The value of mm should be an odd integer." + "* The default value of mm is 25." + "* For more accurate results than mm=25, try 19 or 13." + "* The smallest allowed patch size is 5." + "* You may want stop at a larger patch size (say 7 or 9) and use" + "the -Qfinal option to run that final level with quintic warps," + "which might run faster and provide the same degree of warp detail." + "* Trying to make two different brain volumes match in fine detail" + "is usually a waste of time, especially in humans. There is too" + "much variability in anatomy to match gyrus to gyrus accurately." + "For this reason, the default minimum patch size is 25 voxels." + "Using a smaller '-minpatch' might try to force the warp to" + "match features that do not match, and the result can be useless" + "image distortions -- another reason to LOOK AT THE RESULTS.", + argstr="-minpatch %d", + ) maxlev = traits.Int( - desc='The initial refinement \'level\' at which to start.' - '* Usually used with -iniwarp; CANNOT be used with -duplo.' - '* The combination of -inilev and -iniwarp lets you take the' - 'results of a previous 3dQwarp run and refine them further:' - 'Note that the source dataset in the second run is the SAME as' - 'in the first run. If you don\'t see why this is necessary,' - 'then you probably need to seek help from an AFNI guru.', - argstr='-maxlev %d', - xor=['duplo'], - position=-1) + desc="The initial refinement 'level' at which to start." + "* Usually used with -iniwarp; CANNOT be used with -duplo." + "* The combination of -inilev and -iniwarp lets you take the" + "results of a previous 3dQwarp run and refine them further:" + "Note that the source dataset in the second run is the SAME as" + "in the first run. If you don't see why this is necessary," + "then you probably need to seek help from an AFNI guru.", + argstr="-maxlev %d", + xor=["duplo"], + position=-1, + ) gridlist = File( - desc='This option provides an alternate way to specify the patch' - 'grid sizes used in the warp optimization process. \'gl\' is' - 'a 1D file with a list of patches to use -- in most cases,' - 'you will want to use it in the following form:' - '-gridlist \'1D: 0 151 101 75 51\'' - '* Here, a 0 patch size means the global domain. 
Patch sizes' - 'otherwise should be odd integers >= 5.' - '* If you use the \'0\' patch size again after the first position,' - 'you will actually get an iteration at the size of the' - 'default patch level 1, where the patch sizes are 75% of' - 'the volume dimension. There is no way to force the program' - 'to literally repeat the sui generis step of lev=0.' - '* You cannot use -gridlist with -duplo or -plusminus!', - argstr='-gridlist %s', + desc="This option provides an alternate way to specify the patch" + "grid sizes used in the warp optimization process. 'gl' is" + "a 1D file with a list of patches to use -- in most cases," + "you will want to use it in the following form:" + "-gridlist '1D: 0 151 101 75 51'" + "* Here, a 0 patch size means the global domain. Patch sizes" + "otherwise should be odd integers >= 5." + "* If you use the '0' patch size again after the first position," + "you will actually get an iteration at the size of the" + "default patch level 1, where the patch sizes are 75% of" + "the volume dimension. There is no way to force the program" + "to literally repeat the sui generis step of lev=0." + "* You cannot use -gridlist with -duplo or -plusminus!", + argstr="-gridlist %s", exists=True, copyfile=False, - xor=['duplo', 'plusminus']) + xor=["duplo", "plusminus"], + ) allsave = traits.Bool( - desc='This option lets you save the output warps from each level' - 'of the refinement process. Mostly used for experimenting.' - '* Cannot be used with -nopadWARP, -duplo, or -plusminus.' - '* Will only save all the outputs if the program terminates' - 'normally -- if it crashes, or freezes, then all these' - 'warps are lost.', - argstr='-allsave', - xor=['nopadWARP', 'duplo', 'plusminus']) + desc="This option lets you save the output warps from each level" + "of the refinement process. Mostly used for experimenting." + "* Cannot be used with -nopadWARP, -duplo, or -plusminus." + "* Will only save all the outputs if the program terminates" + "normally -- if it crashes, or freezes, then all these" + "warps are lost.", + argstr="-allsave", + xor=["nopadWARP", "duplo", "plusminus"], + ) duplo = traits.Bool( - desc='Start off with 1/2 scale versions of the volumes,' - 'for getting a speedy coarse first alignment.' - '* Then scales back up to register the full volumes.' - 'The goal is greater speed, and it seems to help this' - 'positively piggish program to be more expeditious.' - '* However, accuracy is somewhat lower with \'-duplo\',' - 'for reasons that currenly elude Zhark; for this reason,' - 'the Emperor does not usually use \'-duplo\'.', - argstr='-duplo', - xor=[ - 'gridlist', 'maxlev', 'inilev', 'iniwarp', 'plusminus', 'allsave' - ]) + desc="Start off with 1/2 scale versions of the volumes," + "for getting a speedy coarse first alignment." + "* Then scales back up to register the full volumes." + "The goal is greater speed, and it seems to help this" + "positively piggish program to be more expeditious." + "* However, accuracy is somewhat lower with '-duplo'," + "for reasons that currenly elude Zhark; for this reason," + "the Emperor does not usually use '-duplo'.", + argstr="-duplo", + xor=["gridlist", "maxlev", "inilev", "iniwarp", "plusminus", "allsave"], + ) workhard = traits.Bool( - desc='Iterate more times, which can help when the volumes are' - 'hard to align at all, or when you hope to get a more precise' - 'alignment.' - '* Slows the program down (possibly a lot), of course.' 
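Note that the Bool trait only ever emits a plain '-workhard'; the ranged form -workhard:4:7 mentioned in this help text is not expressible through it. If needed, it could be passed through nipype's generic args pass-through input instead (a sketch under that assumption; hypothetical paths):

from nipype.interfaces import afni

qwarp = afni.Qwarp()
qwarp.inputs.in_file = "epi.nii"
qwarp.inputs.base_file = "template.nii"
qwarp.inputs.args = "-workhard:4:7"  # extra iterations at levels 4 through 7 only
res = qwarp.run()  # doctest: +SKIP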
- '* When you combine \'-workhard\' with \'-duplo\', only the' - 'full size volumes get the extra iterations.' - '* For finer control over which refinement levels work hard,' - 'you can use this option in the form (for example)' - ' -workhard:4:7' - 'which implies the extra iterations will be done at levels' - '4, 5, 6, and 7, but not otherwise.' - '* You can also use \'-superhard\' to iterate even more, but' - 'this extra option will REALLY slow things down.' - '-->>* Under most circumstances, you should not need to use either' - '-workhard or -superhard.' - '-->>* The fastest way to register to a template image is via the' - '-duplo option, and without the -workhard or -superhard options.' - '-->>* If you use this option in the form \'-Workhard\' (first letter' - 'in upper case), then the second iteration at each level is' - 'done with quintic polynomial warps.', - argstr='-workhard', - xor=['boxopt', 'ballopt']) + desc="Iterate more times, which can help when the volumes are" + "hard to align at all, or when you hope to get a more precise" + "alignment." + "* Slows the program down (possibly a lot), of course." + "* When you combine '-workhard' with '-duplo', only the" + "full size volumes get the extra iterations." + "* For finer control over which refinement levels work hard," + "you can use this option in the form (for example)" + " -workhard:4:7" + "which implies the extra iterations will be done at levels" + "4, 5, 6, and 7, but not otherwise." + "* You can also use '-superhard' to iterate even more, but" + "this extra option will REALLY slow things down." + "-->>* Under most circumstances, you should not need to use either" + "-workhard or -superhard." + "-->>* The fastest way to register to a template image is via the" + "-duplo option, and without the -workhard or -superhard options." + "-->>* If you use this option in the form '-Workhard' (first letter" + "in upper case), then the second iteration at each level is" + "done with quintic polynomial warps.", + argstr="-workhard", + xor=["boxopt", "ballopt"], + ) Qfinal = traits.Bool( - desc='At the finest patch size (the final level), use Hermite' - 'quintic polynomials for the warp instead of cubic polynomials.' - '* In a 3D \'patch\', there are 2x2x2x3=24 cubic polynomial basis' - 'function parameters over which to optimize (2 polynomials' - 'dependent on each of the x,y,z directions, and 3 different' - 'directions of displacement).' - '* There are 3x3x3x3=81 quintic polynomial parameters per patch.' - '* With -Qfinal, the final level will have more detail in' - 'the allowed warps, at the cost of yet more CPU time.' - '* However, no patch below 7x7x7 in size will be done with quintic' - 'polynomials.' - '* This option is also not usually needed, and is experimental.', - argstr='-Qfinal') + desc="At the finest patch size (the final level), use Hermite" + "quintic polynomials for the warp instead of cubic polynomials." + "* In a 3D 'patch', there are 2x2x2x3=24 cubic polynomial basis" + "function parameters over which to optimize (2 polynomials" + "dependent on each of the x,y,z directions, and 3 different" + "directions of displacement)." + "* There are 3x3x3x3=81 quintic polynomial parameters per patch." + "* With -Qfinal, the final level will have more detail in" + "the allowed warps, at the cost of yet more CPU time." + "* However, no patch below 7x7x7 in size will be done with quintic" + "polynomials." 
+ "* This option is also not usually needed, and is experimental.", + argstr="-Qfinal", + ) Qonly = traits.Bool( - desc='Use Hermite quintic polynomials at all levels.' - '* Very slow (about 4 times longer). Also experimental.' - '* Will produce a (discrete representation of a) C2 warp.', - argstr='-Qonly') + desc="Use Hermite quintic polynomials at all levels." + "* Very slow (about 4 times longer). Also experimental." + "* Will produce a (discrete representation of a) C2 warp.", + argstr="-Qonly", + ) plusminus = traits.Bool( - desc='Normally, the warp displacements dis(x) are defined to match' - 'base(x) to source(x+dis(x)). With this option, the match' - 'is between base(x-dis(x)) and source(x+dis(x)) -- the two' - 'images \'meet in the middle\'.' - '* One goal is to mimic the warping done to MRI EPI data by' - 'field inhomogeneities, when registering between a \'blip up\'' - 'and a \'blip down\' down volume, which will have opposite' - 'distortions.' - '* Define Wp(x) = x+dis(x) and Wm(x) = x-dis(x). Then since' - 'base(Wm(x)) matches source(Wp(x)), by substituting INV(Wm(x))' - 'wherever we see x, we have base(x) matches source(Wp(INV(Wm(x))));' - 'that is, the warp V(x) that one would get from the \'usual\' way' - 'of running 3dQwarp is V(x) = Wp(INV(Wm(x))).' - '* Conversely, we can calculate Wp(x) in terms of V(x) as follows:' - 'If V(x) = x + dv(x), define Vh(x) = x + dv(x)/2;' - 'then Wp(x) = V(INV(Vh(x)))' - '* With the above formulas, it is possible to compute Wp(x) from' - 'V(x) and vice-versa, using program 3dNwarpCalc. The requisite' - 'commands are left as an exercise for the aspiring AFNI Jedi Master.' - '* You can use the semi-secret \'-pmBASE\' option to get the V(x)' - 'warp and the source dataset warped to base space, in addition to' - 'the Wp(x) \'_PLUS\' and Wm(x) \'_MINUS\' warps.' - '-->>* Alas: -plusminus does not work with -duplo or -allineate :-(' - '* However, you can use -iniwarp with -plusminus :-)' - '-->>* The outputs have _PLUS (from the source dataset) and _MINUS' - '(from the base dataset) in their filenames, in addition to' - 'the prefix. The -iwarp option, if present, will be ignored.', - argstr='-plusminus', - xor=['duplo', 'allsave', 'iwarp']) + desc="Normally, the warp displacements dis(x) are defined to match" + "base(x) to source(x+dis(x)). With this option, the match" + "is between base(x-dis(x)) and source(x+dis(x)) -- the two" + "images 'meet in the middle'." + "* One goal is to mimic the warping done to MRI EPI data by" + "field inhomogeneities, when registering between a 'blip up'" + "and a 'blip down' down volume, which will have opposite" + "distortions." + "* Define Wp(x) = x+dis(x) and Wm(x) = x-dis(x). Then since" + "base(Wm(x)) matches source(Wp(x)), by substituting INV(Wm(x))" + "wherever we see x, we have base(x) matches source(Wp(INV(Wm(x))));" + "that is, the warp V(x) that one would get from the 'usual' way" + "of running 3dQwarp is V(x) = Wp(INV(Wm(x)))." + "* Conversely, we can calculate Wp(x) in terms of V(x) as follows:" + "If V(x) = x + dv(x), define Vh(x) = x + dv(x)/2;" + "then Wp(x) = V(INV(Vh(x)))" + "* With the above formulas, it is possible to compute Wp(x) from" + "V(x) and vice-versa, using program 3dNwarpCalc. The requisite" + "commands are left as an exercise for the aspiring AFNI Jedi Master." + "* You can use the semi-secret '-pmBASE' option to get the V(x)" + "warp and the source dataset warped to base space, in addition to" + "the Wp(x) '_PLUS' and Wm(x) '_MINUS' warps." 
+ "-->>* Alas: -plusminus does not work with -duplo or -allineate :-(" + "* However, you can use -iniwarp with -plusminus :-)" + "-->>* The outputs have _PLUS (from the source dataset) and _MINUS" + "(from the base dataset) in their filenames, in addition to" + "the prefix. The -iwarp option, if present, will be ignored.", + argstr="-plusminus", + xor=["duplo", "allsave", "iwarp"], + ) nopad = traits.Bool( - desc='Do NOT use zero-padding on the 3D base and source images.' - '[Default == zero-pad, if needed]' - '* The underlying model for deformations goes to zero at the' - 'edge of the volume being warped. However, if there is' - 'significant data near an edge of the volume, then it won\'t' - 'get displaced much, and so the results might not be good.' - '* Zero padding is designed as a way to work around this potential' - 'problem. You should NOT need the \'-nopad\' option for any' - 'reason that Zhark can think of, but it is here to be symmetrical' - 'with 3dAllineate.' - '* Note that the output (warped from source) dataset will be on the' - 'base dataset grid whether or not zero-padding is allowed. However,' - 'unless you use the following option, allowing zero-padding (i.e.,' - 'the default operation) will make the output WARP dataset(s) be' - 'on a larger grid (also see \'-expad\' below).', - argstr='-nopad') + desc="Do NOT use zero-padding on the 3D base and source images." + "[Default == zero-pad, if needed]" + "* The underlying model for deformations goes to zero at the" + "edge of the volume being warped. However, if there is" + "significant data near an edge of the volume, then it won't" + "get displaced much, and so the results might not be good." + "* Zero padding is designed as a way to work around this potential" + "problem. You should NOT need the '-nopad' option for any" + "reason that Zhark can think of, but it is here to be symmetrical" + "with 3dAllineate." + "* Note that the output (warped from source) dataset will be on the" + "base dataset grid whether or not zero-padding is allowed. However," + "unless you use the following option, allowing zero-padding (i.e.," + "the default operation) will make the output WARP dataset(s) be" + "on a larger grid (also see '-expad' below).", + argstr="-nopad", + ) nopadWARP = traits.Bool( - desc='If for some reason you require the warp volume to' - 'match the base volume, then use this option to have the output' - 'WARP dataset(s) truncated.', - argstr='-nopadWARP', - xor=['allsave', 'expad']) + desc="If for some reason you require the warp volume to" + "match the base volume, then use this option to have the output" + "WARP dataset(s) truncated.", + argstr="-nopadWARP", + xor=["allsave", "expad"], + ) expad = traits.Int( - desc='This option instructs the program to pad the warp by an extra' - '\'EE\' voxels (and then 3dQwarp starts optimizing it).' - '* This option is seldom needed, but can be useful if you' - 'might later catenate the nonlinear warp -- via 3dNwarpCat --' - 'with an affine transformation that contains a large shift.' - 'Under that circumstance, the nonlinear warp might be shifted' - 'partially outside its original grid, so expanding that grid' - 'can avoid this problem.' - '* Note that this option perforce turns off \'-nopadWARP\'.', - argstr='-expad %d', - xor=['nopadWARP']) + desc="This option instructs the program to pad the warp by an extra" + "'EE' voxels (and then 3dQwarp starts optimizing it)." 
+ "* This option is seldom needed, but can be useful if you" + "might later catenate the nonlinear warp -- via 3dNwarpCat --" + "with an affine transformation that contains a large shift." + "Under that circumstance, the nonlinear warp might be shifted" + "partially outside its original grid, so expanding that grid" + "can avoid this problem." + "* Note that this option perforce turns off '-nopadWARP'.", + argstr="-expad %d", + xor=["nopadWARP"], + ) ballopt = traits.Bool( - desc='Normally, the incremental warp parameters are optimized inside' - 'a rectangular \'box\' (24 dimensional for cubic patches, 81 for' - 'quintic patches), whose limits define the amount of distortion' - 'allowed at each step. Using \'-ballopt\' switches these limits' - 'to be applied to a \'ball\' (interior of a hypersphere), which' - 'can allow for larger incremental displacements. Use this' - 'option if you think things need to be able to move farther.', - argstr='-ballopt', - xor=['workhard', 'boxopt']) + desc="Normally, the incremental warp parameters are optimized inside" + "a rectangular 'box' (24 dimensional for cubic patches, 81 for" + "quintic patches), whose limits define the amount of distortion" + "allowed at each step. Using '-ballopt' switches these limits" + "to be applied to a 'ball' (interior of a hypersphere), which" + "can allow for larger incremental displacements. Use this" + "option if you think things need to be able to move farther.", + argstr="-ballopt", + xor=["workhard", "boxopt"], + ) baxopt = traits.Bool( - desc='Use the \'box\' optimization limits instead of the \'ball\'' - '[this is the default at present].' - '* Note that if \'-workhard\' is used, then ball and box optimization' - 'are alternated in the different iterations at each level, so' - 'these two options have no effect in that case.', - argstr='-boxopt', - xor=['workhard', 'ballopt']) + desc="Use the 'box' optimization limits instead of the 'ball'" + "[this is the default at present]." + "* Note that if '-workhard' is used, then ball and box optimization" + "are alternated in the different iterations at each level, so" + "these two options have no effect in that case.", + argstr="-boxopt", + xor=["workhard", "ballopt"], + ) verb = traits.Bool( - desc='more detailed description of the process', - argstr='-verb', - xor=['quiet']) + desc="more detailed description of the process", argstr="-verb", xor=["quiet"] + ) quiet = traits.Bool( - desc='Cut out most of the fun fun fun progress messages :-(', - argstr='-quiet', - xor=['verb']) + desc="Cut out most of the fun fun fun progress messages :-(", + argstr="-quiet", + xor=["verb"], + ) # Hidden and semi-hidden options - overwrite = traits.Bool(desc='Overwrite outputs', argstr='-overwrite') + overwrite = traits.Bool(desc="Overwrite outputs", argstr="-overwrite") lpc = traits.Bool( - desc='Local Pearson minimization (i.e., EPI-T1 registration)' - 'This option has not be extensively tested' - 'If you use \'-lpc\', then \'-maxlev 0\' is automatically set.' - 'If you want to go to more refined levels, you can set \'-maxlev\'' - 'This should be set up to have lpc as the second to last argument' - 'and maxlev as the second to last argument, as needed by AFNI' - 'Using maxlev > 1 is not recommended for EPI-T1 alignment.', - argstr='-lpc', - xor=['nmi', 'mi', 'hel', 'lpa', 'pear'], - position=-2) + desc="Local Pearson minimization (i.e., EPI-T1 registration)" + "This option has not be extensively tested" + "If you use '-lpc', then '-maxlev 0' is automatically set." 
+ "If you want to go to more refined levels, you can set '-maxlev'" + "This should be set up to have lpc as the second to last argument" + "and maxlev as the second to last argument, as needed by AFNI" + "Using maxlev > 1 is not recommended for EPI-T1 alignment.", + argstr="-lpc", + xor=["nmi", "mi", "hel", "lpa", "pear"], + position=-2, + ) lpa = traits.Bool( - desc='Local Pearson maximization' - 'This option has not be extensively tested', - argstr='-lpa', - xor=['nmi', 'mi', 'lpc', 'hel', 'pear']) + desc="Local Pearson maximization" "This option has not be extensively tested", + argstr="-lpa", + xor=["nmi", "mi", "lpc", "hel", "pear"], + ) hel = traits.Bool( - desc='Hellinger distance: a matching function for the adventurous' - 'This option has NOT be extensively tested for usefullness' - 'and should be considered experimental at this infundibulum.', - argstr='-hel', - xor=['nmi', 'mi', 'lpc', 'lpa', 'pear']) + desc="Hellinger distance: a matching function for the adventurous" + "This option has NOT be extensively tested for usefullness" + "and should be considered experimental at this infundibulum.", + argstr="-hel", + xor=["nmi", "mi", "lpc", "lpa", "pear"], + ) mi = traits.Bool( - desc='Mutual Information: a matching function for the adventurous' - 'This option has NOT be extensively tested for usefullness' - 'and should be considered experimental at this infundibulum.', - argstr='-mi', - xor=['mi', 'hel', 'lpc', 'lpa', 'pear']) + desc="Mutual Information: a matching function for the adventurous" + "This option has NOT be extensively tested for usefullness" + "and should be considered experimental at this infundibulum.", + argstr="-mi", + xor=["mi", "hel", "lpc", "lpa", "pear"], + ) nmi = traits.Bool( - desc= - 'Normalized Mutual Information: a matching function for the adventurous' - 'This option has NOT be extensively tested for usefullness' - 'and should be considered experimental at this infundibulum.', - argstr='-nmi', - xor=['nmi', 'hel', 'lpc', 'lpa', 'pear']) + desc="Normalized Mutual Information: a matching function for the adventurous" + "This option has NOT be extensively tested for usefullness" + "and should be considered experimental at this infundibulum.", + argstr="-nmi", + xor=["nmi", "hel", "lpc", "lpa", "pear"], + ) class QwarpOutputSpec(TraitedSpec): warped_source = File( - desc='Warped source file. If plusminus is used, this is the undistorted' - 'source file.') - warped_base = File(desc='Undistorted base file.') + desc="Warped source file. If plusminus is used, this is the undistorted" + "source file." + ) + warped_base = File(desc="Undistorted base file.") source_warp = File( desc="Displacement in mm for the source image." "If plusminus is used this is the field suceptibility correction" - "warp (in 'mm') for source image.") + "warp (in 'mm') for source image." + ) base_warp = File( desc="Displacement in mm for the base image." "If plus minus is used, this is the field suceptibility correction" "warp (in 'mm') for base image. 
This is only output if plusminus" - "or iwarp options are passed") + "or iwarp options are passed" + ) weights = File(desc="Auto-computed weight volume.") @@ -3756,12 +4051,13 @@ class Qwarp(AFNICommand): "3dQwarp -allineate -allineate_opts '-cose lpa -verb' -base mni.nii -source structural.nii \ -prefix ppp_structural" >>> res3 = qwarp3.run() # doctest: +SKIP """ - _cmd = '3dQwarp' + + _cmd = "3dQwarp" input_spec = QwarpInputSpec output_spec = QwarpOutputSpec def _format_arg(self, name, spec, value): - if name == 'allineate_opts': + if name == "allineate_opts": return spec.argstr % ("'" + value + "'") return super(Qwarp, self)._format_arg(name, spec, value) @@ -3769,87 +4065,111 @@ def _list_outputs(self): outputs = self.output_spec().get() if not isdefined(self.inputs.out_file): - prefix = self._gen_fname(self.inputs.in_file, suffix='_QW') + prefix = self._gen_fname(self.inputs.in_file, suffix="_QW") outputtype = self.inputs.outputtype - if outputtype == 'AFNI': - ext = '.HEAD' - suffix = '+tlrc' + if outputtype == "AFNI": + ext = ".HEAD" + suffix = "+tlrc" else: ext = Info.output_type_to_ext(outputtype) - suffix = '' + suffix = "" else: prefix = self.inputs.out_file - ext_ind = max([ - prefix.lower().rfind('.nii.gz'), - prefix.lower().rfind('.nii') - ]) + ext_ind = max( + [prefix.lower().rfind(".nii.gz"), prefix.lower().rfind(".nii")] + ) if ext_ind == -1: - ext = '.HEAD' - suffix = '+tlrc' + ext = ".HEAD" + suffix = "+tlrc" else: ext = prefix[ext_ind:] - suffix = '' + suffix = "" # All outputs should be in the same directory as the prefix out_dir = os.path.dirname(os.path.abspath(prefix)) - outputs['warped_source'] = fname_presuffix( - prefix, suffix=suffix, use_ext=False, newpath=out_dir) + ext + outputs["warped_source"] = ( + fname_presuffix(prefix, suffix=suffix, use_ext=False, newpath=out_dir) + ext + ) if not self.inputs.nowarp: - outputs['source_warp'] = fname_presuffix( - prefix, suffix='_WARP' + suffix, use_ext=False, - newpath=out_dir) + ext + outputs["source_warp"] = ( + fname_presuffix( + prefix, suffix="_WARP" + suffix, use_ext=False, newpath=out_dir + ) + + ext + ) if self.inputs.iwarp: - outputs['base_warp'] = fname_presuffix( - prefix, suffix='_WARPINV' + suffix, use_ext=False, - newpath=out_dir) + ext + outputs["base_warp"] = ( + fname_presuffix( + prefix, suffix="_WARPINV" + suffix, use_ext=False, newpath=out_dir + ) + + ext + ) if isdefined(self.inputs.out_weight_file): - outputs['weights'] = os.path.abspath(self.inputs.out_weight_file) + outputs["weights"] = os.path.abspath(self.inputs.out_weight_file) if self.inputs.plusminus: - outputs['warped_source'] = fname_presuffix( - prefix, suffix='_PLUS' + suffix, use_ext=False, - newpath=out_dir) + ext - outputs['warped_base'] = fname_presuffix( - prefix, suffix='_MINUS' + suffix, use_ext=False, - newpath=out_dir) + ext - outputs['source_warp'] = fname_presuffix( - prefix, suffix='_PLUS_WARP' + suffix, use_ext=False, - newpath=out_dir) + ext - outputs['base_warp'] = fname_presuffix( - prefix, suffix='_MINUS_WARP' + suffix, use_ext=False, - newpath=out_dir) + ext + outputs["warped_source"] = ( + fname_presuffix( + prefix, suffix="_PLUS" + suffix, use_ext=False, newpath=out_dir + ) + + ext + ) + outputs["warped_base"] = ( + fname_presuffix( + prefix, suffix="_MINUS" + suffix, use_ext=False, newpath=out_dir + ) + + ext + ) + outputs["source_warp"] = ( + fname_presuffix( + prefix, suffix="_PLUS_WARP" + suffix, use_ext=False, newpath=out_dir + ) + + ext + ) + outputs["base_warp"] = ( + fname_presuffix( + prefix, + 
suffix="_MINUS_WARP" + suffix, + use_ext=False, + newpath=out_dir, + ) + + ext + ) return outputs def _gen_filename(self, name): - if name == 'out_file': - return self._gen_fname(self.inputs.in_file, suffix='_QW') + if name == "out_file": + return self._gen_fname(self.inputs.in_file, suffix="_QW") class QwarpPlusMinusInputSpec(QwarpInputSpec): source_file = File( - desc='Source image (opposite phase encoding direction than base image)', - argstr='-source %s', + desc="Source image (opposite phase encoding direction than base image)", + argstr="-source %s", exists=True, - deprecated='1.1.2', - new_name='in_file', - copyfile=False) + deprecated="1.1.2", + new_name="in_file", + copyfile=False, + ) out_file = File( - 'Qwarp.nii.gz', - argstr='-prefix %s', + "Qwarp.nii.gz", + argstr="-prefix %s", position=0, usedefault=True, - desc="Output file") + desc="Output file", + ) plusminus = traits.Bool( True, usedefault=True, position=1, - desc='Normally, the warp displacements dis(x) are defined to match' - 'base(x) to source(x+dis(x)). With this option, the match' - 'is between base(x-dis(x)) and source(x+dis(x)) -- the two' - 'images \'meet in the middle\'. For more info, view Qwarp` interface', - argstr='-plusminus', - xor=['duplo', 'allsave', 'iwarp']) + desc="Normally, the warp displacements dis(x) are defined to match" + "base(x) to source(x+dis(x)). With this option, the match" + "is between base(x-dis(x)) and source(x+dis(x)) -- the two" + "images 'meet in the middle'. For more info, view Qwarp` interface", + argstr="-plusminus", + xor=["duplo", "allsave", "iwarp"], + ) class QwarpPlusMinus(Qwarp): diff --git a/nipype/interfaces/afni/svm.py b/nipype/interfaces/afni/svm.py index 0453778642..13c83af51c 100644 --- a/nipype/interfaces/afni/svm.py +++ b/nipype/interfaces/afni/svm.py @@ -10,68 +10,74 @@ class SVMTrainInputSpec(AFNICommandInputSpec): # training options ttype = traits.Str( - desc='tname: classification or regression', - argstr='-type %s', - mandatory=True) + desc="tname: classification or regression", argstr="-type %s", mandatory=True + ) in_file = File( - desc='A 3D+t AFNI brik dataset to be used for training.', - argstr='-trainvol %s', + desc="A 3D+t AFNI brik dataset to be used for training.", + argstr="-trainvol %s", mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( name_template="%s_vectors", - desc='output sum of weighted linear support vectors file name', - argstr='-bucket %s', - suffix='_bucket', - name_source="in_file") + desc="output sum of weighted linear support vectors file name", + argstr="-bucket %s", + suffix="_bucket", + name_source="in_file", + ) model = File( name_template="%s_model", - desc='basename for the brik containing the SVM model', - argstr='-model %s', - suffix='_model', - name_source="in_file") + desc="basename for the brik containing the SVM model", + argstr="-model %s", + suffix="_model", + name_source="in_file", + ) alphas = File( name_template="%s_alphas", - desc='output alphas file name', - argstr='-alpha %s', - suffix='_alphas', - name_source="in_file") + desc="output alphas file name", + argstr="-alpha %s", + suffix="_alphas", + name_source="in_file", + ) mask = File( - desc='byte-format brik file used to mask voxels in the analysis', - argstr='-mask %s', + desc="byte-format brik file used to mask voxels in the analysis", + argstr="-mask %s", position=-1, exists=True, - copyfile=False) + copyfile=False, + ) nomodelmask = traits.Bool( - desc='Flag to enable the omission of a mask file', - argstr='-nomodelmask') + 
desc="Flag to enable the omission of a mask file", argstr="-nomodelmask" + ) trainlabels = File( - desc= - '.1D labels corresponding to the stimulus paradigm for the training data.', - argstr='-trainlabels %s', - exists=True) + desc=".1D labels corresponding to the stimulus paradigm for the training data.", + argstr="-trainlabels %s", + exists=True, + ) censor = File( - desc= - '.1D censor file that allows the user to ignore certain samples in the training data.', - argstr='-censor %s', - exists=True) + desc=".1D censor file that allows the user to ignore certain samples in the training data.", + argstr="-censor %s", + exists=True, + ) kernel = traits.Str( - desc= - 'string specifying type of kernel function:linear, polynomial, rbf, sigmoid', - argstr='-kernel %s') + desc="string specifying type of kernel function:linear, polynomial, rbf, sigmoid", + argstr="-kernel %s", + ) max_iterations = traits.Int( - desc='Specify the maximum number of iterations for the optimization.', - argstr='-max_iterations %d') + desc="Specify the maximum number of iterations for the optimization.", + argstr="-max_iterations %d", + ) w_out = traits.Bool( - desc='output sum of weighted linear support vectors', argstr='-wout') - options = traits.Str(desc='additional options for SVM-light', argstr='%s') + desc="output sum of weighted linear support vectors", argstr="-wout" + ) + options = traits.Str(desc="additional options for SVM-light", argstr="%s") class SVMTrainOutputSpec(TraitedSpec): - out_file = File(desc='sum of weighted linear support vectors file name') - model = File(desc='brik containing the SVM model file name') - alphas = File(desc='output alphas file name') + out_file = File(desc="sum of weighted linear support vectors file name") + model = File(desc="brik containing the SVM model file name") + alphas = File(desc="output alphas file name") class SVMTrain(AFNICommand): @@ -95,10 +101,10 @@ class SVMTrain(AFNICommand): """ - _cmd = '3dsvm' + _cmd = "3dsvm" input_spec = SVMTrainInputSpec output_spec = SVMTrainOutputSpec - _additional_metadata = ['suffix'] + _additional_metadata = ["suffix"] def _format_arg(self, name, trait_spec, value): return super(SVMTrain, self)._format_arg(name, trait_spec, value) @@ -107,39 +113,43 @@ def _format_arg(self, name, trait_spec, value): class SVMTestInputSpec(AFNICommandInputSpec): # testing options model = traits.Str( - desc='modname is the basename for the brik containing the SVM model', - argstr='-model %s', - mandatory=True) + desc="modname is the basename for the brik containing the SVM model", + argstr="-model %s", + mandatory=True, + ) in_file = File( - desc='A 3D or 3D+t AFNI brik dataset to be used for testing.', - argstr='-testvol %s', + desc="A 3D or 3D+t AFNI brik dataset to be used for testing.", + argstr="-testvol %s", exists=True, - mandatory=True) + mandatory=True, + ) out_file = File( name_template="%s_predictions", - desc='filename for .1D prediction file(s).', - argstr='-predictions %s') + desc="filename for .1D prediction file(s).", + argstr="-predictions %s", + ) testlabels = File( - desc= - '*true* class category .1D labels for the test dataset. It is used to calculate the prediction accuracy performance', + desc="*true* class category .1D labels for the test dataset. 
It is used to calculate the prediction accuracy performance", exists=True, - argstr='-testlabels %s') + argstr="-testlabels %s", + ) classout = traits.Bool( - desc= - 'Flag to specify that pname files should be integer-valued, corresponding to class category decisions.', - argstr='-classout') + desc="Flag to specify that pname files should be integer-valued, corresponding to class category decisions.", + argstr="-classout", + ) nopredcensord = traits.Bool( - desc= - 'Flag to prevent writing predicted values for censored time-points', - argstr='-nopredcensord') + desc="Flag to prevent writing predicted values for censored time-points", + argstr="-nopredcensord", + ) nodetrend = traits.Bool( - desc= - 'Flag to specify that pname files should not be linearly detrended', - argstr='-nodetrend') + desc="Flag to specify that pname files should not be linearly detrended", + argstr="-nodetrend", + ) multiclass = traits.Bool( - desc='Specifies multiclass algorithm for classification', - argstr='-multiclass %s') - options = traits.Str(desc='additional options for SVM-light', argstr='%s') + desc="Specifies multiclass algorithm for classification", + argstr="-multiclass %s", + ) + options = traits.Str(desc="additional options for SVM-light", argstr="%s") class SVMTest(AFNICommand): @@ -160,6 +170,7 @@ class SVMTest(AFNICommand): >>> res = svmTest.run() # doctest: +SKIP """ - _cmd = '3dsvm' + + _cmd = "3dsvm" input_spec = SVMTestInputSpec output_spec = AFNICommandOutputSpec diff --git a/nipype/interfaces/afni/tests/test_auto_ABoverlap.py b/nipype/interfaces/afni/tests/test_auto_ABoverlap.py index 5fe7d2efd7..63e2e8d652 100644 --- a/nipype/interfaces/afni/tests/test_auto_ABoverlap.py +++ b/nipype/interfaces/afni/tests/test_auto_ABoverlap.py @@ -4,46 +4,30 @@ def test_ABoverlap_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), in_file_a=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-3, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-3, ), in_file_b=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-2, - ), - no_automask=dict(argstr='-no_automask', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - out_file=dict( - argstr=' |& tee %s', - extensions=None, - position=-1, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-2, ), + no_automask=dict(argstr="-no_automask",), + num_threads=dict(nohash=True, usedefault=True,), + out_file=dict(argstr=" |& tee %s", extensions=None, position=-1,), outputtype=dict(), - quiet=dict(argstr='-quiet', ), - verb=dict(argstr='-verb', ), + quiet=dict(argstr="-quiet",), + verb=dict(argstr="-verb",), ) inputs = ABoverlap.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ABoverlap_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ABoverlap.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_AFNICommand.py b/nipype/interfaces/afni/tests/test_auto_AFNICommand.py index 17f152c304..0764b4947b 100644 --- a/nipype/interfaces/afni/tests/test_auto_AFNICommand.py +++ b/nipype/interfaces/afni/tests/test_auto_AFNICommand.py @@ -4,20 +4,14 @@ def 
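A hedged sketch of how the two 3dsvm wrappers above chain together. The file names are hypothetical, only trait names visible in this diff are used, and `run()` requires AFNI to be installed:

    # Illustrative only: train a model, then apply it to a held-out run.
    from nipype.interfaces import afni

    train = afni.SVMTrain()
    train.inputs.ttype = "regression"            # '-type regression'
    train.inputs.in_file = "run1+orig.BRIK"      # hypothetical training dataset
    train.inputs.trainlabels = "run1_labels.1D"  # hypothetical label file
    res = train.run()

    test = afni.SVMTest()
    test.inputs.model = res.outputs.model        # model brik from SVMTrain
    test.inputs.in_file = "run2+orig.BRIK"       # hypothetical test dataset
    test.run()
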
diff --git a/nipype/interfaces/afni/tests/test_auto_ABoverlap.py b/nipype/interfaces/afni/tests/test_auto_ABoverlap.py
index 5fe7d2efd7..63e2e8d652 100644
--- a/nipype/interfaces/afni/tests/test_auto_ABoverlap.py
+++ b/nipype/interfaces/afni/tests/test_auto_ABoverlap.py
@@ -4,46 +4,30 @@
 def test_ABoverlap_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
         in_file_a=dict(
-            argstr='%s',
-            copyfile=False,
-            extensions=None,
-            mandatory=True,
-            position=-3,
+            argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-3,
         ),
         in_file_b=dict(
-            argstr='%s',
-            copyfile=False,
-            extensions=None,
-            mandatory=True,
-            position=-2,
-        ),
-        no_automask=dict(argstr='-no_automask', ),
-        num_threads=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        out_file=dict(
-            argstr=' |& tee %s',
-            extensions=None,
-            position=-1,
+            argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-2,
         ),
+        no_automask=dict(argstr="-no_automask",),
+        num_threads=dict(nohash=True, usedefault=True,),
+        out_file=dict(argstr=" |& tee %s", extensions=None, position=-1,),
         outputtype=dict(),
-        quiet=dict(argstr='-quiet', ),
-        verb=dict(argstr='-verb', ),
+        quiet=dict(argstr="-quiet",),
+        verb=dict(argstr="-verb",),
     )
     inputs = ABoverlap.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_ABoverlap_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = ABoverlap.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_AFNICommand.py b/nipype/interfaces/afni/tests/test_auto_AFNICommand.py
index 17f152c304..0764b4947b 100644
--- a/nipype/interfaces/afni/tests/test_auto_AFNICommand.py
+++ b/nipype/interfaces/afni/tests/test_auto_AFNICommand.py
@@ -4,20 +4,14 @@
 def test_AFNICommand_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        num_threads=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        num_threads=dict(nohash=True, usedefault=True,),
         out_file=dict(
-            argstr='-prefix %s',
+            argstr="-prefix %s",
             extensions=None,
-            name_source=['in_file'],
-            name_template='%s_afni',
+            name_source=["in_file"],
+            name_template="%s_afni",
         ),
         outputtype=dict(),
     )
diff --git a/nipype/interfaces/afni/tests/test_auto_AFNICommandBase.py b/nipype/interfaces/afni/tests/test_auto_AFNICommandBase.py
index ce0a85708c..58de0b425a 100644
--- a/nipype/interfaces/afni/tests/test_auto_AFNICommandBase.py
+++ b/nipype/interfaces/afni/tests/test_auto_AFNICommandBase.py
@@ -4,11 +4,7 @@
 def test_AFNICommandBase_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",), environ=dict(nohash=True, usedefault=True,),
     )
     inputs = AFNICommandBase.input_spec()
diff --git a/nipype/interfaces/afni/tests/test_auto_AFNIPythonCommand.py b/nipype/interfaces/afni/tests/test_auto_AFNIPythonCommand.py
index d9e3508113..e437676286 100644
--- a/nipype/interfaces/afni/tests/test_auto_AFNIPythonCommand.py
+++ b/nipype/interfaces/afni/tests/test_auto_AFNIPythonCommand.py
@@ -4,20 +4,14 @@
 def test_AFNIPythonCommand_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        num_threads=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        num_threads=dict(nohash=True, usedefault=True,),
         out_file=dict(
-            argstr='-prefix %s',
+            argstr="-prefix %s",
             extensions=None,
-            name_source=['in_file'],
-            name_template='%s_afni',
+            name_source=["in_file"],
+            name_template="%s_afni",
         ),
         outputtype=dict(),
     )
diff --git a/nipype/interfaces/afni/tests/test_auto_AFNItoNIFTI.py b/nipype/interfaces/afni/tests/test_auto_AFNItoNIFTI.py
index df58646223..19012ff364 100644
--- a/nipype/interfaces/afni/tests/test_auto_AFNItoNIFTI.py
+++ b/nipype/interfaces/afni/tests/test_auto_AFNItoNIFTI.py
@@ -4,48 +4,34 @@
 def test_AFNItoNIFTI_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        denote=dict(argstr='-denote', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        denote=dict(argstr="-denote",),
+        environ=dict(nohash=True, usedefault=True,),
         in_file=dict(
-            argstr='%s',
-            copyfile=False,
-            extensions=None,
-            mandatory=True,
-            position=-1,
-        ),
-        newid=dict(
-            argstr='-newid',
-            xor=['oldid'],
-        ),
-        num_threads=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        oldid=dict(
-            argstr='-oldid',
-            xor=['newid'],
+            argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1,
         ),
+        newid=dict(argstr="-newid", xor=["oldid"],),
+        num_threads=dict(nohash=True, usedefault=True,),
+        oldid=dict(argstr="-oldid", xor=["newid"],),
         out_file=dict(
-            argstr='-prefix %s',
+            argstr="-prefix %s",
             extensions=None,
             hash_files=False,
-            name_source='in_file',
-            name_template='%s.nii',
+            name_source="in_file",
+            name_template="%s.nii",
         ),
         outputtype=dict(),
-        pure=dict(argstr='-pure', ),
+        pure=dict(argstr="-pure",),
     )
     inputs = AFNItoNIFTI.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_AFNItoNIFTI_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = AFNItoNIFTI.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_AlignEpiAnatPy.py b/nipype/interfaces/afni/tests/test_auto_AlignEpiAnatPy.py
index cb2389e67b..e2b8f5a3c9 100644
--- a/nipype/interfaces/afni/tests/test_auto_AlignEpiAnatPy.py
+++ b/nipype/interfaces/afni/tests/test_auto_AlignEpiAnatPy.py
@@ -4,63 +4,42 @@
 def test_AlignEpiAnatPy_inputs():
     input_map = dict(
-        anat=dict(
-            argstr='-anat %s',
-            copyfile=False,
-            extensions=None,
-            mandatory=True,
-        ),
-        anat2epi=dict(argstr='-anat2epi', ),
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        epi2anat=dict(argstr='-epi2anat', ),
-        epi_base=dict(
-            argstr='-epi_base %s',
-            mandatory=True,
-        ),
-        epi_strip=dict(argstr='-epi_strip %s', ),
+        anat=dict(argstr="-anat %s", copyfile=False, extensions=None, mandatory=True,),
+        anat2epi=dict(argstr="-anat2epi",),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        epi2anat=dict(argstr="-epi2anat",),
+        epi_base=dict(argstr="-epi_base %s", mandatory=True,),
+        epi_strip=dict(argstr="-epi_strip %s",),
         in_file=dict(
-            argstr='-epi %s',
-            copyfile=False,
-            extensions=None,
-            mandatory=True,
+            argstr="-epi %s", copyfile=False, extensions=None, mandatory=True,
         ),
         outputtype=dict(),
-        py27_path=dict(usedefault=True, ),
-        save_skullstrip=dict(argstr='-save_skullstrip', ),
-        suffix=dict(
-            argstr='-suffix %s',
-            usedefault=True,
-        ),
-        tshift=dict(
-            argstr='-tshift %s',
-            usedefault=True,
-        ),
-        volreg=dict(
-            argstr='-volreg %s',
-            usedefault=True,
-        ),
+        py27_path=dict(usedefault=True,),
+        save_skullstrip=dict(argstr="-save_skullstrip",),
+        suffix=dict(argstr="-suffix %s", usedefault=True,),
+        tshift=dict(argstr="-tshift %s", usedefault=True,),
+        volreg=dict(argstr="-volreg %s", usedefault=True,),
     )
     inputs = AlignEpiAnatPy.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_AlignEpiAnatPy_outputs():
     output_map = dict(
-        anat_al_mat=dict(extensions=None, ),
-        anat_al_orig=dict(extensions=None, ),
-        epi_al_mat=dict(extensions=None, ),
-        epi_al_orig=dict(extensions=None, ),
-        epi_al_tlrc_mat=dict(extensions=None, ),
-        epi_reg_al_mat=dict(extensions=None, ),
-        epi_tlrc_al=dict(extensions=None, ),
-        epi_vr_al_mat=dict(extensions=None, ),
-        epi_vr_motion=dict(extensions=None, ),
-        skullstrip=dict(extensions=None, ),
+        anat_al_mat=dict(extensions=None,),
+        anat_al_orig=dict(extensions=None,),
+        epi_al_mat=dict(extensions=None,),
+        epi_al_orig=dict(extensions=None,),
+        epi_al_tlrc_mat=dict(extensions=None,),
+        epi_reg_al_mat=dict(extensions=None,),
+        epi_tlrc_al=dict(extensions=None,),
+        epi_vr_al_mat=dict(extensions=None,),
+        epi_vr_motion=dict(extensions=None,),
+        skullstrip=dict(extensions=None,),
     )
     outputs = AlignEpiAnatPy.output_spec()
diff --git a/nipype/interfaces/afni/tests/test_auto_Allineate.py b/nipype/interfaces/afni/tests/test_auto_Allineate.py
index 0891e60621..356e7d52a0 100644
--- a/nipype/interfaces/afni/tests/test_auto_Allineate.py
+++ b/nipype/interfaces/afni/tests/test_auto_Allineate.py
@@ -5,132 +5,103 @@
 def test_Allineate_inputs():
     input_map = dict(
         allcostx=dict(
-            argstr='-allcostx |& tee %s',
+            argstr="-allcostx |& tee %s",
             extensions=None,
             position=-1,
-            xor=[
-                'out_file', 'out_matrix', 'out_param_file', 'out_weight_file'
-            ],
+            xor=["out_file", "out_matrix", "out_param_file", "out_weight_file"],
         ),
-        args=dict(argstr='%s', ),
-        autobox=dict(argstr='-autobox', ),
-        automask=dict(argstr='-automask+%d', ),
-        autoweight=dict(argstr='-autoweight%s', ),
-        center_of_mass=dict(argstr='-cmass%s', ),
-        check=dict(argstr='-check %s', ),
-        convergence=dict(argstr='-conv %f', ),
-        cost=dict(argstr='-cost %s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        epi=dict(argstr='-EPI', ),
-        final_interpolation=dict(argstr='-final %s', ),
-        fine_blur=dict(argstr='-fineblur %f', ),
+        args=dict(argstr="%s",),
+        autobox=dict(argstr="-autobox",),
+        automask=dict(argstr="-automask+%d",),
+        autoweight=dict(argstr="-autoweight%s",),
+        center_of_mass=dict(argstr="-cmass%s",),
+        check=dict(argstr="-check %s",),
+        convergence=dict(argstr="-conv %f",),
+        cost=dict(argstr="-cost %s",),
+        environ=dict(nohash=True, usedefault=True,),
+        epi=dict(argstr="-EPI",),
+        final_interpolation=dict(argstr="-final %s",),
+        fine_blur=dict(argstr="-fineblur %f",),
         in_file=dict(
-            argstr='-source %s',
-            copyfile=False,
-            extensions=None,
-            mandatory=True,
+            argstr="-source %s", copyfile=False, extensions=None, mandatory=True,
         ),
         in_matrix=dict(
-            argstr='-1Dmatrix_apply %s',
+            argstr="-1Dmatrix_apply %s",
             extensions=None,
             position=-3,
-            xor=['out_matrix'],
+            xor=["out_matrix"],
         ),
         in_param_file=dict(
-            argstr='-1Dparam_apply %s',
-            extensions=None,
-            xor=['out_param_file'],
-        ),
-        interpolation=dict(argstr='-interp %s', ),
-        master=dict(
-            argstr='-master %s',
-            extensions=None,
-        ),
-        maxrot=dict(argstr='-maxrot %f', ),
-        maxscl=dict(argstr='-maxscl %f', ),
-        maxshf=dict(argstr='-maxshf %f', ),
-        maxshr=dict(argstr='-maxshr %f', ),
-        newgrid=dict(argstr='-newgrid %f', ),
-        nmatch=dict(argstr='-nmatch %d', ),
-        no_pad=dict(argstr='-nopad', ),
-        nomask=dict(argstr='-nomask', ),
-        num_threads=dict(
-            nohash=True,
-            usedefault=True,
+            argstr="-1Dparam_apply %s", extensions=None, xor=["out_param_file"],
         ),
-        nwarp=dict(argstr='-nwarp %s', ),
-        nwarp_fixdep=dict(argstr='-nwarp_fixdep%s...', ),
-        nwarp_fixmot=dict(argstr='-nwarp_fixmot%s...', ),
-        one_pass=dict(argstr='-onepass', ),
+        interpolation=dict(argstr="-interp %s",),
+        master=dict(argstr="-master %s", extensions=None,),
+        maxrot=dict(argstr="-maxrot %f",),
+        maxscl=dict(argstr="-maxscl %f",),
+        maxshf=dict(argstr="-maxshf %f",),
+        maxshr=dict(argstr="-maxshr %f",),
+        newgrid=dict(argstr="-newgrid %f",),
+        nmatch=dict(argstr="-nmatch %d",),
+        no_pad=dict(argstr="-nopad",),
+        nomask=dict(argstr="-nomask",),
+        num_threads=dict(nohash=True, usedefault=True,),
+        nwarp=dict(argstr="-nwarp %s",),
+        nwarp_fixdep=dict(argstr="-nwarp_fixdep%s...",),
+        nwarp_fixmot=dict(argstr="-nwarp_fixmot%s...",),
+        one_pass=dict(argstr="-onepass",),
         out_file=dict(
-            argstr='-prefix %s',
+            argstr="-prefix %s",
             extensions=None,
             hash_files=False,
-            name_source='in_file',
-            name_template='%s_allineate',
-            xor=['allcostx'],
+            name_source="in_file",
+            name_template="%s_allineate",
+            xor=["allcostx"],
         ),
         out_matrix=dict(
-            argstr='-1Dmatrix_save %s',
-            extensions=None,
-            xor=['in_matrix', 'allcostx'],
+            argstr="-1Dmatrix_save %s", extensions=None, xor=["in_matrix", "allcostx"],
         ),
         out_param_file=dict(
-            argstr='-1Dparam_save %s',
-            extensions=None,
-            xor=['in_param_file', 'allcostx'],
-        ),
-        out_weight_file=dict(
-            argstr='-wtprefix %s',
+            argstr="-1Dparam_save %s",
             extensions=None,
-            xor=['allcostx'],
+            xor=["in_param_file", "allcostx"],
         ),
+        out_weight_file=dict(argstr="-wtprefix %s", extensions=None, xor=["allcostx"],),
         outputtype=dict(),
-        overwrite=dict(argstr='-overwrite', ),
-        quiet=dict(argstr='-quiet', ),
-        reference=dict(
-            argstr='-base %s',
-            extensions=None,
-        ),
-        replacebase=dict(argstr='-replacebase', ),
-        replacemeth=dict(argstr='-replacemeth %s', ),
-        source_automask=dict(argstr='-source_automask+%d', ),
-        source_mask=dict(
-            argstr='-source_mask %s',
-            extensions=None,
-        ),
-        two_best=dict(argstr='-twobest %d', ),
-        two_blur=dict(argstr='-twoblur %f', ),
-        two_first=dict(argstr='-twofirst', ),
-        two_pass=dict(argstr='-twopass', ),
-        usetemp=dict(argstr='-usetemp', ),
-        verbose=dict(argstr='-verb', ),
-        warp_type=dict(argstr='-warp %s', ),
-        warpfreeze=dict(argstr='-warpfreeze', ),
-        weight=dict(argstr='-weight %s', ),
+        overwrite=dict(argstr="-overwrite",),
+        quiet=dict(argstr="-quiet",),
+        reference=dict(argstr="-base %s", extensions=None,),
+        replacebase=dict(argstr="-replacebase",),
+        replacemeth=dict(argstr="-replacemeth %s",),
+        source_automask=dict(argstr="-source_automask+%d",),
+        source_mask=dict(argstr="-source_mask %s", extensions=None,),
+        two_best=dict(argstr="-twobest %d",),
+        two_blur=dict(argstr="-twoblur %f",),
+        two_first=dict(argstr="-twofirst",),
+        two_pass=dict(argstr="-twopass",),
+        usetemp=dict(argstr="-usetemp",),
+        verbose=dict(argstr="-verb",),
+        warp_type=dict(argstr="-warp %s",),
+        warpfreeze=dict(argstr="-warpfreeze",),
+        weight=dict(argstr="-weight %s",),
         weight_file=dict(
-            argstr='-weight %s',
-            deprecated='1.0.0',
-            extensions=None,
-            new_name='weight',
+            argstr="-weight %s", deprecated="1.0.0", extensions=None, new_name="weight",
         ),
-        zclip=dict(argstr='-zclip', ),
+        zclip=dict(argstr="-zclip",),
     )
     inputs = Allineate.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Allineate_outputs():
     output_map = dict(
-        allcostx=dict(extensions=None, ),
-        out_file=dict(extensions=None, ),
-        out_matrix=dict(extensions=None, ),
-        out_param_file=dict(extensions=None, ),
-        out_weight_file=dict(extensions=None, ),
+        allcostx=dict(extensions=None,),
+        out_file=dict(extensions=None,),
+        out_matrix=dict(extensions=None,),
+        out_param_file=dict(extensions=None,),
+        out_weight_file=dict(extensions=None,),
    )
     outputs = Allineate.output_spec()
diff --git a/nipype/interfaces/afni/tests/test_auto_AutoTLRC.py b/nipype/interfaces/afni/tests/test_auto_AutoTLRC.py
index 337835e5d1..a2b13596e6 100644
--- a/nipype/interfaces/afni/tests/test_auto_AutoTLRC.py
+++ b/nipype/interfaces/afni/tests/test_auto_AutoTLRC.py
@@ -4,22 +4,13 @@
 def test_AutoTLRC_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        base=dict(
-            argstr='-base %s',
-            mandatory=True,
-        ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        base=dict(argstr="-base %s", mandatory=True,),
+        environ=dict(nohash=True, usedefault=True,),
         in_file=dict(
-            argstr='-input %s',
-            copyfile=False,
-            extensions=None,
-            mandatory=True,
+            argstr="-input %s", copyfile=False, extensions=None, mandatory=True,
         ),
-        no_ss=dict(argstr='-no_ss', ),
+        no_ss=dict(argstr="-no_ss",),
         outputtype=dict(),
     )
     inputs = AutoTLRC.input_spec()
@@ -27,8 +18,10 @@ def test_AutoTLRC_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_AutoTLRC_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = AutoTLRC.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_AutoTcorrelate.py b/nipype/interfaces/afni/tests/test_auto_AutoTcorrelate.py
index 3ddf4ef69c..344ec503ce 100644
--- a/nipype/interfaces/afni/tests/test_auto_AutoTcorrelate.py
+++ b/nipype/interfaces/afni/tests/test_auto_AutoTcorrelate.py
@@ -4,52 +4,36 @@
 def test_AutoTcorrelate_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        eta2=dict(argstr='-eta2', ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        eta2=dict(argstr="-eta2",),
         in_file=dict(
-            argstr='%s',
-            copyfile=False,
-            extensions=None,
-            mandatory=True,
-            position=-1,
-        ),
-        mask=dict(
-            argstr='-mask %s',
-            extensions=None,
-        ),
-        mask_only_targets=dict(
-            argstr='-mask_only_targets',
-            xor=['mask_source'],
+            argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1,
         ),
+        mask=dict(argstr="-mask %s", extensions=None,),
+        mask_only_targets=dict(argstr="-mask_only_targets", xor=["mask_source"],),
         mask_source=dict(
-            argstr='-mask_source %s',
-            extensions=None,
-            xor=['mask_only_targets'],
-        ),
-        num_threads=dict(
-            nohash=True,
-            usedefault=True,
+            argstr="-mask_source %s", extensions=None, xor=["mask_only_targets"],
         ),
+        num_threads=dict(nohash=True, usedefault=True,),
         out_file=dict(
-            argstr='-prefix %s',
+            argstr="-prefix %s",
             extensions=None,
-            name_source='in_file',
-            name_template='%s_similarity_matrix.1D',
+            name_source="in_file",
+            name_template="%s_similarity_matrix.1D",
         ),
         outputtype=dict(),
-        polort=dict(argstr='-polort %d', ),
+        polort=dict(argstr="-polort %d",),
     )
     inputs = AutoTcorrelate.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_AutoTcorrelate_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = AutoTcorrelate.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_Autobox.py b/nipype/interfaces/afni/tests/test_auto_Autobox.py
index 4837cf2d50..91eca4d811 100644
--- a/nipype/interfaces/afni/tests/test_auto_Autobox.py
+++ b/nipype/interfaces/afni/tests/test_auto_Autobox.py
@@ -4,39 +4,32 @@
 def test_Autobox_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
         in_file=dict(
-            argstr='-input %s',
-            copyfile=False,
-            extensions=None,
-            mandatory=True,
-        ),
-        no_clustering=dict(argstr='-noclust', ),
-        num_threads=dict(
-            nohash=True,
-            usedefault=True,
+            argstr="-input %s", copyfile=False, extensions=None, mandatory=True,
        ),
+        no_clustering=dict(argstr="-noclust",),
+        num_threads=dict(nohash=True, usedefault=True,),
         out_file=dict(
-            argstr='-prefix %s',
+            argstr="-prefix %s",
             extensions=None,
-            name_source='in_file',
-            name_template='%s_autobox',
+            name_source="in_file",
+            name_template="%s_autobox",
         ),
         outputtype=dict(),
-        padding=dict(argstr='-npad %d', ),
+        padding=dict(argstr="-npad %d",),
     )
     inputs = Autobox.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Autobox_outputs():
     output_map = dict(
-        out_file=dict(extensions=None, ),
+        out_file=dict(extensions=None,),
         x_max=dict(),
         x_min=dict(),
         y_max=dict(),
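A note for reviewers skimming these files: the test_auto_* modules are auto-generated from the interface specs (via tools/checkspecs.py in the nipype tree, if I recall the tooling correctly), so the churn here should come from regeneration rather than hand edits. The idiom they all rely on, shown in isolation:

    # Minimal standalone form of the generated assertion loop: every
    # (metakey, value) pair recorded in input_map must match the live trait
    # metadata on the interface's input spec.
    from nipype.interfaces.afni import Autobox  # any interface works here

    input_map = dict(padding=dict(argstr="-npad %d"))
    inputs = Autobox.input_spec()
    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            assert getattr(inputs.traits()[key], metakey) == value
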
diff --git a/nipype/interfaces/afni/tests/test_auto_Automask.py b/nipype/interfaces/afni/tests/test_auto_Automask.py
index 052c143f79..27f8040695 100644
--- a/nipype/interfaces/afni/tests/test_auto_Automask.py
+++ b/nipype/interfaces/afni/tests/test_auto_Automask.py
@@ -4,36 +4,26 @@
 def test_Automask_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(argstr="%s",),
         brain_file=dict(
-            argstr='-apply_prefix %s',
+            argstr="-apply_prefix %s",
             extensions=None,
-            name_source='in_file',
-            name_template='%s_masked',
+            name_source="in_file",
+            name_template="%s_masked",
         ),
-        clfrac=dict(argstr='-clfrac %s', ),
-        dilate=dict(argstr='-dilate %s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        erode=dict(argstr='-erode %s', ),
+        clfrac=dict(argstr="-clfrac %s",),
+        dilate=dict(argstr="-dilate %s",),
+        environ=dict(nohash=True, usedefault=True,),
+        erode=dict(argstr="-erode %s",),
         in_file=dict(
-            argstr='%s',
-            copyfile=False,
-            extensions=None,
-            mandatory=True,
-            position=-1,
-        ),
-        num_threads=dict(
-            nohash=True,
-            usedefault=True,
+            argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1,
         ),
+        num_threads=dict(nohash=True, usedefault=True,),
         out_file=dict(
-            argstr='-prefix %s',
+            argstr="-prefix %s",
             extensions=None,
-            name_source='in_file',
-            name_template='%s_mask',
+            name_source="in_file",
+            name_template="%s_mask",
         ),
         outputtype=dict(),
     )
@@ -42,10 +32,11 @@ def test_Automask_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Automask_outputs():
     output_map = dict(
-        brain_file=dict(extensions=None, ),
-        out_file=dict(extensions=None, ),
+        brain_file=dict(extensions=None,), out_file=dict(extensions=None,),
     )
     outputs = Automask.output_spec()
diff --git a/nipype/interfaces/afni/tests/test_auto_Axialize.py b/nipype/interfaces/afni/tests/test_auto_Axialize.py
index 0b14be386d..2c4fafbb5b 100644
--- a/nipype/interfaces/afni/tests/test_auto_Axialize.py
+++ b/nipype/interfaces/afni/tests/test_auto_Axialize.py
@@ -4,51 +4,34 @@
 def test_Axialize_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        axial=dict(
-            argstr='-axial',
-            xor=['coronal', 'sagittal'],
-        ),
-        coronal=dict(
-            argstr='-coronal',
-            xor=['sagittal', 'axial'],
-        ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        axial=dict(argstr="-axial", xor=["coronal", "sagittal"],),
+        coronal=dict(argstr="-coronal", xor=["sagittal", "axial"],),
+        environ=dict(nohash=True, usedefault=True,),
         in_file=dict(
-            argstr='%s',
-            copyfile=False,
-            extensions=None,
-            mandatory=True,
-            position=-2,
+            argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-2,
         ),
-        num_threads=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        orientation=dict(argstr='-orient %s', ),
+        num_threads=dict(nohash=True, usedefault=True,),
+        orientation=dict(argstr="-orient %s",),
         out_file=dict(
-            argstr='-prefix %s',
+            argstr="-prefix %s",
             extensions=None,
-            name_source='in_file',
-            name_template='%s_axialize',
+            name_source="in_file",
+            name_template="%s_axialize",
         ),
         outputtype=dict(),
-        sagittal=dict(
-            argstr='-sagittal',
-            xor=['coronal', 'axial'],
-        ),
-        verb=dict(argstr='-verb', ),
+        sagittal=dict(argstr="-sagittal", xor=["coronal", "axial"],),
+        verb=dict(argstr="-verb",),
     )
     inputs = Axialize.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Axialize_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = Axialize.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_Bandpass.py b/nipype/interfaces/afni/tests/test_auto_Bandpass.py
index d8e54e0211..b17a8433ca 100644
--- a/nipype/interfaces/afni/tests/test_auto_Bandpass.py
+++ b/nipype/interfaces/afni/tests/test_auto_Bandpass.py
@@ -4,67 +4,44 @@
 def test_Bandpass_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        automask=dict(argstr='-automask', ),
-        blur=dict(argstr='-blur %f', ),
-        despike=dict(argstr='-despike', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        highpass=dict(
-            argstr='%f',
-            mandatory=True,
-            position=-3,
-        ),
+        args=dict(argstr="%s",),
+        automask=dict(argstr="-automask",),
+        blur=dict(argstr="-blur %f",),
+        despike=dict(argstr="-despike",),
+        environ=dict(nohash=True, usedefault=True,),
+        highpass=dict(argstr="%f", mandatory=True, position=-3,),
         in_file=dict(
-            argstr='%s',
-            copyfile=False,
-            extensions=None,
-            mandatory=True,
-            position=-1,
-        ),
-        localPV=dict(argstr='-localPV %f', ),
-        lowpass=dict(
-            argstr='%f',
-            mandatory=True,
-            position=-2,
-        ),
-        mask=dict(
-            argstr='-mask %s',
-            extensions=None,
-            position=2,
-        ),
-        nfft=dict(argstr='-nfft %d', ),
-        no_detrend=dict(argstr='-nodetrend', ),
-        normalize=dict(argstr='-norm', ),
-        notrans=dict(argstr='-notrans', ),
-        num_threads=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        orthogonalize_dset=dict(
-            argstr='-dsort %s',
-            extensions=None,
-        ),
-        orthogonalize_file=dict(argstr='-ort %s', ),
+            argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1,
+        ),
+        localPV=dict(argstr="-localPV %f",),
+        lowpass=dict(argstr="%f", mandatory=True, position=-2,),
+        mask=dict(argstr="-mask %s", extensions=None, position=2,),
+        nfft=dict(argstr="-nfft %d",),
+        no_detrend=dict(argstr="-nodetrend",),
+        normalize=dict(argstr="-norm",),
+        notrans=dict(argstr="-notrans",),
+        num_threads=dict(nohash=True, usedefault=True,),
+        orthogonalize_dset=dict(argstr="-dsort %s", extensions=None,),
+        orthogonalize_file=dict(argstr="-ort %s",),
         out_file=dict(
-            argstr='-prefix %s',
+            argstr="-prefix %s",
             extensions=None,
-            name_source='in_file',
-            name_template='%s_bp',
+            name_source="in_file",
+            name_template="%s_bp",
             position=1,
         ),
         outputtype=dict(),
-        tr=dict(argstr='-dt %f', ),
+        tr=dict(argstr="-dt %f",),
     )
     inputs = Bandpass.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Bandpass_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = Bandpass.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_BlurInMask.py b/nipype/interfaces/afni/tests/test_auto_BlurInMask.py
index 86c62f56eb..ec44fed3ee 100644
--- a/nipype/interfaces/afni/tests/test_auto_BlurInMask.py
+++ b/nipype/interfaces/afni/tests/test_auto_BlurInMask.py
@@ -4,57 +4,41 @@
 def test_BlurInMask_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        automask=dict(argstr='-automask', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        float_out=dict(argstr='-float', ),
-        fwhm=dict(
-            argstr='-FWHM %f',
-            mandatory=True,
-        ),
+        args=dict(argstr="%s",),
+        automask=dict(argstr="-automask",),
+        environ=dict(nohash=True, usedefault=True,),
+        float_out=dict(argstr="-float",),
+        fwhm=dict(argstr="-FWHM %f", mandatory=True,),
         in_file=dict(
-            argstr='-input %s',
+            argstr="-input %s",
             copyfile=False,
             extensions=None,
             mandatory=True,
             position=1,
         ),
-        mask=dict(
-            argstr='-mask %s',
-            extensions=None,
-        ),
-        multimask=dict(
-            argstr='-Mmask %s',
-            extensions=None,
-        ),
-        num_threads=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        options=dict(
-            argstr='%s',
-            position=2,
-        ),
+        mask=dict(argstr="-mask %s", extensions=None,),
+        multimask=dict(argstr="-Mmask %s", extensions=None,),
+        num_threads=dict(nohash=True, usedefault=True,),
+        options=dict(argstr="%s", position=2,),
         out_file=dict(
-            argstr='-prefix %s',
+            argstr="-prefix %s",
             extensions=None,
-            name_source='in_file',
-            name_template='%s_blur',
+            name_source="in_file",
+            name_template="%s_blur",
             position=-1,
         ),
         outputtype=dict(),
-        preserve=dict(argstr='-preserve', ),
+        preserve=dict(argstr="-preserve",),
     )
     inputs = BlurInMask.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_BlurInMask_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = BlurInMask.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_BlurToFWHM.py b/nipype/interfaces/afni/tests/test_auto_BlurToFWHM.py
index 35f8b2bb80..87788b8b3d 100644
--- a/nipype/interfaces/afni/tests/test_auto_BlurToFWHM.py
+++ b/nipype/interfaces/afni/tests/test_auto_BlurToFWHM.py
@@ -4,36 +4,20 @@
 def test_BlurToFWHM_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        automask=dict(argstr='-automask', ),
-        blurmaster=dict(
-            argstr='-blurmaster %s',
-            extensions=None,
-        ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        fwhm=dict(argstr='-FWHM %f', ),
-        fwhmxy=dict(argstr='-FWHMxy %f', ),
-        in_file=dict(
-            argstr='-input %s',
-            extensions=None,
-            mandatory=True,
-        ),
-        mask=dict(
-            argstr='-mask %s',
-            extensions=None,
-        ),
-        num_threads=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        automask=dict(argstr="-automask",),
+        blurmaster=dict(argstr="-blurmaster %s", extensions=None,),
+        environ=dict(nohash=True, usedefault=True,),
+        fwhm=dict(argstr="-FWHM %f",),
+        fwhmxy=dict(argstr="-FWHMxy %f",),
+        in_file=dict(argstr="-input %s", extensions=None, mandatory=True,),
+        mask=dict(argstr="-mask %s", extensions=None,),
+        num_threads=dict(nohash=True, usedefault=True,),
         out_file=dict(
-            argstr='-prefix %s',
+            argstr="-prefix %s",
             extensions=None,
-            name_source=['in_file'],
-            name_template='%s_afni',
+            name_source=["in_file"],
+            name_template="%s_afni",
         ),
         outputtype=dict(),
     )
@@ -42,8 +26,10 @@ def test_BlurToFWHM_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_BlurToFWHM_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = BlurToFWHM.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_BrickStat.py b/nipype/interfaces/afni/tests/test_auto_BrickStat.py
index d1119bc1e5..0d3bc11c21 100644
--- a/nipype/interfaces/afni/tests/test_auto_BrickStat.py
+++ b/nipype/interfaces/afni/tests/test_auto_BrickStat.py
@@ -4,40 +4,27 @@
 def test_BrickStat_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_file=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-1,
-        ),
-        mask=dict(
-            argstr='-mask %s',
-            extensions=None,
-            position=2,
-        ),
-        max=dict(argstr='-max', ),
-        mean=dict(argstr='-mean', ),
-        min=dict(
-            argstr='-min',
-            position=1,
-        ),
-        percentile=dict(argstr='-percentile %.3f %.3f %.3f', ),
-        slow=dict(argstr='-slow', ),
-        sum=dict(argstr='-sum', ),
-        var=dict(argstr='-var', ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-1,),
+        mask=dict(argstr="-mask %s", extensions=None, position=2,),
+        max=dict(argstr="-max",),
+        mean=dict(argstr="-mean",),
+        min=dict(argstr="-min", position=1,),
+        percentile=dict(argstr="-percentile %.3f %.3f %.3f",),
+        slow=dict(argstr="-slow",),
+        sum=dict(argstr="-sum",),
+        var=dict(argstr="-var",),
     )
     inputs = BrickStat.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_BrickStat_outputs():
-    output_map = dict(min_val=dict(), )
+    output_map = dict(min_val=dict(),)
     outputs = BrickStat.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_Bucket.py b/nipype/interfaces/afni/tests/test_auto_Bucket.py
index 7410a5c70b..de301feaed 100644
--- a/nipype/interfaces/afni/tests/test_auto_Bucket.py
+++ b/nipype/interfaces/afni/tests/test_auto_Bucket.py
@@ -4,25 +4,11 @@
 def test_Bucket_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_file=dict(
-            argstr='%s',
-            mandatory=True,
-            position=-1,
-        ),
-        num_threads=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        out_file=dict(
-            argstr='-prefix %s',
-            extensions=None,
-            name_template='buck',
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        in_file=dict(argstr="%s", mandatory=True, position=-1,),
+        num_threads=dict(nohash=True, usedefault=True,),
+        out_file=dict(argstr="-prefix %s", extensions=None, name_template="buck",),
         outputtype=dict(),
     )
     inputs = Bucket.input_spec()
@@ -30,8 +16,10 @@ def test_Bucket_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Bucket_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = Bucket.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_Calc.py b/nipype/interfaces/afni/tests/test_auto_Calc.py
index f7da2d3a5e..5b49623773 100644
--- a/nipype/interfaces/afni/tests/test_auto_Calc.py
+++ b/nipype/interfaces/afni/tests/test_auto_Calc.py
@@ -4,59 +4,35 @@
 def test_Calc_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        expr=dict(
-            argstr='-expr "%s"',
-            mandatory=True,
-            position=3,
-        ),
-        in_file_a=dict(
-            argstr='-a %s',
-            extensions=None,
-            mandatory=True,
-            position=0,
-        ),
-        in_file_b=dict(
-            argstr='-b %s',
-            extensions=None,
-            position=1,
-        ),
-        in_file_c=dict(
-            argstr='-c %s',
-            extensions=None,
-            position=2,
-        ),
-        num_threads=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        other=dict(
-            argstr='',
-            extensions=None,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        expr=dict(argstr='-expr "%s"', mandatory=True, position=3,),
+        in_file_a=dict(argstr="-a %s", extensions=None, mandatory=True, position=0,),
+        in_file_b=dict(argstr="-b %s", extensions=None, position=1,),
+        in_file_c=dict(argstr="-c %s", extensions=None, position=2,),
+        num_threads=dict(nohash=True, usedefault=True,),
+        other=dict(argstr="", extensions=None,),
         out_file=dict(
-            argstr='-prefix %s',
+            argstr="-prefix %s",
             extensions=None,
-            name_source='in_file_a',
-            name_template='%s_calc',
+            name_source="in_file_a",
+            name_template="%s_calc",
         ),
         outputtype=dict(),
-        overwrite=dict(argstr='-overwrite', ),
+        overwrite=dict(argstr="-overwrite",),
         single_idx=dict(),
-        start_idx=dict(requires=['stop_idx'], ),
-        stop_idx=dict(requires=['start_idx'], ),
+        start_idx=dict(requires=["stop_idx"],),
+        stop_idx=dict(requires=["start_idx"],),
     )
     inputs = Calc.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Calc_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = Calc.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_Cat.py b/nipype/interfaces/afni/tests/test_auto_Cat.py
index 01bfc29781..07a1d2979c 100644
--- a/nipype/interfaces/afni/tests/test_auto_Cat.py
+++ b/nipype/interfaces/afni/tests/test_auto_Cat.py
@@ -4,69 +4,55 @@
 def test_Cat_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_files=dict(
-            argstr='%s',
-            mandatory=True,
-            position=-2,
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        in_files=dict(argstr="%s", mandatory=True, position=-2,),
+        keepfree=dict(argstr="-nonfixed",),
+        num_threads=dict(nohash=True, usedefault=True,),
+        omitconst=dict(argstr="-nonconst",),
+        out_cint=dict(
+            xor=["out_format", "out_nice", "out_double", "out_fint", "out_int"],
         ),
-        keepfree=dict(argstr='-nonfixed', ),
-        num_threads=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        omitconst=dict(argstr='-nonconst', ),
-        out_cint=dict(xor=[
-            'out_format', 'out_nice', 'out_double', 'out_fint', 'out_int'
-        ], ),
         out_double=dict(
-            argstr='-d',
-            xor=['out_format', 'out_nice', 'out_int', 'out_fint', 'out_cint'],
+            argstr="-d",
+            xor=["out_format", "out_nice", "out_int", "out_fint", "out_cint"],
         ),
         out_file=dict(
-            argstr='> %s',
+            argstr="> %s",
             extensions=None,
             mandatory=True,
             position=-1,
             usedefault=True,
         ),
         out_fint=dict(
-            argstr='-f',
-            xor=[
-                'out_format', 'out_nice', 'out_double', 'out_int', 'out_cint'
-            ],
+            argstr="-f",
+            xor=["out_format", "out_nice", "out_double", "out_int", "out_cint"],
         ),
         out_format=dict(
-            argstr='-form %s',
-            xor=['out_int', 'out_nice', 'out_double', 'out_fint', 'out_cint'],
+            argstr="-form %s",
+            xor=["out_int", "out_nice", "out_double", "out_fint", "out_cint"],
         ),
         out_int=dict(
-            argstr='-i',
-            xor=[
-                'out_format', 'out_nice', 'out_double', 'out_fint', 'out_cint'
-            ],
+            argstr="-i",
+            xor=["out_format", "out_nice", "out_double", "out_fint", "out_cint"],
         ),
         out_nice=dict(
-            argstr='-n',
-            xor=[
-                'out_format', 'out_int', 'out_double', 'out_fint', 'out_cint'
-            ],
+            argstr="-n",
+            xor=["out_format", "out_int", "out_double", "out_fint", "out_cint"],
         ),
         outputtype=dict(),
-        sel=dict(argstr='-sel %s', ),
-        stack=dict(argstr='-stack', ),
+        sel=dict(argstr="-sel %s",),
+        stack=dict(argstr="-stack",),
     )
     inputs = Cat.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Cat_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = Cat.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_CatMatvec.py b/nipype/interfaces/afni/tests/test_auto_CatMatvec.py
index 117b64d0ea..f1716f6084 100644
--- a/nipype/interfaces/afni/tests/test_auto_CatMatvec.py
+++ b/nipype/interfaces/afni/tests/test_auto_CatMatvec.py
@@ -4,39 +4,20 @@
 def test_CatMatvec_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        fourxfour=dict(
-            argstr='-4x4',
-            xor=['matrix', 'oneline'],
-        ),
-        in_file=dict(
-            argstr='%s',
-            mandatory=True,
-            position=-2,
-        ),
-        matrix=dict(
-            argstr='-MATRIX',
-            xor=['oneline', 'fourxfour'],
-        ),
-        num_threads=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        oneline=dict(
-            argstr='-ONELINE',
-            xor=['matrix', 'fourxfour'],
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        fourxfour=dict(argstr="-4x4", xor=["matrix", "oneline"],),
+        in_file=dict(argstr="%s", mandatory=True, position=-2,),
+        matrix=dict(argstr="-MATRIX", xor=["oneline", "fourxfour"],),
+        num_threads=dict(nohash=True, usedefault=True,),
+        oneline=dict(argstr="-ONELINE", xor=["matrix", "fourxfour"],),
         out_file=dict(
-            argstr=' > %s',
+            argstr=" > %s",
             extensions=None,
             keep_extension=False,
             mandatory=True,
-            name_source='in_file',
-            name_template='%s_cat.aff12.1D',
+            name_source="in_file",
+            name_template="%s_cat.aff12.1D",
             position=-1,
         ),
         outputtype=dict(),
@@ -46,8 +27,10 @@ def test_CatMatvec_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_CatMatvec_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = CatMatvec.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_CenterMass.py b/nipype/interfaces/afni/tests/test_auto_CenterMass.py
index 9fe4acf7db..07975af563 100644
--- a/nipype/interfaces/afni/tests/test_auto_CenterMass.py
+++ b/nipype/interfaces/afni/tests/test_auto_CenterMass.py
@@ -4,47 +4,37 @@
 def test_CenterMass_inputs():
     input_map = dict(
-        all_rois=dict(argstr='-all_rois', ),
-        args=dict(argstr='%s', ),
-        automask=dict(argstr='-automask', ),
+        all_rois=dict(argstr="-all_rois",),
+        args=dict(argstr="%s",),
+        automask=dict(argstr="-automask",),
         cm_file=dict(
-            argstr='> %s',
+            argstr="> %s",
             extensions=None,
             hash_files=False,
             keep_extension=False,
-            name_source='in_file',
-            name_template='%s_cm.out',
+            name_source="in_file",
+            name_template="%s_cm.out",
             position=-1,
         ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        environ=dict(nohash=True, usedefault=True,),
         in_file=dict(
-            argstr='%s',
-            copyfile=True,
-            extensions=None,
-            mandatory=True,
-            position=-2,
-        ),
-        local_ijk=dict(argstr='-local_ijk', ),
-        mask_file=dict(
-            argstr='-mask %s',
-            extensions=None,
+            argstr="%s", copyfile=True, extensions=None, mandatory=True, position=-2,
         ),
-        roi_vals=dict(argstr='-roi_vals %s', ),
-        set_cm=dict(argstr='-set %f %f %f', ),
+        local_ijk=dict(argstr="-local_ijk",),
+        mask_file=dict(argstr="-mask %s", extensions=None,),
+        roi_vals=dict(argstr="-roi_vals %s",),
+        set_cm=dict(argstr="-set %f %f %f",),
     )
     inputs = CenterMass.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_CenterMass_outputs():
     output_map = dict(
-        cm=dict(),
-        cm_file=dict(extensions=None, ),
-        out_file=dict(extensions=None, ),
+        cm=dict(), cm_file=dict(extensions=None,), out_file=dict(extensions=None,),
     )
     outputs = CenterMass.output_spec()
diff --git a/nipype/interfaces/afni/tests/test_auto_ClipLevel.py b/nipype/interfaces/afni/tests/test_auto_ClipLevel.py
index 95152081ca..576ea68f89 100644
--- a/nipype/interfaces/afni/tests/test_auto_ClipLevel.py
+++ b/nipype/interfaces/afni/tests/test_auto_ClipLevel.py
@@ -4,40 +4,22 @@
 def test_ClipLevel_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        doall=dict(
-            argstr='-doall',
-            position=3,
-            xor='grad',
-        ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        grad=dict(
-            argstr='-grad %s',
-            extensions=None,
-            position=3,
-            xor='doall',
-        ),
-        in_file=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-1,
-        ),
-        mfrac=dict(
-            argstr='-mfrac %s',
-            position=2,
-        ),
+        args=dict(argstr="%s",),
+        doall=dict(argstr="-doall", position=3, xor="grad",),
+        environ=dict(nohash=True, usedefault=True,),
+        grad=dict(argstr="-grad %s", extensions=None, position=3, xor="doall",),
+        in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-1,),
+        mfrac=dict(argstr="-mfrac %s", position=2,),
     )
     inputs = ClipLevel.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_ClipLevel_outputs():
-    output_map = dict(clip_val=dict(), )
+    output_map = dict(clip_val=dict(),)
     outputs = ClipLevel.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_ConvertDset.py b/nipype/interfaces/afni/tests/test_auto_ConvertDset.py
index 41f816dfc7..249fe47843 100644
--- a/nipype/interfaces/afni/tests/test_auto_ConvertDset.py
+++ b/nipype/interfaces/afni/tests/test_auto_ConvertDset.py
@@ -4,32 +4,14 @@
 def test_ConvertDset_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_file=dict(
-            argstr='-input %s',
-            extensions=None,
-            mandatory=True,
-            position=-2,
-        ),
-        num_threads=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        in_file=dict(argstr="-input %s", extensions=None, mandatory=True, position=-2,),
+        num_threads=dict(nohash=True, usedefault=True,),
         out_file=dict(
-            argstr='-prefix %s',
-            extensions=None,
-            mandatory=True,
-            position=-1,
-        ),
-        out_type=dict(
-            argstr='-o_%s',
-            mandatory=True,
-            position=0,
+            argstr="-prefix %s", extensions=None, mandatory=True, position=-1,
         ),
+        out_type=dict(argstr="-o_%s", mandatory=True, position=0,),
         outputtype=dict(),
     )
     inputs = ConvertDset.input_spec()
@@ -37,8 +19,10 @@ def test_ConvertDset_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_ConvertDset_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = ConvertDset.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/afni/tests/test_auto_Copy.py
b/nipype/interfaces/afni/tests/test_auto_Copy.py index 9a716722e1..a601202c48 100644 --- a/nipype/interfaces/afni/tests/test_auto_Copy.py +++ b/nipype/interfaces/afni/tests/test_auto_Copy.py @@ -4,39 +4,31 @@ def test_Copy_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-2, - ), - num_threads=dict( - nohash=True, - usedefault=True, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-2, ), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, - name_source='in_file', - name_template='%s_copy', + name_source="in_file", + name_template="%s_copy", position=-1, ), outputtype=dict(), - verbose=dict(argstr='-verb', ), + verbose=dict(argstr="-verb",), ) inputs = Copy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Copy_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Copy.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Deconvolve.py b/nipype/interfaces/afni/tests/test_auto_Deconvolve.py index c6969bbbe6..c03fafcf46 100644 --- a/nipype/interfaces/afni/tests/test_auto_Deconvolve.py +++ b/nipype/interfaces/afni/tests/test_auto_Deconvolve.py @@ -4,129 +4,69 @@ def test_Deconvolve_inputs(): input_map = dict( - STATmask=dict( - argstr='-STATmask %s', - extensions=None, - ), - TR_1D=dict(argstr='-TR_1D %f', ), - allzero_OK=dict(argstr='-allzero_OK', ), - args=dict(argstr='%s', ), - automask=dict(argstr='-automask', ), - cbucket=dict(argstr='-cbucket %s', ), - censor=dict( - argstr='-censor %s', - extensions=None, - ), - dmbase=dict(argstr='-dmbase', ), - dname=dict(argstr='-D%s=%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - force_TR=dict( - argstr='-force_TR %f', - position=0, - ), - fout=dict(argstr='-fout', ), - global_times=dict( - argstr='-global_times', - xor=['local_times'], - ), - glt_label=dict( - argstr='-glt_label %d %s...', - position=-1, - requires=['gltsym'], - ), - gltsym=dict( - argstr="-gltsym 'SYM: %s'...", - position=-2, - ), - goforit=dict(argstr='-GOFORIT %i', ), - in_files=dict( - argstr='-input %s', - copyfile=False, - position=1, - sep=' ', - ), - input1D=dict( - argstr='-input1D %s', - extensions=None, - ), - legendre=dict(argstr='-legendre', ), - local_times=dict( - argstr='-local_times', - xor=['global_times'], - ), - mask=dict( - argstr='-mask %s', - extensions=None, - ), - noblock=dict(argstr='-noblock', ), - nocond=dict(argstr='-nocond', ), - nodmbase=dict(argstr='-nodmbase', ), - nofdr=dict(argstr='-noFDR', ), - nolegendre=dict(argstr='-nolegendre', ), - nosvd=dict(argstr='-nosvd', ), - num_glt=dict( - argstr='-num_glt %d', - position=-3, - ), - num_stimts=dict( - argstr='-num_stimts %d', - position=-6, - ), - num_threads=dict( - argstr='-jobs %d', - nohash=True, - ), - ortvec=dict(argstr='-ortvec %s %s', ), - out_file=dict( - argstr='-bucket %s', - extensions=None, - ), + STATmask=dict(argstr="-STATmask %s", extensions=None,), + TR_1D=dict(argstr="-TR_1D %f",), + allzero_OK=dict(argstr="-allzero_OK",), + args=dict(argstr="%s",), + automask=dict(argstr="-automask",), + 
cbucket=dict(argstr="-cbucket %s",), + censor=dict(argstr="-censor %s", extensions=None,), + dmbase=dict(argstr="-dmbase",), + dname=dict(argstr="-D%s=%s",), + environ=dict(nohash=True, usedefault=True,), + force_TR=dict(argstr="-force_TR %f", position=0,), + fout=dict(argstr="-fout",), + global_times=dict(argstr="-global_times", xor=["local_times"],), + glt_label=dict(argstr="-glt_label %d %s...", position=-1, requires=["gltsym"],), + gltsym=dict(argstr="-gltsym 'SYM: %s'...", position=-2,), + goforit=dict(argstr="-GOFORIT %i",), + in_files=dict(argstr="-input %s", copyfile=False, position=1, sep=" ",), + input1D=dict(argstr="-input1D %s", extensions=None,), + legendre=dict(argstr="-legendre",), + local_times=dict(argstr="-local_times", xor=["global_times"],), + mask=dict(argstr="-mask %s", extensions=None,), + noblock=dict(argstr="-noblock",), + nocond=dict(argstr="-nocond",), + nodmbase=dict(argstr="-nodmbase",), + nofdr=dict(argstr="-noFDR",), + nolegendre=dict(argstr="-nolegendre",), + nosvd=dict(argstr="-nosvd",), + num_glt=dict(argstr="-num_glt %d", position=-3,), + num_stimts=dict(argstr="-num_stimts %d", position=-6,), + num_threads=dict(argstr="-jobs %d", nohash=True,), + ortvec=dict(argstr="-ortvec %s %s",), + out_file=dict(argstr="-bucket %s", extensions=None,), outputtype=dict(), - polort=dict(argstr='-polort %d', ), - rmsmin=dict(argstr='-rmsmin %f', ), - rout=dict(argstr='-rout', ), - sat=dict( - argstr='-sat', - xor=['trans'], - ), - singvals=dict(argstr='-singvals', ), + polort=dict(argstr="-polort %d",), + rmsmin=dict(argstr="-rmsmin %f",), + rout=dict(argstr="-rout",), + sat=dict(argstr="-sat", xor=["trans"],), + singvals=dict(argstr="-singvals",), stim_label=dict( - argstr='-stim_label %d %s...', - position=-4, - requires=['stim_times'], - ), - stim_times=dict( - argstr="-stim_times %d %s '%s'...", - position=-5, - ), - stim_times_subtract=dict(argstr='-stim_times_subtract %f', ), - svd=dict(argstr='-svd', ), - tout=dict(argstr='-tout', ), - trans=dict( - argstr='-trans', - xor=['sat'], - ), - vout=dict(argstr='-vout', ), - x1D=dict( - argstr='-x1D %s', - extensions=None, - ), - x1D_stop=dict(argstr='-x1D_stop', ), + argstr="-stim_label %d %s...", position=-4, requires=["stim_times"], + ), + stim_times=dict(argstr="-stim_times %d %s '%s'...", position=-5,), + stim_times_subtract=dict(argstr="-stim_times_subtract %f",), + svd=dict(argstr="-svd",), + tout=dict(argstr="-tout",), + trans=dict(argstr="-trans", xor=["sat"],), + vout=dict(argstr="-vout",), + x1D=dict(argstr="-x1D %s", extensions=None,), + x1D_stop=dict(argstr="-x1D_stop",), ) inputs = Deconvolve.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Deconvolve_outputs(): output_map = dict( - cbucket=dict(extensions=None, ), - out_file=dict(extensions=None, ), - reml_script=dict(extensions=None, ), - x1D=dict(extensions=None, ), + cbucket=dict(extensions=None,), + out_file=dict(extensions=None,), + reml_script=dict(extensions=None,), + x1D=dict(extensions=None,), ) outputs = Deconvolve.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_DegreeCentrality.py b/nipype/interfaces/afni/tests/test_auto_DegreeCentrality.py index 183745d2cb..0fb10a0713 100644 --- a/nipype/interfaces/afni/tests/test_auto_DegreeCentrality.py +++ b/nipype/interfaces/afni/tests/test_auto_DegreeCentrality.py @@ -4,49 +4,37 @@ def test_DegreeCentrality_inputs(): input_map = dict( - args=dict(argstr='%s', ), - 
autoclip=dict(argstr='-autoclip', ), - automask=dict(argstr='-automask', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + autoclip=dict(argstr="-autoclip",), + automask=dict(argstr="-automask",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-1, - ), - mask=dict( - argstr='-mask %s', - extensions=None, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - oned_file=dict(argstr='-out1D %s', ), + mask=dict(argstr="-mask %s", extensions=None,), + num_threads=dict(nohash=True, usedefault=True,), + oned_file=dict(argstr="-out1D %s",), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source=['in_file'], - name_template='%s_afni', + name_source=["in_file"], + name_template="%s_afni", ), outputtype=dict(), - polort=dict(argstr='-polort %d', ), - sparsity=dict(argstr='-sparsity %f', ), - thresh=dict(argstr='-thresh %f', ), + polort=dict(argstr="-polort %d",), + sparsity=dict(argstr="-sparsity %f",), + thresh=dict(argstr="-thresh %f",), ) inputs = DegreeCentrality.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DegreeCentrality_outputs(): output_map = dict( - oned_file=dict(extensions=None, ), - out_file=dict(extensions=None, ), + oned_file=dict(extensions=None,), out_file=dict(extensions=None,), ) outputs = DegreeCentrality.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_Despike.py b/nipype/interfaces/afni/tests/test_auto_Despike.py index 9d955e11d2..00a6666894 100644 --- a/nipype/interfaces/afni/tests/test_auto_Despike.py +++ b/nipype/interfaces/afni/tests/test_auto_Despike.py @@ -4,27 +4,17 @@ def test_Despike_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-1, - ), - num_threads=dict( - nohash=True, - usedefault=True, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_file', - name_template='%s_despike', + name_source="in_file", + name_template="%s_despike", ), outputtype=dict(), ) @@ -33,8 +23,10 @@ def test_Despike_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Despike_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Despike.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Detrend.py b/nipype/interfaces/afni/tests/test_auto_Detrend.py index 227f7585ac..275c45208b 100644 --- a/nipype/interfaces/afni/tests/test_auto_Detrend.py +++ b/nipype/interfaces/afni/tests/test_auto_Detrend.py @@ -4,27 +4,17 @@ def test_Detrend_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='%s', - 
copyfile=False, - extensions=None, - mandatory=True, - position=-1, - ), - num_threads=dict( - nohash=True, - usedefault=True, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_file', - name_template='%s_detrend', + name_source="in_file", + name_template="%s_detrend", ), outputtype=dict(), ) @@ -33,8 +23,10 @@ def test_Detrend_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Detrend_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Detrend.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Dot.py b/nipype/interfaces/afni/tests/test_auto_Dot.py index 561be9cd93..d1c7812cdd 100644 --- a/nipype/interfaces/afni/tests/test_auto_Dot.py +++ b/nipype/interfaces/afni/tests/test_auto_Dot.py @@ -4,48 +4,34 @@ def test_Dot_inputs(): input_map = dict( - args=dict(argstr='%s', ), - demean=dict(argstr='-demean', ), - docoef=dict(argstr='-docoef', ), - docor=dict(argstr='-docor', ), - dodice=dict(argstr='-dodice', ), - dodot=dict(argstr='-dodot', ), - doeta2=dict(argstr='-doeta2', ), - dosums=dict(argstr='-dosums', ), - environ=dict( - nohash=True, - usedefault=True, - ), - full=dict(argstr='-full', ), - in_files=dict( - argstr='%s ...', - position=-2, - ), - mask=dict( - argstr='-mask %s', - extensions=None, - ), - mrange=dict(argstr='-mrange %s %s', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - out_file=dict( - argstr=' |& tee %s', - extensions=None, - position=-1, - ), + args=dict(argstr="%s",), + demean=dict(argstr="-demean",), + docoef=dict(argstr="-docoef",), + docor=dict(argstr="-docor",), + dodice=dict(argstr="-dodice",), + dodot=dict(argstr="-dodot",), + doeta2=dict(argstr="-doeta2",), + dosums=dict(argstr="-dosums",), + environ=dict(nohash=True, usedefault=True,), + full=dict(argstr="-full",), + in_files=dict(argstr="%s ...", position=-2,), + mask=dict(argstr="-mask %s", extensions=None,), + mrange=dict(argstr="-mrange %s %s",), + num_threads=dict(nohash=True, usedefault=True,), + out_file=dict(argstr=" |& tee %s", extensions=None, position=-1,), outputtype=dict(), - show_labels=dict(argstr='-show_labels', ), - upper=dict(argstr='-upper', ), + show_labels=dict(argstr="-show_labels",), + upper=dict(argstr="-upper",), ) inputs = Dot.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Dot_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Dot.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_ECM.py b/nipype/interfaces/afni/tests/test_auto_ECM.py index 0bcc8f005e..83d5508bae 100644 --- a/nipype/interfaces/afni/tests/test_auto_ECM.py +++ b/nipype/interfaces/afni/tests/test_auto_ECM.py @@ -4,53 +4,42 @@ def test_ECM_inputs(): input_map = dict( - args=dict(argstr='%s', ), - autoclip=dict(argstr='-autoclip', ), - automask=dict(argstr='-automask', ), - environ=dict( - nohash=True, - usedefault=True, - ), - eps=dict(argstr='-eps %f', ), - fecm=dict(argstr='-fecm', ), - full=dict(argstr='-full', 
), + args=dict(argstr="%s",), + autoclip=dict(argstr="-autoclip",), + automask=dict(argstr="-automask",), + environ=dict(nohash=True, usedefault=True,), + eps=dict(argstr="-eps %f",), + fecm=dict(argstr="-fecm",), + full=dict(argstr="-full",), in_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-1, - ), - mask=dict( - argstr='-mask %s', - extensions=None, - ), - max_iter=dict(argstr='-max_iter %d', ), - memory=dict(argstr='-memory %f', ), - num_threads=dict( - nohash=True, - usedefault=True, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), + mask=dict(argstr="-mask %s", extensions=None,), + max_iter=dict(argstr="-max_iter %d",), + memory=dict(argstr="-memory %f",), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source=['in_file'], - name_template='%s_afni', + name_source=["in_file"], + name_template="%s_afni", ), outputtype=dict(), - polort=dict(argstr='-polort %d', ), - scale=dict(argstr='-scale %f', ), - shift=dict(argstr='-shift %f', ), - sparsity=dict(argstr='-sparsity %f', ), - thresh=dict(argstr='-thresh %f', ), + polort=dict(argstr="-polort %d",), + scale=dict(argstr="-scale %f",), + shift=dict(argstr="-shift %f",), + sparsity=dict(argstr="-sparsity %f",), + thresh=dict(argstr="-thresh %f",), ) inputs = ECM.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ECM_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ECM.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Edge3.py b/nipype/interfaces/afni/tests/test_auto_Edge3.py index e15dc131b8..484920d596 100644 --- a/nipype/interfaces/afni/tests/test_auto_Edge3.py +++ b/nipype/interfaces/afni/tests/test_auto_Edge3.py @@ -4,54 +4,36 @@ def test_Edge3_inputs(): input_map = dict( - args=dict(argstr='%s', ), - datum=dict(argstr='-datum %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fscale=dict( - argstr='-fscale', - xor=['gscale', 'nscale', 'scale_floats'], - ), - gscale=dict( - argstr='-gscale', - xor=['fscale', 'nscale', 'scale_floats'], - ), + args=dict(argstr="%s",), + datum=dict(argstr="-datum %s",), + environ=dict(nohash=True, usedefault=True,), + fscale=dict(argstr="-fscale", xor=["gscale", "nscale", "scale_floats"],), + gscale=dict(argstr="-gscale", xor=["fscale", "nscale", "scale_floats"],), in_file=dict( - argstr='-input %s', + argstr="-input %s", copyfile=False, extensions=None, mandatory=True, position=0, ), - nscale=dict( - argstr='-nscale', - xor=['fscale', 'gscale', 'scale_floats'], - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - out_file=dict( - argstr='-prefix %s', - extensions=None, - position=-1, - ), + nscale=dict(argstr="-nscale", xor=["fscale", "gscale", "scale_floats"],), + num_threads=dict(nohash=True, usedefault=True,), + out_file=dict(argstr="-prefix %s", extensions=None, position=-1,), outputtype=dict(), scale_floats=dict( - argstr='-scale_floats %f', - xor=['fscale', 'gscale', 'nscale'], + argstr="-scale_floats %f", xor=["fscale", "gscale", "nscale"], ), - verbose=dict(argstr='-verbose', ), + verbose=dict(argstr="-verbose",), ) inputs = Edge3.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value + + def test_Edge3_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Edge3.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Eval.py b/nipype/interfaces/afni/tests/test_auto_Eval.py index 475898c32a..1bc22fcb6c 100644 --- a/nipype/interfaces/afni/tests/test_auto_Eval.py +++ b/nipype/interfaces/afni/tests/test_auto_Eval.py @@ -4,59 +4,35 @@ def test_Eval_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - expr=dict( - argstr='-expr "%s"', - mandatory=True, - position=3, - ), - in_file_a=dict( - argstr='-a %s', - extensions=None, - mandatory=True, - position=0, - ), - in_file_b=dict( - argstr='-b %s', - extensions=None, - position=1, - ), - in_file_c=dict( - argstr='-c %s', - extensions=None, - position=2, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - other=dict( - argstr='', - extensions=None, - ), - out1D=dict(argstr='-1D', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + expr=dict(argstr='-expr "%s"', mandatory=True, position=3,), + in_file_a=dict(argstr="-a %s", extensions=None, mandatory=True, position=0,), + in_file_b=dict(argstr="-b %s", extensions=None, position=1,), + in_file_c=dict(argstr="-c %s", extensions=None, position=2,), + num_threads=dict(nohash=True, usedefault=True,), + other=dict(argstr="", extensions=None,), + out1D=dict(argstr="-1D",), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_file_a', - name_template='%s_calc', + name_source="in_file_a", + name_template="%s_calc", ), outputtype=dict(), single_idx=dict(), - start_idx=dict(requires=['stop_idx'], ), - stop_idx=dict(requires=['start_idx'], ), + start_idx=dict(requires=["stop_idx"],), + stop_idx=dict(requires=["start_idx"],), ) inputs = Eval.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Eval_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Eval.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_FWHMx.py b/nipype/interfaces/afni/tests/test_auto_FWHMx.py index 8d7d7d234c..1785a675b5 100644 --- a/nipype/interfaces/afni/tests/test_auto_FWHMx.py +++ b/nipype/interfaces/afni/tests/test_auto_FWHMx.py @@ -4,84 +4,57 @@ def test_FWHMx_inputs(): input_map = dict( - acf=dict( - argstr='-acf', - usedefault=True, - ), - args=dict(argstr='%s', ), - arith=dict( - argstr='-arith', - xor=['geom'], - ), - automask=dict( - argstr='-automask', - usedefault=True, - ), - combine=dict(argstr='-combine', ), - compat=dict(argstr='-compat', ), - demed=dict( - argstr='-demed', - xor=['detrend'], - ), - detrend=dict( - argstr='-detrend', - usedefault=True, - xor=['demed'], - ), - environ=dict( - nohash=True, - usedefault=True, - ), - geom=dict( - argstr='-geom', - xor=['arith'], - ), - in_file=dict( - argstr='-input %s', - extensions=None, - mandatory=True, - ), - mask=dict( - argstr='-mask %s', - extensions=None, - ), + acf=dict(argstr="-acf", usedefault=True,), + args=dict(argstr="%s",), + arith=dict(argstr="-arith", xor=["geom"],), + automask=dict(argstr="-automask", usedefault=True,), + combine=dict(argstr="-combine",), + 
compat=dict(argstr="-compat",), + demed=dict(argstr="-demed", xor=["detrend"],), + detrend=dict(argstr="-detrend", usedefault=True, xor=["demed"],), + environ=dict(nohash=True, usedefault=True,), + geom=dict(argstr="-geom", xor=["arith"],), + in_file=dict(argstr="-input %s", extensions=None, mandatory=True,), + mask=dict(argstr="-mask %s", extensions=None,), out_detrend=dict( - argstr='-detprefix %s', + argstr="-detprefix %s", extensions=None, keep_extension=False, - name_source='in_file', - name_template='%s_detrend', + name_source="in_file", + name_template="%s_detrend", ), out_file=dict( - argstr='> %s', + argstr="> %s", extensions=None, keep_extension=False, - name_source='in_file', - name_template='%s_fwhmx.out', + name_source="in_file", + name_template="%s_fwhmx.out", position=-1, ), out_subbricks=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, keep_extension=False, - name_source='in_file', - name_template='%s_subbricks.out', + name_source="in_file", + name_template="%s_subbricks.out", ), - unif=dict(argstr='-unif', ), + unif=dict(argstr="-unif",), ) inputs = FWHMx.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FWHMx_outputs(): output_map = dict( acf_param=dict(), fwhm=dict(), - out_acf=dict(extensions=None, ), - out_detrend=dict(extensions=None, ), - out_file=dict(extensions=None, ), - out_subbricks=dict(extensions=None, ), + out_acf=dict(extensions=None,), + out_detrend=dict(extensions=None,), + out_file=dict(extensions=None,), + out_subbricks=dict(extensions=None,), ) outputs = FWHMx.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_Fim.py b/nipype/interfaces/afni/tests/test_auto_Fim.py index 49cf009096..6a07de12e5 100644 --- a/nipype/interfaces/afni/tests/test_auto_Fim.py +++ b/nipype/interfaces/afni/tests/test_auto_Fim.py @@ -4,41 +4,26 @@ def test_Fim_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fim_thr=dict( - argstr='-fim_thr %f', - position=3, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + fim_thr=dict(argstr="-fim_thr %f", position=3,), ideal_file=dict( - argstr='-ideal_file %s', - extensions=None, - mandatory=True, - position=2, + argstr="-ideal_file %s", extensions=None, mandatory=True, position=2, ), in_file=dict( - argstr='-input %s', + argstr="-input %s", copyfile=False, extensions=None, mandatory=True, position=1, ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - out=dict( - argstr='-out %s', - position=4, - ), + num_threads=dict(nohash=True, usedefault=True,), + out=dict(argstr="-out %s", position=4,), out_file=dict( - argstr='-bucket %s', + argstr="-bucket %s", extensions=None, - name_source='in_file', - name_template='%s_fim', + name_source="in_file", + name_template="%s_fim", ), outputtype=dict(), ) @@ -47,8 +32,10 @@ def test_Fim_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Fim_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Fim.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Fourier.py b/nipype/interfaces/afni/tests/test_auto_Fourier.py index 1f1ef8bc4f..0c648fb149 100644 --- a/nipype/interfaces/afni/tests/test_auto_Fourier.py 
+++ b/nipype/interfaces/afni/tests/test_auto_Fourier.py @@ -4,46 +4,32 @@ def test_Fourier_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - highpass=dict( - argstr='-highpass %f', - mandatory=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + highpass=dict(argstr="-highpass %f", mandatory=True,), in_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-1, - ), - lowpass=dict( - argstr='-lowpass %f', - mandatory=True, - ), - num_threads=dict( - nohash=True, - usedefault=True, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), + lowpass=dict(argstr="-lowpass %f", mandatory=True,), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_file', - name_template='%s_fourier', + name_source="in_file", + name_template="%s_fourier", ), outputtype=dict(), - retrend=dict(argstr='-retrend', ), + retrend=dict(argstr="-retrend",), ) inputs = Fourier.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Fourier_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Fourier.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_GCOR.py b/nipype/interfaces/afni/tests/test_auto_GCOR.py index 98708d79fa..b63b269d6a 100644 --- a/nipype/interfaces/afni/tests/test_auto_GCOR.py +++ b/nipype/interfaces/afni/tests/test_auto_GCOR.py @@ -4,33 +4,28 @@ def test_GCOR_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='-input %s', + argstr="-input %s", copyfile=False, extensions=None, mandatory=True, position=-1, ), - mask=dict( - argstr='-mask %s', - copyfile=False, - extensions=None, - ), - nfirst=dict(argstr='-nfirst %d', ), - no_demean=dict(argstr='-no_demean', ), + mask=dict(argstr="-mask %s", copyfile=False, extensions=None,), + nfirst=dict(argstr="-nfirst %d",), + no_demean=dict(argstr="-no_demean",), ) inputs = GCOR.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GCOR_outputs(): - output_map = dict(out=dict(), ) + output_map = dict(out=dict(),) outputs = GCOR.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Hist.py b/nipype/interfaces/afni/tests/test_auto_Hist.py index 92ce044053..30443b3a44 100644 --- a/nipype/interfaces/afni/tests/test_auto_Hist.py +++ b/nipype/interfaces/afni/tests/test_auto_Hist.py @@ -4,56 +4,46 @@ def test_Hist_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bin_width=dict(argstr='-binwidth %f', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + bin_width=dict(argstr="-binwidth %f",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='-input %s', + argstr="-input %s", copyfile=False, extensions=None, mandatory=True, position=1, ), - mask=dict( - argstr='-mask %s', - extensions=None, - ), - max_value=dict(argstr='-max %f', ), - min_value=dict(argstr='-min %f', ), - 
nbin=dict(argstr='-nbin %d', ), + mask=dict(argstr="-mask %s", extensions=None,), + max_value=dict(argstr="-max %f",), + min_value=dict(argstr="-min %f",), + nbin=dict(argstr="-nbin %d",), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, keep_extension=False, - name_source=['in_file'], - name_template='%s_hist', + name_source=["in_file"], + name_template="%s_hist", ), out_show=dict( - argstr='> %s', + argstr="> %s", extensions=None, keep_extension=False, - name_source='in_file', - name_template='%s_hist.out', + name_source="in_file", + name_template="%s_hist.out", position=-1, ), - showhist=dict( - argstr='-showhist', - usedefault=True, - ), + showhist=dict(argstr="-showhist", usedefault=True,), ) inputs = Hist.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Hist_outputs(): - output_map = dict( - out_file=dict(extensions=None, ), - out_show=dict(extensions=None, ), - ) + output_map = dict(out_file=dict(extensions=None,), out_show=dict(extensions=None,),) outputs = Hist.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_LFCD.py b/nipype/interfaces/afni/tests/test_auto_LFCD.py index 11c6857ccc..aebe0dcded 100644 --- a/nipype/interfaces/afni/tests/test_auto_LFCD.py +++ b/nipype/interfaces/afni/tests/test_auto_LFCD.py @@ -4,45 +4,34 @@ def test_LFCD_inputs(): input_map = dict( - args=dict(argstr='%s', ), - autoclip=dict(argstr='-autoclip', ), - automask=dict(argstr='-automask', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + autoclip=dict(argstr="-autoclip",), + automask=dict(argstr="-automask",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-1, - ), - mask=dict( - argstr='-mask %s', - extensions=None, - ), - num_threads=dict( - nohash=True, - usedefault=True, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), + mask=dict(argstr="-mask %s", extensions=None,), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source=['in_file'], - name_template='%s_afni', + name_source=["in_file"], + name_template="%s_afni", ), outputtype=dict(), - polort=dict(argstr='-polort %d', ), - thresh=dict(argstr='-thresh %f', ), + polort=dict(argstr="-polort %d",), + thresh=dict(argstr="-thresh %f",), ) inputs = LFCD.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LFCD_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = LFCD.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_LocalBistat.py b/nipype/interfaces/afni/tests/test_auto_LocalBistat.py index 91e96d9eed..ea718b9d1c 100644 --- a/nipype/interfaces/afni/tests/test_auto_LocalBistat.py +++ b/nipype/interfaces/afni/tests/test_auto_LocalBistat.py @@ -4,65 +4,35 @@ def test_LocalBistat_inputs(): input_map = dict( - args=dict(argstr='%s', ), - automask=dict( - argstr='-automask', - xor=['weight_file'], - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file1=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - 
in_file2=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - ), - mask_file=dict( - argstr='-mask %s', - extensions=None, - ), - neighborhood=dict( - argstr="-nbhd '%s(%s)'", - mandatory=True, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + automask=dict(argstr="-automask", xor=["weight_file"],), + environ=dict(nohash=True, usedefault=True,), + in_file1=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + in_file2=dict(argstr="%s", extensions=None, mandatory=True, position=-1,), + mask_file=dict(argstr="-mask %s", extensions=None,), + neighborhood=dict(argstr="-nbhd '%s(%s)'", mandatory=True,), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, keep_extension=True, - name_source='in_file1', - name_template='%s_bistat', + name_source="in_file1", + name_template="%s_bistat", position=0, ), outputtype=dict(), - stat=dict( - argstr='-stat %s...', - mandatory=True, - ), - weight_file=dict( - argstr='-weight %s', - extensions=None, - xor=['automask'], - ), + stat=dict(argstr="-stat %s...", mandatory=True,), + weight_file=dict(argstr="-weight %s", extensions=None, xor=["automask"],), ) inputs = LocalBistat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LocalBistat_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = LocalBistat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Localstat.py b/nipype/interfaces/afni/tests/test_auto_Localstat.py index e7b4089474..4b036e6d84 100644 --- a/nipype/interfaces/afni/tests/test_auto_Localstat.py +++ b/nipype/interfaces/afni/tests/test_auto_Localstat.py @@ -4,70 +4,46 @@ def test_Localstat_inputs(): input_map = dict( - args=dict(argstr='%s', ), - automask=dict(argstr='-automask', ), - environ=dict( - nohash=True, - usedefault=True, - ), - grid_rmode=dict( - argstr='-grid_rmode %s', - requires=['reduce_restore_grid'], - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - ), - mask_file=dict( - argstr='-mask %s', - extensions=None, - ), - neighborhood=dict( - argstr="-nbhd '%s(%s)'", - mandatory=True, - ), - nonmask=dict(argstr='-use_nonmask', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + automask=dict(argstr="-automask",), + environ=dict(nohash=True, usedefault=True,), + grid_rmode=dict(argstr="-grid_rmode %s", requires=["reduce_restore_grid"],), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-1,), + mask_file=dict(argstr="-mask %s", extensions=None,), + neighborhood=dict(argstr="-nbhd '%s(%s)'", mandatory=True,), + nonmask=dict(argstr="-use_nonmask",), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_localstat', + name_source="in_file", + name_template="%s_localstat", position=0, ), outputtype=dict(), - overwrite=dict(argstr='-overwrite', ), - quiet=dict(argstr='-quiet', ), + overwrite=dict(argstr="-overwrite",), + quiet=dict(argstr="-quiet",), reduce_grid=dict( - argstr='-reduce_grid %s', - xor=['reduce_restore_grid', 'reduce_max_vox'], + argstr="-reduce_grid %s", xor=["reduce_restore_grid", 
"reduce_max_vox"], ), reduce_max_vox=dict( - argstr='-reduce_max_vox %s', - xor=['reduce_restore_grid', 'reduce_grid'], + argstr="-reduce_max_vox %s", xor=["reduce_restore_grid", "reduce_grid"], ), reduce_restore_grid=dict( - argstr='-reduce_restore_grid %s', - xor=['reduce_max_vox', 'reduce_grid'], - ), - stat=dict( - argstr='-stat %s...', - mandatory=True, + argstr="-reduce_restore_grid %s", xor=["reduce_max_vox", "reduce_grid"], ), + stat=dict(argstr="-stat %s...", mandatory=True,), ) inputs = Localstat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Localstat_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Localstat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_MaskTool.py b/nipype/interfaces/afni/tests/test_auto_MaskTool.py index 1b3be1de0e..dffe07ff32 100644 --- a/nipype/interfaces/afni/tests/test_auto_MaskTool.py +++ b/nipype/interfaces/afni/tests/test_auto_MaskTool.py @@ -4,52 +4,37 @@ def test_MaskTool_inputs(): input_map = dict( - args=dict(argstr='%s', ), - count=dict( - argstr='-count', - position=2, - ), - datum=dict(argstr='-datum %s', ), - dilate_inputs=dict(argstr='-dilate_inputs %s', ), - dilate_results=dict(argstr='-dilate_results %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fill_dirs=dict( - argstr='-fill_dirs %s', - requires=['fill_holes'], - ), - fill_holes=dict(argstr='-fill_holes', ), - frac=dict(argstr='-frac %s', ), - in_file=dict( - argstr='-input %s', - copyfile=False, - mandatory=True, - position=-1, - ), - inter=dict(argstr='-inter', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + count=dict(argstr="-count", position=2,), + datum=dict(argstr="-datum %s",), + dilate_inputs=dict(argstr="-dilate_inputs %s",), + dilate_results=dict(argstr="-dilate_results %s",), + environ=dict(nohash=True, usedefault=True,), + fill_dirs=dict(argstr="-fill_dirs %s", requires=["fill_holes"],), + fill_holes=dict(argstr="-fill_holes",), + frac=dict(argstr="-frac %s",), + in_file=dict(argstr="-input %s", copyfile=False, mandatory=True, position=-1,), + inter=dict(argstr="-inter",), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_file', - name_template='%s_mask', + name_source="in_file", + name_template="%s_mask", ), outputtype=dict(), - union=dict(argstr='-union', ), - verbose=dict(argstr='-verb %s', ), + union=dict(argstr="-union",), + verbose=dict(argstr="-verb %s",), ) inputs = MaskTool.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MaskTool_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = MaskTool.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Maskave.py b/nipype/interfaces/afni/tests/test_auto_Maskave.py index f322d55fa8..b882cfdba9 100644 --- a/nipype/interfaces/afni/tests/test_auto_Maskave.py +++ b/nipype/interfaces/afni/tests/test_auto_Maskave.py @@ -4,48 +4,33 @@ def test_Maskave_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - 
usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-2, - ), - mask=dict( - argstr='-mask %s', - extensions=None, - position=1, - ), - num_threads=dict( - nohash=True, - usedefault=True, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-2, ), + mask=dict(argstr="-mask %s", extensions=None, position=1,), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='> %s', + argstr="> %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_maskave.1D', + name_source="in_file", + name_template="%s_maskave.1D", position=-1, ), outputtype=dict(), - quiet=dict( - argstr='-quiet', - position=2, - ), + quiet=dict(argstr="-quiet", position=2,), ) inputs = Maskave.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Maskave_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Maskave.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Means.py b/nipype/interfaces/afni/tests/test_auto_Means.py index 9a6d759123..b88957fef3 100644 --- a/nipype/interfaces/afni/tests/test_auto_Means.py +++ b/nipype/interfaces/afni/tests/test_auto_Means.py @@ -4,50 +4,37 @@ def test_Means_inputs(): input_map = dict( - args=dict(argstr='%s', ), - count=dict(argstr='-count', ), - datum=dict(argstr='-datum %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file_a=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - in_file_b=dict( - argstr='%s', - extensions=None, - position=-1, - ), - mask_inter=dict(argstr='-mask_inter', ), - mask_union=dict(argstr='-mask_union', ), - non_zero=dict(argstr='-non_zero', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + count=dict(argstr="-count",), + datum=dict(argstr="-datum %s",), + environ=dict(nohash=True, usedefault=True,), + in_file_a=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + in_file_b=dict(argstr="%s", extensions=None, position=-1,), + mask_inter=dict(argstr="-mask_inter",), + mask_union=dict(argstr="-mask_union",), + non_zero=dict(argstr="-non_zero",), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_file_a', - name_template='%s_mean', + name_source="in_file_a", + name_template="%s_mean", ), outputtype=dict(), - scale=dict(argstr='-%sscale', ), - sqr=dict(argstr='-sqr', ), - std_dev=dict(argstr='-stdev', ), - summ=dict(argstr='-sum', ), + scale=dict(argstr="-%sscale",), + sqr=dict(argstr="-sqr",), + std_dev=dict(argstr="-stdev",), + summ=dict(argstr="-sum",), ) inputs = Means.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Means_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Means.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Merge.py b/nipype/interfaces/afni/tests/test_auto_Merge.py index 30b838620e..9ccaf8d4d4 100644 --- 
a/nipype/interfaces/afni/tests/test_auto_Merge.py +++ b/nipype/interfaces/afni/tests/test_auto_Merge.py @@ -4,31 +4,17 @@ def test_Merge_inputs(): input_map = dict( - args=dict(argstr='%s', ), - blurfwhm=dict( - argstr='-1blur_fwhm %d', - units='mm', - ), - doall=dict(argstr='-doall', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_files=dict( - argstr='%s', - copyfile=False, - mandatory=True, - position=-1, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + blurfwhm=dict(argstr="-1blur_fwhm %d", units="mm",), + doall=dict(argstr="-doall",), + environ=dict(nohash=True, usedefault=True,), + in_files=dict(argstr="%s", copyfile=False, mandatory=True, position=-1,), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_files', - name_template='%s_merge', + name_source="in_files", + name_template="%s_merge", ), outputtype=dict(), ) @@ -37,8 +23,10 @@ def test_Merge_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Merge_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Merge.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Notes.py b/nipype/interfaces/afni/tests/test_auto_Notes.py index 65365e0829..a51727578d 100644 --- a/nipype/interfaces/afni/tests/test_auto_Notes.py +++ b/nipype/interfaces/afni/tests/test_auto_Notes.py @@ -4,46 +4,29 @@ def test_Notes_inputs(): input_map = dict( - add=dict(argstr='-a "%s"', ), - add_history=dict( - argstr='-h "%s"', - xor=['rep_history'], - ), - args=dict(argstr='%s', ), - delete=dict(argstr='-d %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), + add=dict(argstr='-a "%s"',), + add_history=dict(argstr='-h "%s"', xor=["rep_history"],), + args=dict(argstr="%s",), + delete=dict(argstr="-d %d",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-1, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - out_file=dict( - argstr='%s', - extensions=None, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), + num_threads=dict(nohash=True, usedefault=True,), + out_file=dict(argstr="%s", extensions=None,), outputtype=dict(), - rep_history=dict( - argstr='-HH "%s"', - xor=['add_history'], - ), - ses=dict(argstr='-ses', ), + rep_history=dict(argstr='-HH "%s"', xor=["add_history"],), + ses=dict(argstr="-ses",), ) inputs = Notes.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Notes_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Notes.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_NwarpAdjust.py b/nipype/interfaces/afni/tests/test_auto_NwarpAdjust.py index e3882d3559..265e3720a2 100644 --- a/nipype/interfaces/afni/tests/test_auto_NwarpAdjust.py +++ b/nipype/interfaces/afni/tests/test_auto_NwarpAdjust.py @@ -4,37 +4,30 @@ def test_NwarpAdjust_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - 
in_files=dict(argstr='-source %s', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_files=dict(argstr="-source %s",), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, keep_extension=True, - name_source='in_files', - name_template='%s_NwarpAdjust', - requires=['in_files'], + name_source="in_files", + name_template="%s_NwarpAdjust", + requires=["in_files"], ), outputtype=dict(), - warps=dict( - argstr='-nwarp %s', - mandatory=True, - ), + warps=dict(argstr="-nwarp %s", mandatory=True,), ) inputs = NwarpAdjust.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_NwarpAdjust_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = NwarpAdjust.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_NwarpApply.py b/nipype/interfaces/afni/tests/test_auto_NwarpApply.py index 490f296d3b..727b210c4e 100644 --- a/nipype/interfaces/afni/tests/test_auto_NwarpApply.py +++ b/nipype/interfaces/afni/tests/test_auto_NwarpApply.py @@ -4,52 +4,33 @@ def test_NwarpApply_inputs(): input_map = dict( - ainterp=dict(argstr='-ainterp %s', ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-source %s', - mandatory=True, - ), - interp=dict( - argstr='-interp %s', - usedefault=True, - ), - inv_warp=dict(argstr='-iwarp', ), - master=dict( - argstr='-master %s', - extensions=None, - ), + ainterp=dict(argstr="-ainterp %s",), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-source %s", mandatory=True,), + interp=dict(argstr="-interp %s", usedefault=True,), + inv_warp=dict(argstr="-iwarp",), + master=dict(argstr="-master %s", extensions=None,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_file', - name_template='%s_Nwarp', - ), - quiet=dict( - argstr='-quiet', - xor=['verb'], - ), - short=dict(argstr='-short', ), - verb=dict( - argstr='-verb', - xor=['quiet'], - ), - warp=dict( - argstr='-nwarp %s', - mandatory=True, + name_source="in_file", + name_template="%s_Nwarp", ), + quiet=dict(argstr="-quiet", xor=["verb"],), + short=dict(argstr="-short",), + verb=dict(argstr="-verb", xor=["quiet"],), + warp=dict(argstr="-nwarp %s", mandatory=True,), ) inputs = NwarpApply.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_NwarpApply_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = NwarpApply.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_NwarpCat.py b/nipype/interfaces/afni/tests/test_auto_NwarpCat.py index 666de06590..82b1e6a125 100644 --- a/nipype/interfaces/afni/tests/test_auto_NwarpCat.py +++ b/nipype/interfaces/afni/tests/test_auto_NwarpCat.py @@ -4,43 +4,32 @@ def test_NwarpCat_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - expad=dict(argstr='-expad %d', ), - in_files=dict( - argstr='%s', - mandatory=True, - position=-1, - 
), - interp=dict( - argstr='-interp %s', - usedefault=True, - ), - inv_warp=dict(argstr='-iwarp', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + expad=dict(argstr="-expad %d",), + in_files=dict(argstr="%s", mandatory=True, position=-1,), + interp=dict(argstr="-interp %s", usedefault=True,), + inv_warp=dict(argstr="-iwarp",), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_files', - name_template='%s_NwarpCat', + name_source="in_files", + name_template="%s_NwarpCat", ), outputtype=dict(), - space=dict(argstr='-space %s', ), - verb=dict(argstr='-verb', ), + space=dict(argstr="-space %s",), + verb=dict(argstr="-verb",), ) inputs = NwarpCat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_NwarpCat_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = NwarpCat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_OneDToolPy.py b/nipype/interfaces/afni/tests/test_auto_OneDToolPy.py index 3024c438c8..e9ae2472be 100644 --- a/nipype/interfaces/afni/tests/test_auto_OneDToolPy.py +++ b/nipype/interfaces/afni/tests/test_auto_OneDToolPy.py @@ -4,46 +4,39 @@ def test_OneDToolPy_inputs(): input_map = dict( - args=dict(argstr='%s', ), - censor_motion=dict(argstr='-censor_motion %f %s', ), - censor_prev_TR=dict(argstr='-censor_prev_TR', ), - demean=dict(argstr='-demean', ), - derivative=dict(argstr='-derivative', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-infile %s', - extensions=None, - mandatory=True, - ), + args=dict(argstr="%s",), + censor_motion=dict(argstr="-censor_motion %f %s",), + censor_prev_TR=dict(argstr="-censor_prev_TR",), + demean=dict(argstr="-demean",), + derivative=dict(argstr="-derivative",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-infile %s", extensions=None, mandatory=True,), out_file=dict( - argstr='-write %s', - extensions=None, - xor=['show_cormat_warnings'], + argstr="-write %s", extensions=None, xor=["show_cormat_warnings"], ), outputtype=dict(), - py27_path=dict(usedefault=True, ), - set_nruns=dict(argstr='-set_nruns %d', ), - show_censor_count=dict(argstr='-show_censor_count', ), + py27_path=dict(usedefault=True,), + set_nruns=dict(argstr="-set_nruns %d",), + show_censor_count=dict(argstr="-show_censor_count",), show_cormat_warnings=dict( - argstr='-show_cormat_warnings |& tee %s', + argstr="-show_cormat_warnings |& tee %s", extensions=None, position=-1, - xor=['out_file'], + xor=["out_file"], ), - show_indices_interest=dict(argstr='-show_indices_interest', ), - show_trs_run=dict(argstr='-show_trs_run %d', ), - show_trs_uncensored=dict(argstr='-show_trs_uncensored %s', ), + show_indices_interest=dict(argstr="-show_indices_interest",), + show_trs_run=dict(argstr="-show_trs_run %d",), + show_trs_uncensored=dict(argstr="-show_trs_uncensored %s",), ) inputs = OneDToolPy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_OneDToolPy_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) 
outputs = OneDToolPy.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_OutlierCount.py b/nipype/interfaces/afni/tests/test_auto_OutlierCount.py index f630daf9f6..3b90e076d7 100644 --- a/nipype/interfaces/afni/tests/test_auto_OutlierCount.py +++ b/nipype/interfaces/afni/tests/test_auto_OutlierCount.py @@ -4,74 +4,43 @@ def test_OutlierCount_inputs(): input_map = dict( - args=dict(argstr='%s', ), - autoclip=dict( - argstr='-autoclip', - usedefault=True, - xor=['mask'], - ), - automask=dict( - argstr='-automask', - usedefault=True, - xor=['mask'], - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fraction=dict( - argstr='-fraction', - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - interval=dict( - argstr='-range', - usedefault=True, - ), - legendre=dict( - argstr='-legendre', - usedefault=True, - ), - mask=dict( - argstr='-mask %s', - extensions=None, - xor=['autoclip', 'automask'], - ), + args=dict(argstr="%s",), + autoclip=dict(argstr="-autoclip", usedefault=True, xor=["mask"],), + automask=dict(argstr="-automask", usedefault=True, xor=["mask"],), + environ=dict(nohash=True, usedefault=True,), + fraction=dict(argstr="-fraction", usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + interval=dict(argstr="-range", usedefault=True,), + legendre=dict(argstr="-legendre", usedefault=True,), + mask=dict(argstr="-mask %s", extensions=None, xor=["autoclip", "automask"],), out_file=dict( extensions=None, keep_extension=False, - name_source=['in_file'], - name_template='%s_outliers', + name_source=["in_file"], + name_template="%s_outliers", ), outliers_file=dict( - argstr='-save %s', + argstr="-save %s", extensions=None, keep_extension=True, - name_source=['in_file'], - name_template='%s_outliers', - output_name='out_outliers', + name_source=["in_file"], + name_template="%s_outliers", + output_name="out_outliers", ), - polort=dict(argstr='-polort %d', ), - qthr=dict( - argstr='-qthr %.5f', - usedefault=True, - ), - save_outliers=dict(usedefault=True, ), + polort=dict(argstr="-polort %d",), + qthr=dict(argstr="-qthr %.5f", usedefault=True,), + save_outliers=dict(usedefault=True,), ) inputs = OutlierCount.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_OutlierCount_outputs(): output_map = dict( - out_file=dict(extensions=None, ), - out_outliers=dict(extensions=None, ), + out_file=dict(extensions=None,), out_outliers=dict(extensions=None,), ) outputs = OutlierCount.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_QualityIndex.py b/nipype/interfaces/afni/tests/test_auto_QualityIndex.py index 209353e4dc..359743b19f 100644 --- a/nipype/interfaces/afni/tests/test_auto_QualityIndex.py +++ b/nipype/interfaces/afni/tests/test_auto_QualityIndex.py @@ -4,61 +4,34 @@ def test_QualityIndex_inputs(): input_map = dict( - args=dict(argstr='%s', ), - autoclip=dict( - argstr='-autoclip', - usedefault=True, - xor=['mask'], - ), - automask=dict( - argstr='-automask', - usedefault=True, - xor=['mask'], - ), - clip=dict(argstr='-clip %f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - interval=dict( - argstr='-range', - usedefault=True, - ), - mask=dict( - argstr='-mask %s', - extensions=None, - 
xor=['autoclip', 'automask'], - ), + args=dict(argstr="%s",), + autoclip=dict(argstr="-autoclip", usedefault=True, xor=["mask"],), + automask=dict(argstr="-automask", usedefault=True, xor=["mask"],), + clip=dict(argstr="-clip %f",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + interval=dict(argstr="-range", usedefault=True,), + mask=dict(argstr="-mask %s", extensions=None, xor=["autoclip", "automask"],), out_file=dict( - argstr='> %s', + argstr="> %s", extensions=None, keep_extension=False, - name_source=['in_file'], - name_template='%s_tqual', + name_source=["in_file"], + name_template="%s_tqual", position=-1, ), - quadrant=dict( - argstr='-quadrant', - usedefault=True, - ), - spearman=dict( - argstr='-spearman', - usedefault=True, - ), + quadrant=dict(argstr="-quadrant", usedefault=True,), + spearman=dict(argstr="-spearman", usedefault=True,), ) inputs = QualityIndex.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_QualityIndex_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = QualityIndex.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Qwarp.py b/nipype/interfaces/afni/tests/test_auto_Qwarp.py index 877dd82fb2..14ff6192d5 100644 --- a/nipype/interfaces/afni/tests/test_auto_Qwarp.py +++ b/nipype/interfaces/afni/tests/test_auto_Qwarp.py @@ -4,171 +4,89 @@ def test_Qwarp_inputs(): input_map = dict( - Qfinal=dict(argstr='-Qfinal', ), - Qonly=dict(argstr='-Qonly', ), - allineate=dict(argstr='-allineate', ), - allineate_opts=dict( - argstr='-allineate_opts %s', - requires=['allineate'], - ), - allsave=dict( - argstr='-allsave', - xor=['nopadWARP', 'duplo', 'plusminus'], - ), - args=dict(argstr='%s', ), - ballopt=dict( - argstr='-ballopt', - xor=['workhard', 'boxopt'], - ), + Qfinal=dict(argstr="-Qfinal",), + Qonly=dict(argstr="-Qonly",), + allineate=dict(argstr="-allineate",), + allineate_opts=dict(argstr="-allineate_opts %s", requires=["allineate"],), + allsave=dict(argstr="-allsave", xor=["nopadWARP", "duplo", "plusminus"],), + args=dict(argstr="%s",), + ballopt=dict(argstr="-ballopt", xor=["workhard", "boxopt"],), base_file=dict( - argstr='-base %s', - copyfile=False, - extensions=None, - mandatory=True, + argstr="-base %s", copyfile=False, extensions=None, mandatory=True, ), - baxopt=dict( - argstr='-boxopt', - xor=['workhard', 'ballopt'], - ), - blur=dict(argstr='-blur %s', ), + baxopt=dict(argstr="-boxopt", xor=["workhard", "ballopt"],), + blur=dict(argstr="-blur %s",), duplo=dict( - argstr='-duplo', - xor=[ - 'gridlist', 'maxlev', 'inilev', 'iniwarp', 'plusminus', - 'allsave' - ], - ), - emask=dict( - argstr='-emask %s', - copyfile=False, - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - expad=dict( - argstr='-expad %d', - xor=['nopadWARP'], + argstr="-duplo", + xor=["gridlist", "maxlev", "inilev", "iniwarp", "plusminus", "allsave"], ), + emask=dict(argstr="-emask %s", copyfile=False, extensions=None,), + environ=dict(nohash=True, usedefault=True,), + expad=dict(argstr="-expad %d", xor=["nopadWARP"],), gridlist=dict( - argstr='-gridlist %s', + argstr="-gridlist %s", copyfile=False, extensions=None, - xor=['duplo', 'plusminus'], - ), - hel=dict( - argstr='-hel', - xor=['nmi', 'mi', 'lpc', 'lpa', 'pear'], + 
xor=["duplo", "plusminus"], ), + hel=dict(argstr="-hel", xor=["nmi", "mi", "lpc", "lpa", "pear"],), in_file=dict( - argstr='-source %s', - copyfile=False, - extensions=None, - mandatory=True, - ), - inilev=dict( - argstr='-inilev %d', - xor=['duplo'], - ), - iniwarp=dict( - argstr='-iniwarp %s', - xor=['duplo'], - ), - iwarp=dict( - argstr='-iwarp', - xor=['plusminus'], - ), - lpa=dict( - argstr='-lpa', - xor=['nmi', 'mi', 'lpc', 'hel', 'pear'], - ), - lpc=dict( - argstr='-lpc', - position=-2, - xor=['nmi', 'mi', 'hel', 'lpa', 'pear'], - ), - maxlev=dict( - argstr='-maxlev %d', - position=-1, - xor=['duplo'], - ), - mi=dict( - argstr='-mi', - xor=['mi', 'hel', 'lpc', 'lpa', 'pear'], - ), - minpatch=dict(argstr='-minpatch %d', ), - nmi=dict( - argstr='-nmi', - xor=['nmi', 'hel', 'lpc', 'lpa', 'pear'], - ), - noXdis=dict(argstr='-noXdis', ), - noYdis=dict(argstr='-noYdis', ), - noZdis=dict(argstr='-noZdis', ), - noneg=dict(argstr='-noneg', ), - nopad=dict(argstr='-nopad', ), - nopadWARP=dict( - argstr='-nopadWARP', - xor=['allsave', 'expad'], - ), - nopenalty=dict(argstr='-nopenalty', ), - nowarp=dict(argstr='-nowarp', ), - noweight=dict(argstr='-noweight', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + argstr="-source %s", copyfile=False, extensions=None, mandatory=True, + ), + inilev=dict(argstr="-inilev %d", xor=["duplo"],), + iniwarp=dict(argstr="-iniwarp %s", xor=["duplo"],), + iwarp=dict(argstr="-iwarp", xor=["plusminus"],), + lpa=dict(argstr="-lpa", xor=["nmi", "mi", "lpc", "hel", "pear"],), + lpc=dict(argstr="-lpc", position=-2, xor=["nmi", "mi", "hel", "lpa", "pear"],), + maxlev=dict(argstr="-maxlev %d", position=-1, xor=["duplo"],), + mi=dict(argstr="-mi", xor=["mi", "hel", "lpc", "lpa", "pear"],), + minpatch=dict(argstr="-minpatch %d",), + nmi=dict(argstr="-nmi", xor=["nmi", "hel", "lpc", "lpa", "pear"],), + noXdis=dict(argstr="-noXdis",), + noYdis=dict(argstr="-noYdis",), + noZdis=dict(argstr="-noZdis",), + noneg=dict(argstr="-noneg",), + nopad=dict(argstr="-nopad",), + nopadWARP=dict(argstr="-nopadWARP", xor=["allsave", "expad"],), + nopenalty=dict(argstr="-nopenalty",), + nowarp=dict(argstr="-nowarp",), + noweight=dict(argstr="-noweight",), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', - extensions=None, - name_source=['in_file'], - name_template='ppp_%s', - ), - out_weight_file=dict( - argstr='-wtprefix %s', + argstr="-prefix %s", extensions=None, + name_source=["in_file"], + name_template="ppp_%s", ), + out_weight_file=dict(argstr="-wtprefix %s", extensions=None,), outputtype=dict(), - overwrite=dict(argstr='-overwrite', ), - pblur=dict(argstr='-pblur %s', ), - pear=dict(argstr='-pear', ), - penfac=dict(argstr='-penfac %f', ), - plusminus=dict( - argstr='-plusminus', - xor=['duplo', 'allsave', 'iwarp'], - ), - quiet=dict( - argstr='-quiet', - xor=['verb'], - ), - resample=dict(argstr='-resample', ), - verb=dict( - argstr='-verb', - xor=['quiet'], - ), - wball=dict(argstr='-wball %s', ), - weight=dict( - argstr='-weight %s', - extensions=None, - ), - wmask=dict(argstr='-wpass %s %f', ), - workhard=dict( - argstr='-workhard', - xor=['boxopt', 'ballopt'], - ), + overwrite=dict(argstr="-overwrite",), + pblur=dict(argstr="-pblur %s",), + pear=dict(argstr="-pear",), + penfac=dict(argstr="-penfac %f",), + plusminus=dict(argstr="-plusminus", xor=["duplo", "allsave", "iwarp"],), + quiet=dict(argstr="-quiet", xor=["verb"],), + resample=dict(argstr="-resample",), + verb=dict(argstr="-verb", xor=["quiet"],), + 
wball=dict(argstr="-wball %s",), + weight=dict(argstr="-weight %s", extensions=None,), + wmask=dict(argstr="-wpass %s %f",), + workhard=dict(argstr="-workhard", xor=["boxopt", "ballopt"],), ) inputs = Qwarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Qwarp_outputs(): output_map = dict( - base_warp=dict(extensions=None, ), - source_warp=dict(extensions=None, ), - warped_base=dict(extensions=None, ), - warped_source=dict(extensions=None, ), - weights=dict(extensions=None, ), + base_warp=dict(extensions=None,), + source_warp=dict(extensions=None,), + warped_base=dict(extensions=None,), + warped_source=dict(extensions=None,), + weights=dict(extensions=None,), ) outputs = Qwarp.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_QwarpPlusMinus.py b/nipype/interfaces/afni/tests/test_auto_QwarpPlusMinus.py index 5066483014..6b56af1006 100644 --- a/nipype/interfaces/afni/tests/test_auto_QwarpPlusMinus.py +++ b/nipype/interfaces/afni/tests/test_auto_QwarpPlusMinus.py @@ -4,180 +4,98 @@ def test_QwarpPlusMinus_inputs(): input_map = dict( - Qfinal=dict(argstr='-Qfinal', ), - Qonly=dict(argstr='-Qonly', ), - allineate=dict(argstr='-allineate', ), - allineate_opts=dict( - argstr='-allineate_opts %s', - requires=['allineate'], - ), - allsave=dict( - argstr='-allsave', - xor=['nopadWARP', 'duplo', 'plusminus'], - ), - args=dict(argstr='%s', ), - ballopt=dict( - argstr='-ballopt', - xor=['workhard', 'boxopt'], - ), + Qfinal=dict(argstr="-Qfinal",), + Qonly=dict(argstr="-Qonly",), + allineate=dict(argstr="-allineate",), + allineate_opts=dict(argstr="-allineate_opts %s", requires=["allineate"],), + allsave=dict(argstr="-allsave", xor=["nopadWARP", "duplo", "plusminus"],), + args=dict(argstr="%s",), + ballopt=dict(argstr="-ballopt", xor=["workhard", "boxopt"],), base_file=dict( - argstr='-base %s', - copyfile=False, - extensions=None, - mandatory=True, + argstr="-base %s", copyfile=False, extensions=None, mandatory=True, ), - baxopt=dict( - argstr='-boxopt', - xor=['workhard', 'ballopt'], - ), - blur=dict(argstr='-blur %s', ), + baxopt=dict(argstr="-boxopt", xor=["workhard", "ballopt"],), + blur=dict(argstr="-blur %s",), duplo=dict( - argstr='-duplo', - xor=[ - 'gridlist', 'maxlev', 'inilev', 'iniwarp', 'plusminus', - 'allsave' - ], - ), - emask=dict( - argstr='-emask %s', - copyfile=False, - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - expad=dict( - argstr='-expad %d', - xor=['nopadWARP'], + argstr="-duplo", + xor=["gridlist", "maxlev", "inilev", "iniwarp", "plusminus", "allsave"], ), + emask=dict(argstr="-emask %s", copyfile=False, extensions=None,), + environ=dict(nohash=True, usedefault=True,), + expad=dict(argstr="-expad %d", xor=["nopadWARP"],), gridlist=dict( - argstr='-gridlist %s', + argstr="-gridlist %s", copyfile=False, extensions=None, - xor=['duplo', 'plusminus'], - ), - hel=dict( - argstr='-hel', - xor=['nmi', 'mi', 'lpc', 'lpa', 'pear'], + xor=["duplo", "plusminus"], ), + hel=dict(argstr="-hel", xor=["nmi", "mi", "lpc", "lpa", "pear"],), in_file=dict( - argstr='-source %s', - copyfile=False, - extensions=None, - mandatory=True, - ), - inilev=dict( - argstr='-inilev %d', - xor=['duplo'], - ), - iniwarp=dict( - argstr='-iniwarp %s', - xor=['duplo'], - ), - iwarp=dict( - argstr='-iwarp', - xor=['plusminus'], - ), - lpa=dict( - argstr='-lpa', - xor=['nmi', 'mi', 'lpc', 'hel', 'pear'], - ), - lpc=dict( - argstr='-lpc', - 
position=-2, - xor=['nmi', 'mi', 'hel', 'lpa', 'pear'], - ), - maxlev=dict( - argstr='-maxlev %d', - position=-1, - xor=['duplo'], - ), - mi=dict( - argstr='-mi', - xor=['mi', 'hel', 'lpc', 'lpa', 'pear'], - ), - minpatch=dict(argstr='-minpatch %d', ), - nmi=dict( - argstr='-nmi', - xor=['nmi', 'hel', 'lpc', 'lpa', 'pear'], - ), - noXdis=dict(argstr='-noXdis', ), - noYdis=dict(argstr='-noYdis', ), - noZdis=dict(argstr='-noZdis', ), - noneg=dict(argstr='-noneg', ), - nopad=dict(argstr='-nopad', ), - nopadWARP=dict( - argstr='-nopadWARP', - xor=['allsave', 'expad'], - ), - nopenalty=dict(argstr='-nopenalty', ), - nowarp=dict(argstr='-nowarp', ), - noweight=dict(argstr='-noweight', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + argstr="-source %s", copyfile=False, extensions=None, mandatory=True, + ), + inilev=dict(argstr="-inilev %d", xor=["duplo"],), + iniwarp=dict(argstr="-iniwarp %s", xor=["duplo"],), + iwarp=dict(argstr="-iwarp", xor=["plusminus"],), + lpa=dict(argstr="-lpa", xor=["nmi", "mi", "lpc", "hel", "pear"],), + lpc=dict(argstr="-lpc", position=-2, xor=["nmi", "mi", "hel", "lpa", "pear"],), + maxlev=dict(argstr="-maxlev %d", position=-1, xor=["duplo"],), + mi=dict(argstr="-mi", xor=["mi", "hel", "lpc", "lpa", "pear"],), + minpatch=dict(argstr="-minpatch %d",), + nmi=dict(argstr="-nmi", xor=["nmi", "hel", "lpc", "lpa", "pear"],), + noXdis=dict(argstr="-noXdis",), + noYdis=dict(argstr="-noYdis",), + noZdis=dict(argstr="-noZdis",), + noneg=dict(argstr="-noneg",), + nopad=dict(argstr="-nopad",), + nopadWARP=dict(argstr="-nopadWARP", xor=["allsave", "expad"],), + nopenalty=dict(argstr="-nopenalty",), + nowarp=dict(argstr="-nowarp",), + noweight=dict(argstr="-noweight",), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', - extensions=None, - position=0, - usedefault=True, - ), - out_weight_file=dict( - argstr='-wtprefix %s', - extensions=None, + argstr="-prefix %s", extensions=None, position=0, usedefault=True, ), + out_weight_file=dict(argstr="-wtprefix %s", extensions=None,), outputtype=dict(), - overwrite=dict(argstr='-overwrite', ), - pblur=dict(argstr='-pblur %s', ), - pear=dict(argstr='-pear', ), - penfac=dict(argstr='-penfac %f', ), + overwrite=dict(argstr="-overwrite",), + pblur=dict(argstr="-pblur %s",), + pear=dict(argstr="-pear",), + penfac=dict(argstr="-penfac %f",), plusminus=dict( - argstr='-plusminus', + argstr="-plusminus", position=1, usedefault=True, - xor=['duplo', 'allsave', 'iwarp'], - ), - quiet=dict( - argstr='-quiet', - xor=['verb'], + xor=["duplo", "allsave", "iwarp"], ), - resample=dict(argstr='-resample', ), + quiet=dict(argstr="-quiet", xor=["verb"],), + resample=dict(argstr="-resample",), source_file=dict( - argstr='-source %s', + argstr="-source %s", copyfile=False, - deprecated='1.1.2', + deprecated="1.1.2", extensions=None, - new_name='in_file', - ), - verb=dict( - argstr='-verb', - xor=['quiet'], - ), - wball=dict(argstr='-wball %s', ), - weight=dict( - argstr='-weight %s', - extensions=None, - ), - wmask=dict(argstr='-wpass %s %f', ), - workhard=dict( - argstr='-workhard', - xor=['boxopt', 'ballopt'], + new_name="in_file", ), + verb=dict(argstr="-verb", xor=["quiet"],), + wball=dict(argstr="-wball %s",), + weight=dict(argstr="-weight %s", extensions=None,), + wmask=dict(argstr="-wpass %s %f",), + workhard=dict(argstr="-workhard", xor=["boxopt", "ballopt"],), ) inputs = QwarpPlusMinus.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value + + def test_QwarpPlusMinus_outputs(): output_map = dict( - base_warp=dict(extensions=None, ), - source_warp=dict(extensions=None, ), - warped_base=dict(extensions=None, ), - warped_source=dict(extensions=None, ), - weights=dict(extensions=None, ), + base_warp=dict(extensions=None,), + source_warp=dict(extensions=None,), + warped_base=dict(extensions=None,), + warped_source=dict(extensions=None,), + weights=dict(extensions=None,), ) outputs = QwarpPlusMinus.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_ROIStats.py b/nipype/interfaces/afni/tests/test_auto_ROIStats.py index f71c3158cd..026e3ff076 100644 --- a/nipype/interfaces/afni/tests/test_auto_ROIStats.py +++ b/nipype/interfaces/afni/tests/test_auto_ROIStats.py @@ -4,67 +4,46 @@ def test_ROIStats_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debug=dict(argstr='-debug', ), - environ=dict( - nohash=True, - usedefault=True, - ), - format1D=dict( - argstr='-1Dformat', - xor=['format1DR'], - ), - format1DR=dict( - argstr='-1DRformat', - xor=['format1D'], - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), + args=dict(argstr="%s",), + debug=dict(argstr="-debug",), + environ=dict(nohash=True, usedefault=True,), + format1D=dict(argstr="-1Dformat", xor=["format1DR"],), + format1DR=dict(argstr="-1DRformat", xor=["format1D"],), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), mask=dict( - argstr='-mask %s', - deprecated='1.1.4', + argstr="-mask %s", + deprecated="1.1.4", extensions=None, - new_name='mask_file', + new_name="mask_file", position=3, ), - mask_f2short=dict(argstr='-mask_f2short', ), - mask_file=dict( - argstr='-mask %s', - extensions=None, - ), - nobriklab=dict(argstr='-nobriklab', ), - nomeanout=dict(argstr='-nomeanout', ), - num_roi=dict(argstr='-numroi %s', ), + mask_f2short=dict(argstr="-mask_f2short",), + mask_file=dict(argstr="-mask %s", extensions=None,), + nobriklab=dict(argstr="-nobriklab",), + nomeanout=dict(argstr="-nomeanout",), + num_roi=dict(argstr="-numroi %s",), out_file=dict( - argstr='> %s', + argstr="> %s", extensions=None, keep_extension=False, - name_source='in_file', - name_template='%s_roistat.1D', + name_source="in_file", + name_template="%s_roistat.1D", position=-1, ), - quiet=dict(argstr='-quiet', ), - roisel=dict( - argstr='-roisel %s', - extensions=None, - ), - stat=dict(argstr='%s...', ), - zerofill=dict( - argstr='-zerofill %s', - requires=['num_roi'], - ), + quiet=dict(argstr="-quiet",), + roisel=dict(argstr="-roisel %s", extensions=None,), + stat=dict(argstr="%s...",), + zerofill=dict(argstr="-zerofill %s", requires=["num_roi"],), ) inputs = ROIStats.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ROIStats_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ROIStats.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_ReHo.py b/nipype/interfaces/afni/tests/test_auto_ReHo.py index 2e0c3fb6eb..1200399a1d 100644 --- a/nipype/interfaces/afni/tests/test_auto_ReHo.py +++ b/nipype/interfaces/afni/tests/test_auto_ReHo.py @@ -4,58 +4,37 @@ def test_ReHo_inputs(): input_map = dict( - args=dict(argstr='%s', ), - chi_sq=dict(argstr='-chi_sq', ), + args=dict(argstr="%s",), + chi_sq=dict(argstr="-chi_sq",), 
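Note: the ROIStats hunk above carries deprecated/new_name metadata (mask was superseded by mask_file as of 1.1.4). Depending on the installed nipype version, assigning the old name either warns and forwards the value to the new trait or raises outright, so the safe pattern is to introspect the metadata and use the replacement directly. A small sketch, assuming nipype is importable:

from nipype.interfaces import afni

mask_trait = afni.ROIStats.input_spec().traits()["mask"]
# Expected per the map above: deprecated since 1.1.4, replaced by mask_file.
print(mask_trait.deprecated, "->", mask_trait.new_name)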
ellipsoid=dict( - argstr='-neigh_X %s -neigh_Y %s -neigh_Z %s', - xor=['sphere', 'neighborhood'], - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-inset %s', - extensions=None, - mandatory=True, - position=1, - ), - label_set=dict( - argstr='-in_rois %s', - extensions=None, - ), - mask_file=dict( - argstr='-mask %s', - extensions=None, - ), - neighborhood=dict( - argstr='-nneigh %s', - xor=['sphere', 'ellipsoid'], - ), + argstr="-neigh_X %s -neigh_Y %s -neigh_Z %s", + xor=["sphere", "neighborhood"], + ), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-inset %s", extensions=None, mandatory=True, position=1,), + label_set=dict(argstr="-in_rois %s", extensions=None,), + mask_file=dict(argstr="-mask %s", extensions=None,), + neighborhood=dict(argstr="-nneigh %s", xor=["sphere", "ellipsoid"],), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_reho', + name_source="in_file", + name_template="%s_reho", position=0, ), - overwrite=dict(argstr='-overwrite', ), - sphere=dict( - argstr='-neigh_RAD %s', - xor=['neighborhood', 'ellipsoid'], - ), + overwrite=dict(argstr="-overwrite",), + sphere=dict(argstr="-neigh_RAD %s", xor=["neighborhood", "ellipsoid"],), ) inputs = ReHo.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ReHo_outputs(): - output_map = dict( - out_file=dict(extensions=None, ), - out_vals=dict(extensions=None, ), - ) + output_map = dict(out_file=dict(extensions=None,), out_vals=dict(extensions=None,),) outputs = ReHo.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Refit.py b/nipype/interfaces/afni/tests/test_auto_Refit.py index fd7688dce4..65952d85fa 100644 --- a/nipype/interfaces/afni/tests/test_auto_Refit.py +++ b/nipype/interfaces/afni/tests/test_auto_Refit.py @@ -4,45 +4,37 @@ def test_Refit_inputs(): input_map = dict( - args=dict(argstr='%s', ), - atrcopy=dict(argstr='-atrcopy %s %s', ), - atrfloat=dict(argstr='-atrfloat %s %s', ), - atrint=dict(argstr='-atrint %s %s', ), - atrstring=dict(argstr='-atrstring %s %s', ), - deoblique=dict(argstr='-deoblique', ), - duporigin_file=dict( - argstr='-duporigin %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + atrcopy=dict(argstr="-atrcopy %s %s",), + atrfloat=dict(argstr="-atrfloat %s %s",), + atrint=dict(argstr="-atrint %s %s",), + atrstring=dict(argstr="-atrstring %s %s",), + deoblique=dict(argstr="-deoblique",), + duporigin_file=dict(argstr="-duporigin %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='%s', - copyfile=True, - extensions=None, - mandatory=True, - position=-1, + argstr="%s", copyfile=True, extensions=None, mandatory=True, position=-1, ), - nosaveatr=dict(argstr='-nosaveatr', ), - saveatr=dict(argstr='-saveatr', ), - space=dict(argstr='-space %s', ), - xdel=dict(argstr='-xdel %f', ), - xorigin=dict(argstr='-xorigin %s', ), - xyzscale=dict(argstr='-xyzscale %f', ), - ydel=dict(argstr='-ydel %f', ), - yorigin=dict(argstr='-yorigin %s', ), - zdel=dict(argstr='-zdel %f', ), - zorigin=dict(argstr='-zorigin %s', ), + nosaveatr=dict(argstr="-nosaveatr",), + saveatr=dict(argstr="-saveatr",), + space=dict(argstr="-space %s",), + xdel=dict(argstr="-xdel %f",), + xorigin=dict(argstr="-xorigin 
%s",), + xyzscale=dict(argstr="-xyzscale %f",), + ydel=dict(argstr="-ydel %f",), + yorigin=dict(argstr="-yorigin %s",), + zdel=dict(argstr="-zdel %f",), + zorigin=dict(argstr="-zorigin %s",), ) inputs = Refit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Refit_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Refit.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Remlfit.py b/nipype/interfaces/afni/tests/test_auto_Remlfit.py index 0ace6a1b04..3040c1f48e 100644 --- a/nipype/interfaces/afni/tests/test_auto_Remlfit.py +++ b/nipype/interfaces/afni/tests/test_auto_Remlfit.py @@ -4,151 +4,69 @@ def test_Remlfit_inputs(): input_map = dict( - STATmask=dict( - argstr='-STATmask %s', - extensions=None, - ), - addbase=dict( - argstr='-addbase %s', - copyfile=False, - sep=' ', - ), - args=dict(argstr='%s', ), - automask=dict( - argstr='-automask', - usedefault=True, - ), - dsort=dict( - argstr='-dsort %s', - copyfile=False, - extensions=None, - ), - dsort_nods=dict( - argstr='-dsort_nods', - requires=['dsort'], - ), - environ=dict( - nohash=True, - usedefault=True, - ), - errts_file=dict( - argstr='-Rerrts %s', - extensions=None, - ), - fitts_file=dict( - argstr='-Rfitts %s', - extensions=None, - ), - fout=dict(argstr='-fout', ), - glt_file=dict( - argstr='-Rglt %s', - extensions=None, - ), - gltsym=dict(argstr='-gltsym "%s" %s...', ), - goforit=dict(argstr='-GOFORIT', ), - in_files=dict( - argstr='-input "%s"', - copyfile=False, - mandatory=True, - sep=' ', - ), - mask=dict( - argstr='-mask %s', - extensions=None, - ), - matim=dict( - argstr='-matim %s', - extensions=None, - xor=['matrix'], - ), - matrix=dict( - argstr='-matrix %s', - extensions=None, - mandatory=True, - ), - nobout=dict(argstr='-nobout', ), - nodmbase=dict( - argstr='-nodmbase', - requires=['addbase', 'dsort'], - ), - nofdr=dict(argstr='-noFDR', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - obeta=dict( - argstr='-Obeta %s', - extensions=None, - ), - obuck=dict( - argstr='-Obuck %s', - extensions=None, - ), - oerrts=dict( - argstr='-Oerrts %s', - extensions=None, - ), - ofitts=dict( - argstr='-Ofitts %s', - extensions=None, - ), - oglt=dict( - argstr='-Oglt %s', - extensions=None, - ), - out_file=dict( - argstr='-Rbuck %s', - extensions=None, - ), + STATmask=dict(argstr="-STATmask %s", extensions=None,), + addbase=dict(argstr="-addbase %s", copyfile=False, sep=" ",), + args=dict(argstr="%s",), + automask=dict(argstr="-automask", usedefault=True,), + dsort=dict(argstr="-dsort %s", copyfile=False, extensions=None,), + dsort_nods=dict(argstr="-dsort_nods", requires=["dsort"],), + environ=dict(nohash=True, usedefault=True,), + errts_file=dict(argstr="-Rerrts %s", extensions=None,), + fitts_file=dict(argstr="-Rfitts %s", extensions=None,), + fout=dict(argstr="-fout",), + glt_file=dict(argstr="-Rglt %s", extensions=None,), + gltsym=dict(argstr='-gltsym "%s" %s...',), + goforit=dict(argstr="-GOFORIT",), + in_files=dict(argstr='-input "%s"', copyfile=False, mandatory=True, sep=" ",), + mask=dict(argstr="-mask %s", extensions=None,), + matim=dict(argstr="-matim %s", extensions=None, xor=["matrix"],), + matrix=dict(argstr="-matrix %s", extensions=None, mandatory=True,), + nobout=dict(argstr="-nobout",), + nodmbase=dict(argstr="-nodmbase", requires=["addbase", 
"dsort"],), + nofdr=dict(argstr="-noFDR",), + num_threads=dict(nohash=True, usedefault=True,), + obeta=dict(argstr="-Obeta %s", extensions=None,), + obuck=dict(argstr="-Obuck %s", extensions=None,), + oerrts=dict(argstr="-Oerrts %s", extensions=None,), + ofitts=dict(argstr="-Ofitts %s", extensions=None,), + oglt=dict(argstr="-Oglt %s", extensions=None,), + out_file=dict(argstr="-Rbuck %s", extensions=None,), outputtype=dict(), - ovar=dict( - argstr='-Ovar %s', - extensions=None, - ), - polort=dict( - argstr='-polort %d', - xor=['matrix'], - ), - quiet=dict(argstr='-quiet', ), - rbeta_file=dict( - argstr='-Rbeta %s', - extensions=None, - ), - rout=dict(argstr='-rout', ), - slibase=dict(argstr='-slibase %s', ), - slibase_sm=dict(argstr='-slibase_sm %s', ), - tout=dict(argstr='-tout', ), - usetemp=dict(argstr='-usetemp', ), - var_file=dict( - argstr='-Rvar %s', - extensions=None, - ), - verb=dict(argstr='-verb', ), - wherr_file=dict( - argstr='-Rwherr %s', - extensions=None, - ), + ovar=dict(argstr="-Ovar %s", extensions=None,), + polort=dict(argstr="-polort %d", xor=["matrix"],), + quiet=dict(argstr="-quiet",), + rbeta_file=dict(argstr="-Rbeta %s", extensions=None,), + rout=dict(argstr="-rout",), + slibase=dict(argstr="-slibase %s",), + slibase_sm=dict(argstr="-slibase_sm %s",), + tout=dict(argstr="-tout",), + usetemp=dict(argstr="-usetemp",), + var_file=dict(argstr="-Rvar %s", extensions=None,), + verb=dict(argstr="-verb",), + wherr_file=dict(argstr="-Rwherr %s", extensions=None,), ) inputs = Remlfit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Remlfit_outputs(): output_map = dict( - errts_file=dict(extensions=None, ), - fitts_file=dict(extensions=None, ), - glt_file=dict(extensions=None, ), - obeta=dict(extensions=None, ), - obuck=dict(extensions=None, ), - oerrts=dict(extensions=None, ), - ofitts=dict(extensions=None, ), - oglt=dict(extensions=None, ), - out_file=dict(extensions=None, ), - ovar=dict(extensions=None, ), - rbeta_file=dict(extensions=None, ), - var_file=dict(extensions=None, ), - wherr_file=dict(extensions=None, ), + errts_file=dict(extensions=None,), + fitts_file=dict(extensions=None,), + glt_file=dict(extensions=None,), + obeta=dict(extensions=None,), + obuck=dict(extensions=None,), + oerrts=dict(extensions=None,), + ofitts=dict(extensions=None,), + oglt=dict(extensions=None,), + out_file=dict(extensions=None,), + ovar=dict(extensions=None,), + rbeta_file=dict(extensions=None,), + var_file=dict(extensions=None,), + wherr_file=dict(extensions=None,), ) outputs = Remlfit.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_Resample.py b/nipype/interfaces/afni/tests/test_auto_Resample.py index f2c7456a6a..3053112816 100644 --- a/nipype/interfaces/afni/tests/test_auto_Resample.py +++ b/nipype/interfaces/afni/tests/test_auto_Resample.py @@ -4,44 +4,37 @@ def test_Resample_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='-inset %s', + argstr="-inset %s", copyfile=False, extensions=None, mandatory=True, position=-1, ), - master=dict( - argstr='-master %s', - extensions=None, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - orientation=dict(argstr='-orient %s', ), + master=dict(argstr="-master %s", extensions=None,), + num_threads=dict(nohash=True, usedefault=True,), + 
orientation=dict(argstr="-orient %s",), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_file', - name_template='%s_resample', + name_source="in_file", + name_template="%s_resample", ), outputtype=dict(), - resample_mode=dict(argstr='-rmode %s', ), - voxel_size=dict(argstr='-dxyz %f %f %f', ), + resample_mode=dict(argstr="-rmode %s",), + voxel_size=dict(argstr="-dxyz %f %f %f",), ) inputs = Resample.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Resample_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Resample.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Retroicor.py b/nipype/interfaces/afni/tests/test_auto_Retroicor.py index 9e3c10631c..9c95116d09 100644 --- a/nipype/interfaces/afni/tests/test_auto_Retroicor.py +++ b/nipype/interfaces/afni/tests/test_auto_Retroicor.py @@ -4,68 +4,40 @@ def test_Retroicor_inputs(): input_map = dict( - args=dict(argstr='%s', ), - card=dict( - argstr='-card %s', - extensions=None, - position=-2, - ), + args=dict(argstr="%s",), + card=dict(argstr="-card %s", extensions=None, position=-2,), cardphase=dict( - argstr='-cardphase %s', - extensions=None, - hash_files=False, - position=-6, - ), - environ=dict( - nohash=True, - usedefault=True, + argstr="-cardphase %s", extensions=None, hash_files=False, position=-6, ), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-1, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - order=dict( - argstr='-order %s', - position=-5, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), + num_threads=dict(nohash=True, usedefault=True,), + order=dict(argstr="-order %s", position=-5,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source=['in_file'], - name_template='%s_retroicor', + name_source=["in_file"], + name_template="%s_retroicor", position=1, ), outputtype=dict(), - resp=dict( - argstr='-resp %s', - extensions=None, - position=-3, - ), + resp=dict(argstr="-resp %s", extensions=None, position=-3,), respphase=dict( - argstr='-respphase %s', - extensions=None, - hash_files=False, - position=-7, - ), - threshold=dict( - argstr='-threshold %d', - position=-4, + argstr="-respphase %s", extensions=None, hash_files=False, position=-7, ), + threshold=dict(argstr="-threshold %d", position=-4,), ) inputs = Retroicor.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Retroicor_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Retroicor.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_SVMTest.py b/nipype/interfaces/afni/tests/test_auto_SVMTest.py index 79f33834e9..e4ab1e00bc 100644 --- a/nipype/interfaces/afni/tests/test_auto_SVMTest.py +++ b/nipype/interfaces/afni/tests/test_auto_SVMTest.py @@ -4,47 +4,31 @@ def test_SVMTest_inputs(): input_map = dict( - args=dict(argstr='%s', ), - classout=dict(argstr='-classout', ), - environ=dict( - nohash=True, - usedefault=True, - ), - 
in_file=dict( - argstr='-testvol %s', - extensions=None, - mandatory=True, - ), - model=dict( - argstr='-model %s', - mandatory=True, - ), - multiclass=dict(argstr='-multiclass %s', ), - nodetrend=dict(argstr='-nodetrend', ), - nopredcensord=dict(argstr='-nopredcensord', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - options=dict(argstr='%s', ), + args=dict(argstr="%s",), + classout=dict(argstr="-classout",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-testvol %s", extensions=None, mandatory=True,), + model=dict(argstr="-model %s", mandatory=True,), + multiclass=dict(argstr="-multiclass %s",), + nodetrend=dict(argstr="-nodetrend",), + nopredcensord=dict(argstr="-nopredcensord",), + num_threads=dict(nohash=True, usedefault=True,), + options=dict(argstr="%s",), out_file=dict( - argstr='-predictions %s', - extensions=None, - name_template='%s_predictions', + argstr="-predictions %s", extensions=None, name_template="%s_predictions", ), outputtype=dict(), - testlabels=dict( - argstr='-testlabels %s', - extensions=None, - ), + testlabels=dict(argstr="-testlabels %s", extensions=None,), ) inputs = SVMTest.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SVMTest_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = SVMTest.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_SVMTrain.py b/nipype/interfaces/afni/tests/test_auto_SVMTrain.py index 307b8628f8..5edf36b7fa 100644 --- a/nipype/interfaces/afni/tests/test_auto_SVMTrain.py +++ b/nipype/interfaces/afni/tests/test_auto_SVMTrain.py @@ -5,76 +5,55 @@ def test_SVMTrain_inputs(): input_map = dict( alphas=dict( - argstr='-alpha %s', + argstr="-alpha %s", extensions=None, - name_source='in_file', - name_template='%s_alphas', - suffix='_alphas', - ), - args=dict(argstr='%s', ), - censor=dict( - argstr='-censor %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, + name_source="in_file", + name_template="%s_alphas", + suffix="_alphas", ), + args=dict(argstr="%s",), + censor=dict(argstr="-censor %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='-trainvol %s', - copyfile=False, - extensions=None, - mandatory=True, - ), - kernel=dict(argstr='-kernel %s', ), - mask=dict( - argstr='-mask %s', - copyfile=False, - extensions=None, - position=-1, + argstr="-trainvol %s", copyfile=False, extensions=None, mandatory=True, ), - max_iterations=dict(argstr='-max_iterations %d', ), + kernel=dict(argstr="-kernel %s",), + mask=dict(argstr="-mask %s", copyfile=False, extensions=None, position=-1,), + max_iterations=dict(argstr="-max_iterations %d",), model=dict( - argstr='-model %s', + argstr="-model %s", extensions=None, - name_source='in_file', - name_template='%s_model', - suffix='_model', + name_source="in_file", + name_template="%s_model", + suffix="_model", ), - nomodelmask=dict(argstr='-nomodelmask', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - options=dict(argstr='%s', ), + nomodelmask=dict(argstr="-nomodelmask",), + num_threads=dict(nohash=True, usedefault=True,), + options=dict(argstr="%s",), out_file=dict( - argstr='-bucket %s', + argstr="-bucket %s", extensions=None, - name_source='in_file', - name_template='%s_vectors', - suffix='_bucket', + name_source="in_file", + 
name_template="%s_vectors", + suffix="_bucket", ), outputtype=dict(), - trainlabels=dict( - argstr='-trainlabels %s', - extensions=None, - ), - ttype=dict( - argstr='-type %s', - mandatory=True, - ), - w_out=dict(argstr='-wout', ), + trainlabels=dict(argstr="-trainlabels %s", extensions=None,), + ttype=dict(argstr="-type %s", mandatory=True,), + w_out=dict(argstr="-wout",), ) inputs = SVMTrain.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SVMTrain_outputs(): output_map = dict( - alphas=dict(extensions=None, ), - model=dict(extensions=None, ), - out_file=dict(extensions=None, ), + alphas=dict(extensions=None,), + model=dict(extensions=None,), + out_file=dict(extensions=None,), ) outputs = SVMTrain.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_Seg.py b/nipype/interfaces/afni/tests/test_auto_Seg.py index a5d475d9a0..f243aac8c7 100644 --- a/nipype/interfaces/afni/tests/test_auto_Seg.py +++ b/nipype/interfaces/afni/tests/test_auto_Seg.py @@ -4,40 +4,35 @@ def test_Seg_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bias_classes=dict(argstr='-bias_classes %s', ), - bias_fwhm=dict(argstr='-bias_fwhm %f', ), - blur_meth=dict(argstr='-blur_meth %s', ), - bmrf=dict(argstr='-bmrf %f', ), - classes=dict(argstr='-classes %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + bias_classes=dict(argstr="-bias_classes %s",), + bias_fwhm=dict(argstr="-bias_fwhm %f",), + blur_meth=dict(argstr="-blur_meth %s",), + bmrf=dict(argstr="-bmrf %f",), + classes=dict(argstr="-classes %s",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='-anat %s', + argstr="-anat %s", copyfile=True, extensions=None, mandatory=True, position=-1, ), - main_N=dict(argstr='-main_N %d', ), - mask=dict( - argstr='-mask %s', - mandatory=True, - position=-2, - ), - mixfloor=dict(argstr='-mixfloor %f', ), - mixfrac=dict(argstr='-mixfrac %s', ), - prefix=dict(argstr='-prefix %s', ), + main_N=dict(argstr="-main_N %d",), + mask=dict(argstr="-mask %s", mandatory=True, position=-2,), + mixfloor=dict(argstr="-mixfloor %f",), + mixfrac=dict(argstr="-mixfrac %s",), + prefix=dict(argstr="-prefix %s",), ) inputs = Seg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Seg_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Seg.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_SkullStrip.py b/nipype/interfaces/afni/tests/test_auto_SkullStrip.py index c198b0172a..4fbf078da5 100644 --- a/nipype/interfaces/afni/tests/test_auto_SkullStrip.py +++ b/nipype/interfaces/afni/tests/test_auto_SkullStrip.py @@ -4,27 +4,21 @@ def test_SkullStrip_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='-input %s', + argstr="-input %s", copyfile=False, extensions=None, mandatory=True, position=1, ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_file', - name_template='%s_skullstrip', + 
name_source="in_file", + name_template="%s_skullstrip", ), outputtype=dict(), ) @@ -33,8 +27,10 @@ def test_SkullStrip_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SkullStrip_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = SkullStrip.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Synthesize.py b/nipype/interfaces/afni/tests/test_auto_Synthesize.py index 2c71bd41b7..df23d9923b 100644 --- a/nipype/interfaces/afni/tests/test_auto_Synthesize.py +++ b/nipype/interfaces/afni/tests/test_auto_Synthesize.py @@ -4,48 +4,31 @@ def test_Synthesize_inputs(): input_map = dict( - TR=dict(argstr='-TR %f', ), - args=dict(argstr='%s', ), + TR=dict(argstr="-TR %f",), + args=dict(argstr="%s",), cbucket=dict( - argstr='-cbucket %s', - copyfile=False, - extensions=None, - mandatory=True, - ), - cenfill=dict(argstr='-cenfill %s', ), - dry_run=dict(argstr='-dry', ), - environ=dict( - nohash=True, - usedefault=True, + argstr="-cbucket %s", copyfile=False, extensions=None, mandatory=True, ), + cenfill=dict(argstr="-cenfill %s",), + dry_run=dict(argstr="-dry",), + environ=dict(nohash=True, usedefault=True,), matrix=dict( - argstr='-matrix %s', - copyfile=False, - extensions=None, - mandatory=True, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - out_file=dict( - argstr='-prefix %s', - extensions=None, - name_template='syn', + argstr="-matrix %s", copyfile=False, extensions=None, mandatory=True, ), + num_threads=dict(nohash=True, usedefault=True,), + out_file=dict(argstr="-prefix %s", extensions=None, name_template="syn",), outputtype=dict(), - select=dict( - argstr='-select %s', - mandatory=True, - ), + select=dict(argstr="-select %s", mandatory=True,), ) inputs = Synthesize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Synthesize_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Synthesize.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_TCat.py b/nipype/interfaces/afni/tests/test_auto_TCat.py index acd3ec7b27..b9a4a16054 100644 --- a/nipype/interfaces/afni/tests/test_auto_TCat.py +++ b/nipype/interfaces/afni/tests/test_auto_TCat.py @@ -4,41 +4,29 @@ def test_TCat_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_files=dict( - argstr=' %s', - copyfile=False, - mandatory=True, - position=-1, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_files=dict(argstr=" %s", copyfile=False, mandatory=True, position=-1,), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_files', - name_template='%s_tcat', + name_source="in_files", + name_template="%s_tcat", ), outputtype=dict(), - rlt=dict( - argstr='-rlt%s', - position=1, - ), - verbose=dict(argstr='-verb', ), + rlt=dict(argstr="-rlt%s", position=1,), + verbose=dict(argstr="-verb",), ) inputs = TCat.input_spec() for key, metadata in list(input_map.items()): for metakey, 
value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TCat_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = TCat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_TCatSubBrick.py b/nipype/interfaces/afni/tests/test_auto_TCatSubBrick.py index 516c2d15d0..7644b191a6 100644 --- a/nipype/interfaces/afni/tests/test_auto_TCatSubBrick.py +++ b/nipype/interfaces/afni/tests/test_auto_TCatSubBrick.py @@ -4,39 +4,23 @@ def test_TCatSubBrick_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_files=dict( - argstr='%s%s ...', - copyfile=False, - mandatory=True, - position=-1, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - out_file=dict( - argstr='-prefix %s', - extensions=None, - genfile=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_files=dict(argstr="%s%s ...", copyfile=False, mandatory=True, position=-1,), + num_threads=dict(nohash=True, usedefault=True,), + out_file=dict(argstr="-prefix %s", extensions=None, genfile=True,), outputtype=dict(), - rlt=dict( - argstr='-rlt%s', - position=1, - ), + rlt=dict(argstr="-rlt%s", position=1,), ) inputs = TCatSubBrick.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TCatSubBrick_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = TCatSubBrick.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_TCorr1D.py b/nipype/interfaces/afni/tests/test_auto_TCorr1D.py index a87efb29da..0a306ceca3 100644 --- a/nipype/interfaces/afni/tests/test_auto_TCorr1D.py +++ b/nipype/interfaces/afni/tests/test_auto_TCorr1D.py @@ -4,64 +4,43 @@ def test_TCorr1D_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), ktaub=dict( - argstr=' -ktaub', - position=1, - xor=['pearson', 'spearman', 'quadrant'], - ), - num_threads=dict( - nohash=True, - usedefault=True, + argstr=" -ktaub", position=1, xor=["pearson", "spearman", "quadrant"], ), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, keep_extension=True, - name_source='xset', - name_template='%s_correlation.nii.gz', + name_source="xset", + name_template="%s_correlation.nii.gz", ), outputtype=dict(), pearson=dict( - argstr=' -pearson', - position=1, - xor=['spearman', 'quadrant', 'ktaub'], + argstr=" -pearson", position=1, xor=["spearman", "quadrant", "ktaub"], ), quadrant=dict( - argstr=' -quadrant', - position=1, - xor=['pearson', 'spearman', 'ktaub'], + argstr=" -quadrant", position=1, xor=["pearson", "spearman", "ktaub"], ), spearman=dict( - argstr=' -spearman', - position=1, - xor=['pearson', 'quadrant', 'ktaub'], + argstr=" -spearman", position=1, xor=["pearson", "quadrant", "ktaub"], ), xset=dict( - argstr=' %s', - copyfile=False, - extensions=None, - mandatory=True, - position=-2, - ), - y_1d=dict( - argstr=' %s', - extensions=None, - mandatory=True, - position=-1, + argstr=" %s", copyfile=False, extensions=None, mandatory=True, position=-2, ), + 
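Note: the xor metadata seen throughout these maps (here TCorr1D's pearson/spearman/quadrant/ktaub correlation modes) marks mutually exclusive inputs, and nipype rejects a conflicting value as soon as it is assigned. A hedged sketch, assuming nipype is importable; the exact exception type may vary across versions, so it is caught broadly:

from nipype.interfaces import afni

tc = afni.TCorr1D()
tc.inputs.pearson = True
try:
    tc.inputs.spearman = True  # xor=["pearson", "quadrant", "ktaub"]
except Exception as err:
    print("rejected:", err)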
y_1d=dict(argstr=" %s", extensions=None, mandatory=True, position=-1,), ) inputs = TCorr1D.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TCorr1D_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = TCorr1D.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_TCorrMap.py b/nipype/interfaces/afni/tests/test_auto_TCorrMap.py index 364a19af41..5a03aa3732 100644 --- a/nipype/interfaces/afni/tests/test_auto_TCorrMap.py +++ b/nipype/interfaces/afni/tests/test_auto_TCorrMap.py @@ -5,135 +5,106 @@ def test_TCorrMap_inputs(): input_map = dict( absolute_threshold=dict( - argstr='-Thresh %f %s', - extensions=None, - name_source='in_file', - suffix='_thresh', - xor=('absolute_threshold', 'var_absolute_threshold', - 'var_absolute_threshold_normalize'), - ), - args=dict(argstr='%s', ), - automask=dict(argstr='-automask', ), + argstr="-Thresh %f %s", + extensions=None, + name_source="in_file", + suffix="_thresh", + xor=( + "absolute_threshold", + "var_absolute_threshold", + "var_absolute_threshold_normalize", + ), + ), + args=dict(argstr="%s",), + automask=dict(argstr="-automask",), average_expr=dict( - argstr='-Aexpr %s %s', + argstr="-Aexpr %s %s", extensions=None, - name_source='in_file', - suffix='_aexpr', - xor=('average_expr', 'average_expr_nonzero', 'sum_expr'), + name_source="in_file", + suffix="_aexpr", + xor=("average_expr", "average_expr_nonzero", "sum_expr"), ), average_expr_nonzero=dict( - argstr='-Cexpr %s %s', + argstr="-Cexpr %s %s", extensions=None, - name_source='in_file', - suffix='_cexpr', - xor=('average_expr', 'average_expr_nonzero', 'sum_expr'), + name_source="in_file", + suffix="_cexpr", + xor=("average_expr", "average_expr_nonzero", "sum_expr"), ), - bandpass=dict(argstr='-bpass %f %f', ), - blur_fwhm=dict(argstr='-Gblur %f', ), + bandpass=dict(argstr="-bpass %f %f",), + blur_fwhm=dict(argstr="-Gblur %f",), correlation_maps=dict( - argstr='-CorrMap %s', - extensions=None, - name_source='in_file', + argstr="-CorrMap %s", extensions=None, name_source="in_file", ), correlation_maps_masked=dict( - argstr='-CorrMask %s', - extensions=None, - name_source='in_file', - ), - environ=dict( - nohash=True, - usedefault=True, + argstr="-CorrMask %s", extensions=None, name_source="in_file", ), + environ=dict(nohash=True, usedefault=True,), expr=dict(), histogram=dict( - argstr='-Hist %d %s', + argstr="-Hist %d %s", extensions=None, - name_source='in_file', - suffix='_hist', + name_source="in_file", + suffix="_hist", ), histogram_bin_numbers=dict(), in_file=dict( - argstr='-input %s', - copyfile=False, - extensions=None, - mandatory=True, - ), - mask=dict( - argstr='-mask %s', - extensions=None, + argstr="-input %s", copyfile=False, extensions=None, mandatory=True, ), + mask=dict(argstr="-mask %s", extensions=None,), mean_file=dict( - argstr='-Mean %s', - extensions=None, - name_source='in_file', - suffix='_mean', - ), - num_threads=dict( - nohash=True, - usedefault=True, + argstr="-Mean %s", extensions=None, name_source="in_file", suffix="_mean", ), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source=['in_file'], - name_template='%s_afni', + name_source=["in_file"], + name_template="%s_afni", ), outputtype=dict(), pmean=dict( - argstr='-Pmean 
%s', - extensions=None, - name_source='in_file', - suffix='_pmean', + argstr="-Pmean %s", extensions=None, name_source="in_file", suffix="_pmean", ), - polort=dict(argstr='-polort %d', ), + polort=dict(argstr="-polort %d",), qmean=dict( - argstr='-Qmean %s', - extensions=None, - name_source='in_file', - suffix='_qmean', - ), - regress_out_timeseries=dict( - argstr='-ort %s', - extensions=None, - ), - seeds=dict( - argstr='-seed %s', - extensions=None, - xor='seeds_width', - ), - seeds_width=dict( - argstr='-Mseed %f', - xor='seeds', + argstr="-Qmean %s", extensions=None, name_source="in_file", suffix="_qmean", ), + regress_out_timeseries=dict(argstr="-ort %s", extensions=None,), + seeds=dict(argstr="-seed %s", extensions=None, xor="seeds_width",), + seeds_width=dict(argstr="-Mseed %f", xor="seeds",), sum_expr=dict( - argstr='-Sexpr %s %s', + argstr="-Sexpr %s %s", extensions=None, - name_source='in_file', - suffix='_sexpr', - xor=('average_expr', 'average_expr_nonzero', 'sum_expr'), + name_source="in_file", + suffix="_sexpr", + xor=("average_expr", "average_expr_nonzero", "sum_expr"), ), thresholds=dict(), var_absolute_threshold=dict( - argstr='-VarThresh %f %f %f %s', + argstr="-VarThresh %f %f %f %s", extensions=None, - name_source='in_file', - suffix='_varthresh', - xor=('absolute_threshold', 'var_absolute_threshold', - 'var_absolute_threshold_normalize'), + name_source="in_file", + suffix="_varthresh", + xor=( + "absolute_threshold", + "var_absolute_threshold", + "var_absolute_threshold_normalize", + ), ), var_absolute_threshold_normalize=dict( - argstr='-VarThreshN %f %f %f %s', + argstr="-VarThreshN %f %f %f %s", extensions=None, - name_source='in_file', - suffix='_varthreshn', - xor=('absolute_threshold', 'var_absolute_threshold', - 'var_absolute_threshold_normalize'), + name_source="in_file", + suffix="_varthreshn", + xor=( + "absolute_threshold", + "var_absolute_threshold", + "var_absolute_threshold_normalize", + ), ), zmean=dict( - argstr='-Zmean %s', - extensions=None, - name_source='in_file', - suffix='_zmean', + argstr="-Zmean %s", extensions=None, name_source="in_file", suffix="_zmean", ), ) inputs = TCorrMap.input_spec() @@ -141,21 +112,23 @@ def test_TCorrMap_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TCorrMap_outputs(): output_map = dict( - absolute_threshold=dict(extensions=None, ), - average_expr=dict(extensions=None, ), - average_expr_nonzero=dict(extensions=None, ), - correlation_maps=dict(extensions=None, ), - correlation_maps_masked=dict(extensions=None, ), - histogram=dict(extensions=None, ), - mean_file=dict(extensions=None, ), - pmean=dict(extensions=None, ), - qmean=dict(extensions=None, ), - sum_expr=dict(extensions=None, ), - var_absolute_threshold=dict(extensions=None, ), - var_absolute_threshold_normalize=dict(extensions=None, ), - zmean=dict(extensions=None, ), + absolute_threshold=dict(extensions=None,), + average_expr=dict(extensions=None,), + average_expr_nonzero=dict(extensions=None,), + correlation_maps=dict(extensions=None,), + correlation_maps_masked=dict(extensions=None,), + histogram=dict(extensions=None,), + mean_file=dict(extensions=None,), + pmean=dict(extensions=None,), + qmean=dict(extensions=None,), + sum_expr=dict(extensions=None,), + var_absolute_threshold=dict(extensions=None,), + var_absolute_threshold_normalize=dict(extensions=None,), + zmean=dict(extensions=None,), ) outputs = TCorrMap.output_spec() diff --git 
a/nipype/interfaces/afni/tests/test_auto_TCorrelate.py b/nipype/interfaces/afni/tests/test_auto_TCorrelate.py index 3e75c7d099..0bf794886b 100644 --- a/nipype/interfaces/afni/tests/test_auto_TCorrelate.py +++ b/nipype/interfaces/afni/tests/test_auto_TCorrelate.py @@ -4,37 +4,23 @@ def test_TCorrelate_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='xset', - name_template='%s_tcorr', + name_source="xset", + name_template="%s_tcorr", ), outputtype=dict(), - pearson=dict(argstr='-pearson', ), - polort=dict(argstr='-polort %d', ), + pearson=dict(argstr="-pearson",), + polort=dict(argstr="-polort %d",), xset=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-2, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-2, ), yset=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-1, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), ) inputs = TCorrelate.input_spec() @@ -42,8 +28,10 @@ def test_TCorrelate_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TCorrelate_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = TCorrelate.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_TNorm.py b/nipype/interfaces/afni/tests/test_auto_TNorm.py index 144e0fdeba..8a8493f927 100644 --- a/nipype/interfaces/afni/tests/test_auto_TNorm.py +++ b/nipype/interfaces/afni/tests/test_auto_TNorm.py @@ -4,43 +4,35 @@ def test_TNorm_inputs(): input_map = dict( - L1fit=dict(argstr='-L1fit', ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + L1fit=dict(argstr="-L1fit",), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-1, - ), - norm1=dict(argstr='-norm1', ), - norm2=dict(argstr='-norm2', ), - normR=dict(argstr='-normR', ), - normx=dict(argstr='-normx', ), - num_threads=dict( - nohash=True, - usedefault=True, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), + norm1=dict(argstr="-norm1",), + norm2=dict(argstr="-norm2",), + normR=dict(argstr="-normR",), + normx=dict(argstr="-normx",), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_file', - name_template='%s_tnorm', + name_source="in_file", + name_template="%s_tnorm", ), outputtype=dict(), - polort=dict(argstr='-polort %s', ), + polort=dict(argstr="-polort %s",), ) inputs = TNorm.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TNorm_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = TNorm.output_spec() for key, metadata in list(output_map.items()): diff --git 
a/nipype/interfaces/afni/tests/test_auto_TProject.py b/nipype/interfaces/afni/tests/test_auto_TProject.py index 897b18c853..e300cb16be 100644 --- a/nipype/interfaces/afni/tests/test_auto_TProject.py +++ b/nipype/interfaces/afni/tests/test_auto_TProject.py @@ -4,68 +4,49 @@ def test_TProject_inputs(): input_map = dict( - TR=dict(argstr='-TR %g', ), - args=dict(argstr='%s', ), - automask=dict( - argstr='-automask', - xor=['mask'], - ), - bandpass=dict(argstr='-bandpass %g %g', ), - blur=dict(argstr='-blur %g', ), - cenmode=dict(argstr='-cenmode %s', ), - censor=dict( - argstr='-censor %s', - extensions=None, - ), - censortr=dict(argstr='-CENSORTR %s', ), - concat=dict( - argstr='-concat %s', - extensions=None, - ), - dsort=dict(argstr='-dsort %s...', ), - environ=dict( - nohash=True, - usedefault=True, - ), + TR=dict(argstr="-TR %g",), + args=dict(argstr="%s",), + automask=dict(argstr="-automask", xor=["mask"],), + bandpass=dict(argstr="-bandpass %g %g",), + blur=dict(argstr="-blur %g",), + cenmode=dict(argstr="-cenmode %s",), + censor=dict(argstr="-censor %s", extensions=None,), + censortr=dict(argstr="-CENSORTR %s",), + concat=dict(argstr="-concat %s", extensions=None,), + dsort=dict(argstr="-dsort %s...",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='-input %s', + argstr="-input %s", copyfile=False, extensions=None, mandatory=True, position=1, ), - mask=dict( - argstr='-mask %s', - extensions=None, - ), - noblock=dict(argstr='-noblock', ), - norm=dict(argstr='-norm', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - ort=dict( - argstr='-ort %s', - extensions=None, - ), + mask=dict(argstr="-mask %s", extensions=None,), + noblock=dict(argstr="-noblock",), + norm=dict(argstr="-norm",), + num_threads=dict(nohash=True, usedefault=True,), + ort=dict(argstr="-ort %s", extensions=None,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_file', - name_template='%s_tproject', + name_source="in_file", + name_template="%s_tproject", position=-1, ), outputtype=dict(), - polort=dict(argstr='-polort %d', ), - stopband=dict(argstr='-stopband %g %g', ), + polort=dict(argstr="-polort %d",), + stopband=dict(argstr="-stopband %g %g",), ) inputs = TProject.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TProject_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = TProject.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_TShift.py b/nipype/interfaces/afni/tests/test_auto_TShift.py index 7b6589df5f..9f1e6c3342 100644 --- a/nipype/interfaces/afni/tests/test_auto_TShift.py +++ b/nipype/interfaces/afni/tests/test_auto_TShift.py @@ -4,61 +4,40 @@ def test_TShift_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - ignore=dict(argstr='-ignore %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + ignore=dict(argstr="-ignore %s",), in_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-1, - ), - interp=dict(argstr='-%s', ), - num_threads=dict( - nohash=True, - usedefault=True, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), + interp=dict(argstr="-%s",), + num_threads=dict(nohash=True, usedefault=True,), 
out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_file', - name_template='%s_tshift', + name_source="in_file", + name_template="%s_tshift", ), outputtype=dict(), - rlt=dict(argstr='-rlt', ), - rltplus=dict(argstr='-rlt+', ), - slice_encoding_direction=dict(usedefault=True, ), - slice_timing=dict( - argstr='-tpattern @%s', - xor=['tpattern'], - ), - tpattern=dict( - argstr='-tpattern %s', - xor=['slice_timing'], - ), - tr=dict(argstr='-TR %s', ), - tslice=dict( - argstr='-slice %s', - xor=['tzero'], - ), - tzero=dict( - argstr='-tzero %s', - xor=['tslice'], - ), + rlt=dict(argstr="-rlt",), + rltplus=dict(argstr="-rlt+",), + slice_encoding_direction=dict(usedefault=True,), + slice_timing=dict(argstr="-tpattern @%s", xor=["tpattern"],), + tpattern=dict(argstr="-tpattern %s", xor=["slice_timing"],), + tr=dict(argstr="-TR %s",), + tslice=dict(argstr="-slice %s", xor=["tzero"],), + tzero=dict(argstr="-tzero %s", xor=["tslice"],), ) inputs = TShift.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TShift_outputs(): output_map = dict( - out_file=dict(extensions=None, ), - timing_file=dict(extensions=None, ), + out_file=dict(extensions=None,), timing_file=dict(extensions=None,), ) outputs = TShift.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_TSmooth.py b/nipype/interfaces/afni/tests/test_auto_TSmooth.py index 67c4821ed2..1223686b83 100644 --- a/nipype/interfaces/afni/tests/test_auto_TSmooth.py +++ b/nipype/interfaces/afni/tests/test_auto_TSmooth.py @@ -4,39 +4,26 @@ def test_TSmooth_inputs(): input_map = dict( - adaptive=dict(argstr='-adaptive %d', ), - args=dict(argstr='%s', ), - blackman=dict(argstr='-blackman %d', ), - custom=dict( - argstr='-custom %s', - extensions=None, - ), - datum=dict(argstr='-datum %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - hamming=dict(argstr='-hamming %d', ), + adaptive=dict(argstr="-adaptive %d",), + args=dict(argstr="%s",), + blackman=dict(argstr="-blackman %d",), + custom=dict(argstr="-custom %s", extensions=None,), + datum=dict(argstr="-datum %s",), + environ=dict(nohash=True, usedefault=True,), + hamming=dict(argstr="-hamming %d",), in_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-1, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), - lin=dict(argstr='-lin', ), - lin3=dict(argstr='-3lin %d', ), - med=dict(argstr='-med', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - osf=dict(argstr='-osf', ), + lin=dict(argstr="-lin",), + lin3=dict(argstr="-3lin %d",), + med=dict(argstr="-med",), + num_threads=dict(nohash=True, usedefault=True,), + osf=dict(argstr="-osf",), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_file', - name_template='%s_smooth', + name_source="in_file", + name_template="%s_smooth", ), outputtype=dict(), ) @@ -45,8 +32,10 @@ def test_TSmooth_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TSmooth_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = TSmooth.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_TStat.py 
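The TShift metadata above encodes mutually exclusive options through xor: slice_timing excludes tpattern, and tslice excludes tzero, so setting both members of a pair is a spec error. A short usage sketch built from that metadata; it assumes functional.nii exists in the working directory (in_file is declared with exists=True), and the printed command line is illustrative rather than a verbatim capture:

from nipype.interfaces import afni

tshift = afni.TShift()
tshift.inputs.in_file = "functional.nii"  # mandatory, position=-1; must exist
tshift.inputs.tzero = 0.0                 # xor with tslice: set only one
# out_file defaults to functional_tshift via name_source/name_template
print(tshift.cmdline)  # roughly: 3dTshift ... -tzero 0.0 functional.nii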
b/nipype/interfaces/afni/tests/test_auto_TStat.py index 1b0949ce40..0a7a99da76 100644 --- a/nipype/interfaces/afni/tests/test_auto_TStat.py +++ b/nipype/interfaces/afni/tests/test_auto_TStat.py @@ -4,32 +4,19 @@ def test_TStat_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-1, - ), - mask=dict( - argstr='-mask %s', - extensions=None, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - options=dict(argstr='%s', ), + mask=dict(argstr="-mask %s", extensions=None,), + num_threads=dict(nohash=True, usedefault=True,), + options=dict(argstr="%s",), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_file', - name_template='%s_tstat', + name_source="in_file", + name_template="%s_tstat", ), outputtype=dict(), ) @@ -38,8 +25,10 @@ def test_TStat_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TStat_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = TStat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_To3D.py b/nipype/interfaces/afni/tests/test_auto_To3D.py index 86d64804a7..ffed9ba623 100644 --- a/nipype/interfaces/afni/tests/test_auto_To3D.py +++ b/nipype/interfaces/afni/tests/test_auto_To3D.py @@ -4,40 +4,32 @@ def test_To3D_inputs(): input_map = dict( - args=dict(argstr='%s', ), - assumemosaic=dict(argstr='-assume_dicom_mosaic', ), - datatype=dict(argstr='-datum %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - filetype=dict(argstr='-%s', ), - funcparams=dict(argstr='-time:zt %s alt+z2', ), - in_folder=dict( - argstr='%s/*.dcm', - mandatory=True, - position=-1, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + assumemosaic=dict(argstr="-assume_dicom_mosaic",), + datatype=dict(argstr="-datum %s",), + environ=dict(nohash=True, usedefault=True,), + filetype=dict(argstr="-%s",), + funcparams=dict(argstr="-time:zt %s alt+z2",), + in_folder=dict(argstr="%s/*.dcm", mandatory=True, position=-1,), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source=['in_folder'], - name_template='%s', + name_source=["in_folder"], + name_template="%s", ), outputtype=dict(), - skipoutliers=dict(argstr='-skip_outliers', ), + skipoutliers=dict(argstr="-skip_outliers",), ) inputs = To3D.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_To3D_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = To3D.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Undump.py b/nipype/interfaces/afni/tests/test_auto_Undump.py index d5c2dabdfc..3679a118fe 100644 --- a/nipype/interfaces/afni/tests/test_auto_Undump.py +++ b/nipype/interfaces/afni/tests/test_auto_Undump.py @@ -4,47 +4,36 @@ def test_Undump_inputs(): 
input_map = dict( - args=dict(argstr='%s', ), - coordinates_specification=dict(argstr='-%s', ), - datatype=dict(argstr='-datum %s', ), - default_value=dict(argstr='-dval %f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fill_value=dict(argstr='-fval %f', ), - head_only=dict(argstr='-head_only', ), + args=dict(argstr="%s",), + coordinates_specification=dict(argstr="-%s",), + datatype=dict(argstr="-datum %s",), + default_value=dict(argstr="-dval %f",), + environ=dict(nohash=True, usedefault=True,), + fill_value=dict(argstr="-fval %f",), + head_only=dict(argstr="-head_only",), in_file=dict( - argstr='-master %s', + argstr="-master %s", copyfile=False, extensions=None, mandatory=True, position=-1, ), - mask_file=dict( - argstr='-mask %s', - extensions=None, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - orient=dict(argstr='-orient %s', ), - out_file=dict( - argstr='-prefix %s', - extensions=None, - name_source='in_file', - ), + mask_file=dict(argstr="-mask %s", extensions=None,), + num_threads=dict(nohash=True, usedefault=True,), + orient=dict(argstr="-orient %s",), + out_file=dict(argstr="-prefix %s", extensions=None, name_source="in_file",), outputtype=dict(), - srad=dict(argstr='-srad %f', ), + srad=dict(argstr="-srad %f",), ) inputs = Undump.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Undump_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Undump.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Unifize.py b/nipype/interfaces/afni/tests/test_auto_Unifize.py index 73156ba847..71331215b7 100644 --- a/nipype/interfaces/afni/tests/test_auto_Unifize.py +++ b/nipype/interfaces/afni/tests/test_auto_Unifize.py @@ -4,56 +4,44 @@ def test_Unifize_inputs(): input_map = dict( - args=dict(argstr='%s', ), - cl_frac=dict(argstr='-clfrac %f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - epi=dict( - argstr='-EPI', - requires=['no_duplo', 't2'], - xor=['gm'], - ), - gm=dict(argstr='-GM', ), + args=dict(argstr="%s",), + cl_frac=dict(argstr="-clfrac %f",), + environ=dict(nohash=True, usedefault=True,), + epi=dict(argstr="-EPI", requires=["no_duplo", "t2"], xor=["gm"],), + gm=dict(argstr="-GM",), in_file=dict( - argstr='-input %s', + argstr="-input %s", copyfile=False, extensions=None, mandatory=True, position=-1, ), - no_duplo=dict(argstr='-noduplo', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + no_duplo=dict(argstr="-noduplo",), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_file', - name_template='%s_unifized', + name_source="in_file", + name_template="%s_unifized", ), outputtype=dict(), - quiet=dict(argstr='-quiet', ), - rbt=dict(argstr='-rbt %f %f %f', ), - scale_file=dict( - argstr='-ssave %s', - extensions=None, - ), - t2=dict(argstr='-T2', ), - t2_up=dict(argstr='-T2up %f', ), - urad=dict(argstr='-Urad %s', ), + quiet=dict(argstr="-quiet",), + rbt=dict(argstr="-rbt %f %f %f",), + scale_file=dict(argstr="-ssave %s", extensions=None,), + t2=dict(argstr="-T2",), + t2_up=dict(argstr="-T2up %f",), + urad=dict(argstr="-Urad %s",), ) inputs = Unifize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value + + def test_Unifize_outputs(): output_map = dict( - out_file=dict(extensions=None, ), - scale_file=dict(extensions=None, ), + out_file=dict(extensions=None,), scale_file=dict(extensions=None,), ) outputs = Unifize.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_Volreg.py b/nipype/interfaces/afni/tests/test_auto_Volreg.py index 6f6f816a5d..9d7c6aa69b 100644 --- a/nipype/interfaces/afni/tests/test_auto_Volreg.py +++ b/nipype/interfaces/afni/tests/test_auto_Volreg.py @@ -4,77 +4,62 @@ def test_Volreg_inputs(): input_map = dict( - args=dict(argstr='%s', ), - basefile=dict( - argstr='-base %s', - extensions=None, - position=-6, - ), - copyorigin=dict(argstr='-twodup', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + basefile=dict(argstr="-base %s", extensions=None, position=-6,), + copyorigin=dict(argstr="-twodup",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-1, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), - in_weight_volume=dict(argstr="-weight '%s[%d]'", ), - interp=dict(argstr='-%s', ), + in_weight_volume=dict(argstr="-weight '%s[%d]'",), + interp=dict(argstr="-%s",), md1d_file=dict( - argstr='-maxdisp1D %s', + argstr="-maxdisp1D %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_md.1D', + name_source="in_file", + name_template="%s_md.1D", position=-4, ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + num_threads=dict(nohash=True, usedefault=True,), oned_file=dict( - argstr='-1Dfile %s', + argstr="-1Dfile %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s.1D', + name_source="in_file", + name_template="%s.1D", ), oned_matrix_save=dict( - argstr='-1Dmatrix_save %s', + argstr="-1Dmatrix_save %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s.aff12.1D', + name_source="in_file", + name_template="%s.aff12.1D", ), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_file', - name_template='%s_volreg', + name_source="in_file", + name_template="%s_volreg", ), outputtype=dict(), - timeshift=dict(argstr='-tshift 0', ), - verbose=dict(argstr='-verbose', ), - zpad=dict( - argstr='-zpad %d', - position=-5, - ), + timeshift=dict(argstr="-tshift 0",), + verbose=dict(argstr="-verbose",), + zpad=dict(argstr="-zpad %d", position=-5,), ) inputs = Volreg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Volreg_outputs(): output_map = dict( - md1d_file=dict(extensions=None, ), - oned_file=dict(extensions=None, ), - oned_matrix_save=dict(extensions=None, ), - out_file=dict(extensions=None, ), + md1d_file=dict(extensions=None,), + oned_file=dict(extensions=None,), + oned_matrix_save=dict(extensions=None,), + out_file=dict(extensions=None,), ) outputs = Volreg.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_Warp.py b/nipype/interfaces/afni/tests/test_auto_Warp.py index 1adc2c1a1d..5b5e9ded71 100644 --- a/nipype/interfaces/afni/tests/test_auto_Warp.py +++ b/nipype/interfaces/afni/tests/test_auto_Warp.py @@ -4,60 +4,42 @@ def test_Warp_inputs(): input_map = dict( - args=dict(argstr='%s', ), - deoblique=dict(argstr='-deoblique', ), - environ=dict( - nohash=True, - usedefault=True, 
- ), - gridset=dict( - argstr='-gridset %s', - extensions=None, - ), + args=dict(argstr="%s",), + deoblique=dict(argstr="-deoblique",), + environ=dict(nohash=True, usedefault=True,), + gridset=dict(argstr="-gridset %s", extensions=None,), in_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-1, - ), - interp=dict(argstr='-%s', ), - matparent=dict( - argstr='-matparent %s', - extensions=None, - ), - mni2tta=dict(argstr='-mni2tta', ), - newgrid=dict(argstr='-newgrid %f', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - oblique_parent=dict( - argstr='-oblique_parent %s', - extensions=None, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), + interp=dict(argstr="-%s",), + matparent=dict(argstr="-matparent %s", extensions=None,), + mni2tta=dict(argstr="-mni2tta",), + newgrid=dict(argstr="-newgrid %f",), + num_threads=dict(nohash=True, usedefault=True,), + oblique_parent=dict(argstr="-oblique_parent %s", extensions=None,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_warp', + name_source="in_file", + name_template="%s_warp", ), outputtype=dict(), - save_warp=dict(requires=['verbose'], ), - tta2mni=dict(argstr='-tta2mni', ), - verbose=dict(argstr='-verb', ), - zpad=dict(argstr='-zpad %d', ), + save_warp=dict(requires=["verbose"],), + tta2mni=dict(argstr="-tta2mni",), + verbose=dict(argstr="-verb",), + zpad=dict(argstr="-zpad %d",), ) inputs = Warp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Warp_outputs(): output_map = dict( - out_file=dict(extensions=None, ), - warp_file=dict(extensions=None, ), + out_file=dict(extensions=None,), warp_file=dict(extensions=None,), ) outputs = Warp.output_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_ZCutUp.py b/nipype/interfaces/afni/tests/test_auto_ZCutUp.py index 5451e057c9..d234da5a50 100644 --- a/nipype/interfaces/afni/tests/test_auto_ZCutUp.py +++ b/nipype/interfaces/afni/tests/test_auto_ZCutUp.py @@ -4,28 +4,18 @@ def test_ZCutUp_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-1, - ), - keep=dict(argstr='-keep %s', ), - num_threads=dict( - nohash=True, - usedefault=True, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), + keep=dict(argstr="-keep %s",), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_file', - name_template='%s_zcutup', + name_source="in_file", + name_template="%s_zcutup", ), outputtype=dict(), ) @@ -34,8 +24,10 @@ def test_ZCutUp_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ZCutUp_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ZCutUp.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Zcat.py b/nipype/interfaces/afni/tests/test_auto_Zcat.py index b226cf4a3a..81251acfe8 100644 --- 
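Volreg, a few hunks up, shows how a single interface can auto-name several side-car outputs at once: md1d_file, oned_file and oned_matrix_save all derive from in_file through keep_extension plus their own name_template. A usage sketch under the same assumptions as above (functional.nii must exist; AFNI itself is only needed for .run(), not for inspecting the command line):

from nipype.interfaces import afni

volreg = afni.Volreg()
volreg.inputs.in_file = "functional.nii"  # mandatory, position=-1
volreg.inputs.zpad = 4                    # -zpad 4, position=-5
# The derived output names are roughly functional_volreg,
# functional.1D, functional_md.1D and functional.aff12.1D.
print(volreg.cmdline)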
a/nipype/interfaces/afni/tests/test_auto_Zcat.py +++ b/nipype/interfaces/afni/tests/test_auto_Zcat.py @@ -4,46 +4,31 @@ def test_Zcat_inputs(): input_map = dict( - args=dict(argstr='%s', ), - datum=dict(argstr='-datum %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fscale=dict( - argstr='-fscale', - xor=['nscale'], - ), - in_files=dict( - argstr='%s', - copyfile=False, - mandatory=True, - position=-1, - ), - nscale=dict( - argstr='-nscale', - xor=['fscale'], - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + datum=dict(argstr="-datum %s",), + environ=dict(nohash=True, usedefault=True,), + fscale=dict(argstr="-fscale", xor=["nscale"],), + in_files=dict(argstr="%s", copyfile=False, mandatory=True, position=-1,), + nscale=dict(argstr="-nscale", xor=["fscale"],), + num_threads=dict(nohash=True, usedefault=True,), out_file=dict( - argstr='-prefix %s', + argstr="-prefix %s", extensions=None, - name_source='in_files', - name_template='%s_zcat', + name_source="in_files", + name_template="%s_zcat", ), outputtype=dict(), - verb=dict(argstr='-verb', ), + verb=dict(argstr="-verb",), ) inputs = Zcat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Zcat_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Zcat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_auto_Zeropad.py b/nipype/interfaces/afni/tests/test_auto_Zeropad.py index 3f9352c567..6f59445034 100644 --- a/nipype/interfaces/afni/tests/test_auto_Zeropad.py +++ b/nipype/interfaces/afni/tests/test_auto_Zeropad.py @@ -4,85 +4,40 @@ def test_Zeropad_inputs(): input_map = dict( - A=dict( - argstr='-A %i', - xor=['master'], - ), - AP=dict( - argstr='-AP %i', - xor=['master'], - ), - I=dict( - argstr='-I %i', - xor=['master'], - ), - IS=dict( - argstr='-IS %i', - xor=['master'], - ), - L=dict( - argstr='-L %i', - xor=['master'], - ), - P=dict( - argstr='-P %i', - xor=['master'], - ), - R=dict( - argstr='-R %i', - xor=['master'], - ), - RL=dict( - argstr='-RL %i', - xor=['master'], - ), - S=dict( - argstr='-S %i', - xor=['master'], - ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + A=dict(argstr="-A %i", xor=["master"],), + AP=dict(argstr="-AP %i", xor=["master"],), + I=dict(argstr="-I %i", xor=["master"],), + IS=dict(argstr="-IS %i", xor=["master"],), + L=dict(argstr="-L %i", xor=["master"],), + P=dict(argstr="-P %i", xor=["master"],), + R=dict(argstr="-R %i", xor=["master"],), + RL=dict(argstr="-RL %i", xor=["master"],), + S=dict(argstr="-S %i", xor=["master"],), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), in_files=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=-1, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), master=dict( - argstr='-master %s', + argstr="-master %s", extensions=None, - xor=['I', 'S', 'A', 'P', 'L', 'R', 'z', 'RL', 'AP', 'IS', 'mm'], - ), - mm=dict( - argstr='-mm', - xor=['master'], - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - out_file=dict( - argstr='-prefix %s', - extensions=None, - name_template='zeropad', + xor=["I", "S", "A", "P", "L", "R", "z", "RL", "AP", "IS", "mm"], ), + mm=dict(argstr="-mm", xor=["master"],), + num_threads=dict(nohash=True, 
usedefault=True,), + out_file=dict(argstr="-prefix %s", extensions=None, name_template="zeropad",), outputtype=dict(), - z=dict( - argstr='-z %i', - xor=['master'], - ), + z=dict(argstr="-z %i", xor=["master"],), ) inputs = Zeropad.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Zeropad_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Zeropad.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/afni/tests/test_extra_Deconvolve.py b/nipype/interfaces/afni/tests/test_extra_Deconvolve.py index 93adc3b748..1efa14c66b 100644 --- a/nipype/interfaces/afni/tests/test_extra_Deconvolve.py +++ b/nipype/interfaces/afni/tests/test_extra_Deconvolve.py @@ -2,10 +2,11 @@ from ..model import Deconvolve + def test_x1dstop(): deconv = Deconvolve() - deconv.inputs.out_file = 'file.nii' - assert 'out_file' in deconv._list_outputs() + deconv.inputs.out_file = "file.nii" + assert "out_file" in deconv._list_outputs() deconv.inputs.x1D_stop = True - assert 'out_file' not in deconv._list_outputs() - assert 'cbucket' not in deconv._list_outputs() + assert "out_file" not in deconv._list_outputs() + assert "cbucket" not in deconv._list_outputs() diff --git a/nipype/interfaces/afni/utils.py b/nipype/interfaces/afni/utils.py index eb897ac110..61287b934e 100644 --- a/nipype/interfaces/afni/utils.py +++ b/nipype/interfaces/afni/utils.py @@ -14,40 +14,58 @@ import re import numpy as np -from ...utils.filemanip import (load_json, save_json, split_filename) -from ..base import (CommandLineInputSpec, CommandLine, Directory, TraitedSpec, - traits, isdefined, File, InputMultiObject, InputMultiPath, - Undefined, Str) +from ...utils.filemanip import load_json, save_json, split_filename +from ..base import ( + CommandLineInputSpec, + CommandLine, + Directory, + TraitedSpec, + traits, + isdefined, + File, + InputMultiObject, + InputMultiPath, + Undefined, + Str, +) from ...external.due import BibTeX -from .base import (AFNICommandBase, AFNICommand, AFNICommandInputSpec, - AFNICommandOutputSpec, AFNIPythonCommandInputSpec, - AFNIPythonCommand) +from .base import ( + AFNICommandBase, + AFNICommand, + AFNICommandInputSpec, + AFNICommandOutputSpec, + AFNIPythonCommandInputSpec, + AFNIPythonCommand, +) class ABoverlapInputSpec(AFNICommandInputSpec): in_file_a = File( - desc='input file A', - argstr='%s', + desc="input file A", + argstr="%s", position=-3, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) in_file_b = File( - desc='input file B', - argstr='%s', + desc="input file B", + argstr="%s", position=-2, mandatory=True, exists=True, - copyfile=False) - out_file = File( - desc='collect output to a file', argstr=' |& tee %s', position=-1) + copyfile=False, + ) + out_file = File(desc="collect output to a file", argstr=" |& tee %s", position=-1) no_automask = traits.Bool( - desc='consider input datasets as masks', argstr='-no_automask') + desc="consider input datasets as masks", argstr="-no_automask" + ) quiet = traits.Bool( - desc='be as quiet as possible (without being entirely mute)', - argstr='-quiet') + desc="be as quiet as possible (without being entirely mute)", argstr="-quiet" + ) verb = traits.Bool( - desc='print out some progress reports (to stderr)', argstr='-verb') + desc="print out some progress reports (to stderr)", argstr="-verb" + ) class 
ABoverlap(AFNICommand): @@ -71,50 +89,56 @@ class ABoverlap(AFNICommand): """ - _cmd = '3dABoverlap' + _cmd = "3dABoverlap" input_spec = ABoverlapInputSpec output_spec = AFNICommandOutputSpec class AFNItoNIFTIInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dAFNItoNIFTI', - argstr='%s', + desc="input file to 3dAFNItoNIFTI", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s.nii', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file', - hash_files=False) + name_template="%s.nii", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + hash_files=False, + ) float_ = traits.Bool( - desc='Force the output dataset to be 32-bit floats. This option ' - 'should be used when the input AFNI dataset has different float ' - 'scale factors for different sub-bricks, an option that ' - 'NIfTI-1.1 does not support.', - argstr='-float') + desc="Force the output dataset to be 32-bit floats. This option " + "should be used when the input AFNI dataset has different float " + "scale factors for different sub-bricks, an option that " + "NIfTI-1.1 does not support.", + argstr="-float", + ) pure = traits.Bool( - desc='Do NOT write an AFNI extension field into the output file. Only ' - 'use this option if needed. You can also use the \'nifti_tool\' ' - 'program to strip extensions from a file.', - argstr='-pure') + desc="Do NOT write an AFNI extension field into the output file. Only " + "use this option if needed. You can also use the 'nifti_tool' " + "program to strip extensions from a file.", + argstr="-pure", + ) denote = traits.Bool( - desc='When writing the AFNI extension field, remove text notes that ' - 'might contain subject identifying information.', - argstr='-denote') + desc="When writing the AFNI extension field, remove text notes that " + "might contain subject identifying information.", + argstr="-denote", + ) oldid = traits.Bool( - desc='Give the new dataset the input dataset' - 's AFNI ID code.', - argstr='-oldid', - xor=['newid']) + desc="Give the new dataset the input dataset" "s AFNI ID code.", + argstr="-oldid", + xor=["newid"], + ) newid = traits.Bool( - desc='Give the new dataset a new AFNI ID code, to distinguish it from ' - 'the input dataset.', - argstr='-newid', - xor=['oldid']) + desc="Give the new dataset a new AFNI ID code, to distinguish it from " + "the input dataset.", + argstr="-newid", + xor=["oldid"], + ) class AFNItoNIFTI(AFNICommand): @@ -137,14 +161,14 @@ class AFNItoNIFTI(AFNICommand): """ - _cmd = '3dAFNItoNIFTI' + _cmd = "3dAFNItoNIFTI" input_spec = AFNItoNIFTIInputSpec output_spec = AFNICommandOutputSpec def _overload_extension(self, value, name=None): path, base, ext = split_filename(value) - if ext.lower() not in ['.nii', '.nii.gz', '.1d', '.1D']: - ext += '.nii' + if ext.lower() not in [".nii", ".nii.gz", ".1d", ".1D"]: + ext += ".nii" return os.path.join(path, base + ext) def _gen_filename(self, name): @@ -155,20 +179,23 @@ class AutoboxInputSpec(AFNICommandInputSpec): in_file = File( exists=True, mandatory=True, - argstr='-input %s', - desc='input file', - copyfile=False) + argstr="-input %s", + desc="input file", + copyfile=False, + ) padding = traits.Int( - argstr='-npad %d', - desc='Number of extra voxels to pad on each side of box') + argstr="-npad %d", desc="Number of extra voxels to pad on each side of box" + ) out_file = File( - argstr='-prefix %s', name_source='in_file', name_template='%s_autobox') + 
argstr="-prefix %s", name_source="in_file", name_template="%s_autobox" + ) no_clustering = traits.Bool( - argstr='-noclust', - desc='Don\'t do any clustering to find box. Any non-zero voxel will ' - 'be preserved in the cropped volume. The default method uses ' - 'some clustering to find the cropping box, and will clip off ' - 'small isolated blobs.') + argstr="-noclust", + desc="Don't do any clustering to find box. Any non-zero voxel will " + "be preserved in the cropped volume. The default method uses " + "some clustering to find the cropping box, and will clip off " + "small isolated blobs.", + ) class AutoboxOutputSpec(TraitedSpec): # out_file not mandatory @@ -179,7 +206,7 @@ class AutoboxOutputSpec(TraitedSpec): # out_file not mandatory z_min = traits.Int() z_max = traits.Int() - out_file = File(desc='output file') + out_file = File(desc="output file") class Autobox(AFNICommand): @@ -202,17 +229,18 @@ class Autobox(AFNICommand): """ - _cmd = '3dAutobox' + _cmd = "3dAutobox" input_spec = AutoboxInputSpec output_spec = AutoboxOutputSpec def aggregate_outputs(self, runtime=None, needed_outputs=None): - outputs = super(Autobox, self).aggregate_outputs( - runtime, needed_outputs) - pattern = r'x=(?P-?\d+)\.\.(?P-?\d+) '\ - r'y=(?P-?\d+)\.\.(?P-?\d+) '\ - r'z=(?P-?\d+)\.\.(?P-?\d+)' - for line in runtime.stderr.split('\n'): + outputs = super(Autobox, self).aggregate_outputs(runtime, needed_outputs) + pattern = ( + r"x=(?P-?\d+)\.\.(?P-?\d+) " + r"y=(?P-?\d+)\.\.(?P-?\d+) " + r"z=(?P-?\d+)\.\.(?P-?\d+)" + ) + for line in runtime.stderr.split("\n"): m = re.search(pattern, line) if m: d = m.groupdict() @@ -222,40 +250,41 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): class BrickStatInputSpec(CommandLineInputSpec): in_file = File( - desc='input file to 3dmaskave', - argstr='%s', + desc="input file to 3dmaskave", + argstr="%s", position=-1, mandatory=True, - exists=True) + exists=True, + ) mask = File( - desc='-mask dset = use dset as mask to include/exclude voxels', - argstr='-mask %s', + desc="-mask dset = use dset as mask to include/exclude voxels", + argstr="-mask %s", position=2, - exists=True) + exists=True, + ) min = traits.Bool( - desc='print the minimum value in dataset', argstr='-min', position=1) + desc="print the minimum value in dataset", argstr="-min", position=1 + ) slow = traits.Bool( - desc='read the whole dataset to find the min and max values', - argstr='-slow') - max = traits.Bool( - desc='print the maximum value in the dataset', argstr='-max') - mean = traits.Bool( - desc='print the mean value in the dataset', argstr='-mean') - sum = traits.Bool( - desc='print the sum of values in the dataset', argstr='-sum') - var = traits.Bool(desc='print the variance in the dataset', argstr='-var') + desc="read the whole dataset to find the min and max values", argstr="-slow" + ) + max = traits.Bool(desc="print the maximum value in the dataset", argstr="-max") + mean = traits.Bool(desc="print the mean value in the dataset", argstr="-mean") + sum = traits.Bool(desc="print the sum of values in the dataset", argstr="-sum") + var = traits.Bool(desc="print the variance in the dataset", argstr="-var") percentile = traits.Tuple( traits.Float, traits.Float, traits.Float, - desc='p0 ps p1 write the percentile values starting ' - 'at p0% and ending at p1% at a step of ps%. ' - 'only one sub-brick is accepted.', - argstr='-percentile %.3f %.3f %.3f') + desc="p0 ps p1 write the percentile values starting " + "at p0% and ending at p1% at a step of ps%. 
" + "only one sub-brick is accepted.", + argstr="-percentile %.3f %.3f %.3f", + ) class BrickStatOutputSpec(TraitedSpec): - min_val = traits.Float(desc='output') + min_val = traits.Float(desc="output") class BrickStat(AFNICommandBase): @@ -278,7 +307,8 @@ class BrickStat(AFNICommandBase): >>> res = brickstat.run() # doctest: +SKIP """ - _cmd = '3dBrickStat' + + _cmd = "3dBrickStat" input_spec = BrickStatInputSpec output_spec = BrickStatOutputSpec @@ -286,16 +316,16 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() - outfile = os.path.join(os.getcwd(), 'stat_result.json') + outfile = os.path.join(os.getcwd(), "stat_result.json") if runtime is None: try: - min_val = load_json(outfile)['stat'] + min_val = load_json(outfile)["stat"] except IOError: return self.run().outputs else: min_val = [] - for line in runtime.stdout.split('\n'): + for line in runtime.stdout.split("\n"): if line: values = line.split() if len(values) > 1: @@ -315,51 +345,53 @@ class BucketInputSpec(AFNICommandInputSpec): in_file = traits.List( traits.Tuple( (File(exists=True, copyfile=False), traits.Str(argstr="'%s'")), - artstr="%s%s"), + artstr="%s%s", + ), position=-1, mandatory=True, argstr="%s", - desc='List of tuples of input datasets and subbrick selection strings' - 'as described in more detail in the following afni help string' - 'Input dataset specified using one of these forms:' - ' \'prefix+view\', \'prefix+view.HEAD\', or \'prefix+view.BRIK\'.' - 'You can also add a sub-brick selection list after the end of the' - 'dataset name. This allows only a subset of the sub-bricks to be' - 'included into the output (by default, all of the input dataset' - 'is copied into the output). A sub-brick selection list looks like' - 'one of the following forms:' - ' fred+orig[5] ==> use only sub-brick #5' - ' fred+orig[5,9,17] ==> use #5, #9, and #17' - ' fred+orig[5..8] or [5-8] ==> use #5, #6, #7, and #8' - ' fred+orig[5..13(2)] or [5-13(2)] ==> use #5, #7, #9, #11, and #13' - 'Sub-brick indexes start at 0. You can use the character \'$\'' - 'to indicate the last sub-brick in a dataset; for example, you' - 'can select every third sub-brick by using the selection list' - ' fred+orig[0..$(3)]' - 'N.B.: The sub-bricks are output in the order specified, which may' - ' not be the order in the original datasets. For example, using' - ' fred+orig[0..$(2),1..$(2)]' - ' will cause the sub-bricks in fred+orig to be output into the' - ' new dataset in an interleaved fashion. Using' - ' fred+orig[$..0]' - ' will reverse the order of the sub-bricks in the output.' - 'N.B.: Bucket datasets have multiple sub-bricks, but do NOT have' - ' a time dimension. You can input sub-bricks from a 3D+time dataset' - ' into a bucket dataset. You can use the \'3dinfo\' program to see' - ' how many sub-bricks a 3D+time or a bucket dataset contains.' - 'N.B.: In non-bucket functional datasets (like the \'fico\' datasets' - ' output by FIM, or the \'fitt\' datasets output by 3dttest), sub-brick' - ' [0] is the \'intensity\' and sub-brick [1] is the statistical parameter' - ' used as a threshold. Thus, to create a bucket dataset using the' - ' intensity from dataset A and the threshold from dataset B, and' - ' calling the output dataset C, you would type' - ' 3dbucket -prefix C -fbuc \'A+orig[0]\' -fbuc \'B+orig[1]\'' - 'WARNING: using this program, it is possible to create a dataset that' - ' has different basic datum types for different sub-bricks' - ' (e.g., shorts for brick 0, floats for brick 1).' 
- ' Do NOT do this! Very few AFNI programs will work correctly' - ' with such datasets!') - out_file = File(argstr='-prefix %s', name_template='buck') + desc="List of tuples of input datasets and subbrick selection strings" + "as described in more detail in the following afni help string" + "Input dataset specified using one of these forms:" + " 'prefix+view', 'prefix+view.HEAD', or 'prefix+view.BRIK'." + "You can also add a sub-brick selection list after the end of the" + "dataset name. This allows only a subset of the sub-bricks to be" + "included into the output (by default, all of the input dataset" + "is copied into the output). A sub-brick selection list looks like" + "one of the following forms:" + " fred+orig[5] ==> use only sub-brick #5" + " fred+orig[5,9,17] ==> use #5, #9, and #17" + " fred+orig[5..8] or [5-8] ==> use #5, #6, #7, and #8" + " fred+orig[5..13(2)] or [5-13(2)] ==> use #5, #7, #9, #11, and #13" + "Sub-brick indexes start at 0. You can use the character '$'" + "to indicate the last sub-brick in a dataset; for example, you" + "can select every third sub-brick by using the selection list" + " fred+orig[0..$(3)]" + "N.B.: The sub-bricks are output in the order specified, which may" + " not be the order in the original datasets. For example, using" + " fred+orig[0..$(2),1..$(2)]" + " will cause the sub-bricks in fred+orig to be output into the" + " new dataset in an interleaved fashion. Using" + " fred+orig[$..0]" + " will reverse the order of the sub-bricks in the output." + "N.B.: Bucket datasets have multiple sub-bricks, but do NOT have" + " a time dimension. You can input sub-bricks from a 3D+time dataset" + " into a bucket dataset. You can use the '3dinfo' program to see" + " how many sub-bricks a 3D+time or a bucket dataset contains." + "N.B.: In non-bucket functional datasets (like the 'fico' datasets" + " output by FIM, or the 'fitt' datasets output by 3dttest), sub-brick" + " [0] is the 'intensity' and sub-brick [1] is the statistical parameter" + " used as a threshold. Thus, to create a bucket dataset using the" + " intensity from dataset A and the threshold from dataset B, and" + " calling the output dataset C, you would type" + " 3dbucket -prefix C -fbuc 'A+orig[0]' -fbuc 'B+orig[1]'" + "WARNING: using this program, it is possible to create a dataset that" + " has different basic datum types for different sub-bricks" + " (e.g., shorts for brick 0, floats for brick 1)." + " Do NOT do this! 
Very few AFNI programs will work correctly" + " with such datasets!", + ) + out_file = File(argstr="-prefix %s", name_template="buck") class Bucket(AFNICommand): @@ -382,41 +414,42 @@ class Bucket(AFNICommand): """ - _cmd = '3dbucket' + _cmd = "3dbucket" input_spec = BucketInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, spec, value): - if name == 'in_file': - return spec.argstr % ( - ' '.join([i[0] + "'" + i[1] + "'" for i in value])) + if name == "in_file": + return spec.argstr % (" ".join([i[0] + "'" + i[1] + "'" for i in value])) return super(Bucket, self)._format_arg(name, spec, value) class CalcInputSpec(AFNICommandInputSpec): in_file_a = File( - desc='input file to 3dcalc', - argstr='-a %s', + desc="input file to 3dcalc", + argstr="-a %s", position=0, mandatory=True, - exists=True) + exists=True, + ) in_file_b = File( - desc='operand file to 3dcalc', argstr='-b %s', position=1, exists=True) + desc="operand file to 3dcalc", argstr="-b %s", position=1, exists=True + ) in_file_c = File( - desc='operand file to 3dcalc', argstr='-c %s', position=2, exists=True) + desc="operand file to 3dcalc", argstr="-c %s", position=2, exists=True + ) out_file = File( - name_template='%s_calc', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file_a') - expr = Str(desc='expr', argstr='-expr "%s"', position=3, mandatory=True) - start_idx = traits.Int( - desc='start index for in_file_a', requires=['stop_idx']) - stop_idx = traits.Int( - desc='stop index for in_file_a', requires=['start_idx']) - single_idx = traits.Int(desc='volume index for in_file_a') - overwrite = traits.Bool(desc='overwrite output', argstr='-overwrite') - other = File(desc='other options', argstr='') + name_template="%s_calc", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file_a", + ) + expr = Str(desc="expr", argstr='-expr "%s"', position=3, mandatory=True) + start_idx = traits.Int(desc="start index for in_file_a", requires=["stop_idx"]) + stop_idx = traits.Int(desc="stop index for in_file_a", requires=["start_idx"]) + single_idx = traits.Int(desc="volume index for in_file_a") + overwrite = traits.Bool(desc="overwrite output", argstr="-overwrite") + other = File(desc="other options", argstr="") class Calc(AFNICommand): @@ -451,82 +484,89 @@ class Calc(AFNICommand): """ - _cmd = '3dcalc' + _cmd = "3dcalc" input_spec = CalcInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, trait_spec, value): - if name == 'in_file_a': + if name == "in_file_a": arg = trait_spec.argstr % value if isdefined(self.inputs.start_idx): - arg += '[%d..%d]' % (self.inputs.start_idx, - self.inputs.stop_idx) + arg += "[%d..%d]" % (self.inputs.start_idx, self.inputs.stop_idx) if isdefined(self.inputs.single_idx): - arg += '[%d]' % (self.inputs.single_idx) + arg += "[%d]" % (self.inputs.single_idx) return arg return super(Calc, self)._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): """Skip the arguments without argstr metadata """ - return super( - Calc, self)._parse_inputs(skip=('start_idx', 'stop_idx', 'other')) + return super(Calc, self)._parse_inputs(skip=("start_idx", "stop_idx", "other")) class CatInputSpec(AFNICommandInputSpec): - in_files = traits.List( - File(exists=True), argstr="%s", mandatory=True, position=-2) + in_files = traits.List(File(exists=True), argstr="%s", mandatory=True, position=-2) out_file = File( - argstr='> %s', - value='catout.1d', + argstr="> %s", + value="catout.1d", usedefault=True, - desc='output 
(concatenated) file name', + desc="output (concatenated) file name", position=-1, - mandatory=True) + mandatory=True, + ) omitconst = traits.Bool( - desc='Omit columns that are identically constant from output.', - argstr='-nonconst') + desc="Omit columns that are identically constant from output.", + argstr="-nonconst", + ) keepfree = traits.Bool( - desc='Keep only columns that are marked as \'free\' in the ' - '3dAllineate header from \'-1Dparam_save\'. ' - 'If there is no such header, all columns are kept.', - argstr='-nonfixed') + desc="Keep only columns that are marked as 'free' in the " + "3dAllineate header from '-1Dparam_save'. " + "If there is no such header, all columns are kept.", + argstr="-nonfixed", + ) out_format = traits.Enum( - 'int', - 'nice', - 'double', - 'fint', - 'cint', - argstr='-form %s', - desc='specify data type for output. Valid types are \'int\', ' - '\'nice\', \'double\', \'fint\', and \'cint\'.', - xor=['out_int', 'out_nice', 'out_double', 'out_fint', 'out_cint']) + "int", + "nice", + "double", + "fint", + "cint", + argstr="-form %s", + desc="specify data type for output. Valid types are 'int', " + "'nice', 'double', 'fint', and 'cint'.", + xor=["out_int", "out_nice", "out_double", "out_fint", "out_cint"], + ) stack = traits.Bool( - desc='Stack the columns of the resultant matrix in the output.', - argstr='-stack') + desc="Stack the columns of the resultant matrix in the output.", argstr="-stack" + ) sel = traits.Str( - desc='Apply the same column/row selection string to all filenames ' - 'on the command line.', - argstr='-sel %s') + desc="Apply the same column/row selection string to all filenames " + "on the command line.", + argstr="-sel %s", + ) out_int = traits.Bool( - desc='specifiy int data type for output', - argstr='-i', - xor=['out_format', 'out_nice', 'out_double', 'out_fint', 'out_cint']) + desc="specifiy int data type for output", + argstr="-i", + xor=["out_format", "out_nice", "out_double", "out_fint", "out_cint"], + ) out_nice = traits.Bool( - desc='specifiy nice data type for output', - argstr='-n', - xor=['out_format', 'out_int', 'out_double', 'out_fint', 'out_cint']) + desc="specifiy nice data type for output", + argstr="-n", + xor=["out_format", "out_int", "out_double", "out_fint", "out_cint"], + ) out_double = traits.Bool( - desc='specifiy double data type for output', - argstr='-d', - xor=['out_format', 'out_nice', 'out_int', 'out_fint', 'out_cint']) + desc="specifiy double data type for output", + argstr="-d", + xor=["out_format", "out_nice", "out_int", "out_fint", "out_cint"], + ) out_fint = traits.Bool( - desc='specifiy int, rounded down, data type for output', - argstr='-f', - xor=['out_format', 'out_nice', 'out_double', 'out_int', 'out_cint']) + desc="specifiy int, rounded down, data type for output", + argstr="-f", + xor=["out_format", "out_nice", "out_double", "out_int", "out_cint"], + ) out_cint = traits.Bool( - desc='specifiy int, rounded up, data type for output', - xor=['out_format', 'out_nice', 'out_double', 'out_fint', 'out_int']) + desc="specifiy int, rounded up, data type for output", + xor=["out_format", "out_nice", "out_double", "out_fint", "out_int"], + ) class Cat(AFNICommand): @@ -551,7 +591,7 @@ class Cat(AFNICommand): """ - _cmd = '1dcat' + _cmd = "1dcat" input_spec = CatInputSpec output_spec = AFNICommandOutputSpec @@ -562,32 +602,37 @@ class CatMatvecInputSpec(AFNICommandInputSpec): desc="list of tuples of mfiles and associated opkeys", mandatory=True, argstr="%s", - position=-2) + position=-2, + ) out_file = File( 
argstr=" > %s", - name_template='%s_cat.aff12.1D', - name_source='in_file', + name_template="%s_cat.aff12.1D", + name_source="in_file", keep_extension=False, desc="File to write concattenated matvecs to", position=-1, - mandatory=True) + mandatory=True, + ) matrix = traits.Bool( desc="indicates that the resulting matrix will" "be written to outfile in the 'MATRIX(...)' format (FORM 3)." "This feature could be used, with clever scripting, to input" "a matrix directly on the command line to program 3dWarp.", argstr="-MATRIX", - xor=['oneline', 'fourxfour']) + xor=["oneline", "fourxfour"], + ) oneline = traits.Bool( desc="indicates that the resulting matrix" "will simply be written as 12 numbers on one line.", argstr="-ONELINE", - xor=['matrix', 'fourxfour']) + xor=["matrix", "fourxfour"], + ) fourxfour = traits.Bool( desc="Output matrix in augmented form (last row is 0 0 0 1)" "This option does not work with -MATRIX or -ONELINE", argstr="-4x4", - xor=['matrix', 'oneline']) + xor=["matrix", "oneline"], + ) class CatMatvec(AFNICommand): @@ -609,68 +654,75 @@ class CatMatvec(AFNICommand): """ - _cmd = 'cat_matvec' + _cmd = "cat_matvec" input_spec = CatMatvecInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, spec, value): - if name == 'in_file': + if name == "in_file": # Concatenate a series of filenames, with optional opkeys - return ' '.join('%s -%s' % (mfile, opkey) if opkey else mfile - for mfile, opkey in value) + return " ".join( + "%s -%s" % (mfile, opkey) if opkey else mfile for mfile, opkey in value + ) return super(CatMatvec, self)._format_arg(name, spec, value) class CenterMassInputSpec(CommandLineInputSpec): in_file = File( - desc='input file to 3dCM', - argstr='%s', + desc="input file to 3dCM", + argstr="%s", position=-2, mandatory=True, exists=True, - copyfile=True) + copyfile=True, + ) cm_file = File( - name_source='in_file', - name_template='%s_cm.out', + name_source="in_file", + name_template="%s_cm.out", hash_files=False, keep_extension=False, desc="File to write center of mass to", argstr="> %s", - position=-1) + position=-1, + ) mask_file = File( - desc='Only voxels with nonzero values in the provided mask will be ' - 'averaged.', - argstr='-mask %s', - exists=True) - automask = traits.Bool( - desc='Generate the mask automatically', argstr='-automask') + desc="Only voxels with nonzero values in the provided mask will be " + "averaged.", + argstr="-mask %s", + exists=True, + ) + automask = traits.Bool(desc="Generate the mask automatically", argstr="-automask") set_cm = traits.Tuple( (traits.Float(), traits.Float(), traits.Float()), - desc='After computing the center of mass, set the origin fields in ' - 'the header so that the center of mass will be at (x,y,z) in ' - 'DICOM coords.', - argstr='-set %f %f %f') + desc="After computing the center of mass, set the origin fields in " + "the header so that the center of mass will be at (x,y,z) in " + "DICOM coords.", + argstr="-set %f %f %f", + ) local_ijk = traits.Bool( - desc='Output values as (i,j,k) in local orienation', - argstr='-local_ijk') + desc="Output values as (i,j,k) in local orienation", argstr="-local_ijk" + ) roi_vals = traits.List( traits.Int, - desc='Compute center of mass for each blob with voxel value of v0, ' - 'v1, v2, etc. This option is handy for getting ROI centers of ' - 'mass.', - argstr='-roi_vals %s') + desc="Compute center of mass for each blob with voxel value of v0, " + "v1, v2, etc. 
This option is handy for getting ROI centers of " + "mass.", + argstr="-roi_vals %s", + ) all_rois = traits.Bool( - desc='Don\'t bother listing the values of ROIs you want: The program ' - 'will find all of them and produce a full list', - argstr='-all_rois') + desc="Don't bother listing the values of ROIs you want: The program " + "will find all of them and produce a full list", + argstr="-all_rois", + ) class CenterMassOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='output file') - cm_file = File(desc='file with the center of mass coordinates') + out_file = File(exists=True, desc="output file") + cm_file = File(desc="file with the center of mass coordinates") cm = traits.List( traits.Tuple(traits.Float(), traits.Float(), traits.Float()), - desc='center of mass') + desc="center of mass", + ) class CenterMass(AFNICommandBase): @@ -698,41 +750,53 @@ class CenterMass(AFNICommandBase): >>> res = 3dcm.run() # doctest: +SKIP """ - _cmd = '3dCM' + _cmd = "3dCM" input_spec = CenterMassInputSpec output_spec = CenterMassOutputSpec def _list_outputs(self): outputs = super(CenterMass, self)._list_outputs() - outputs['out_file'] = os.path.abspath(self.inputs.in_file) - outputs['cm_file'] = os.path.abspath(self.inputs.cm_file) - sout = np.loadtxt(outputs['cm_file'], ndmin=2) - outputs['cm'] = [tuple(s) for s in sout] + outputs["out_file"] = os.path.abspath(self.inputs.in_file) + outputs["cm_file"] = os.path.abspath(self.inputs.cm_file) + sout = np.loadtxt(outputs["cm_file"], ndmin=2) + outputs["cm"] = [tuple(s) for s in sout] return outputs class ConvertDsetInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to ConvertDset', - argstr='-input %s', + desc="input file to ConvertDset", + argstr="-input %s", position=-2, mandatory=True, - exists=True) + exists=True, + ) out_file = File( - desc='output file for ConvertDset', - argstr='-prefix %s', + desc="output file for ConvertDset", + argstr="-prefix %s", position=-1, - mandatory=True) + mandatory=True, + ) out_type = traits.Enum( - ('niml', 'niml_asc', 'niml_bi', - '1D', '1Dp', '1Dpt', - 'gii', 'gii_asc', 'gii_b64', 'gii_b64gz'), - desc='output type', - argstr='-o_%s', + ( + "niml", + "niml_asc", + "niml_bi", + "1D", + "1Dp", + "1Dpt", + "gii", + "gii_asc", + "gii_b64", + "gii_b64gz", + ), + desc="output type", + argstr="-o_%s", mandatory=True, - position=0) + position=0, + ) class ConvertDset(AFNICommandBase): @@ -754,31 +818,33 @@ class ConvertDset(AFNICommandBase): >>> res = convertdset.run() # doctest: +SKIP """ - _cmd = 'ConvertDset' + _cmd = "ConvertDset" input_spec = ConvertDsetInputSpec output_spec = AFNICommandOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class CopyInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dcopy', - argstr='%s', + desc="input file to 3dcopy", + argstr="%s", position=-2, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_copy', - desc='output image file name', - argstr='%s', + name_template="%s_copy", + desc="output image file name", + argstr="%s", position=-1, - name_source='in_file') - verbose = traits.Bool(desc='print progress reports', argstr='-verb') + name_source="in_file", + ) + verbose = traits.Bool(desc="print progress reports", argstr="-verb") class Copy(AFNICommand): @@ -819,7 +885,7 @@ class Copy(AFNICommand): """ - _cmd = '3dcopy' + _cmd = "3dcopy" 
input_spec = CopyInputSpec output_spec = AFNICommandOutputSpec @@ -829,47 +895,48 @@ class DotInputSpec(AFNICommandInputSpec): (File()), desc="list of input files, possibly with subbrick selectors", argstr="%s ...", - position=-2) - out_file = File( - desc='collect output to a file', argstr=' |& tee %s', position=-1) - mask = File(desc='Use this dataset as a mask', argstr='-mask %s') + position=-2, + ) + out_file = File(desc="collect output to a file", argstr=" |& tee %s", position=-1) + mask = File(desc="Use this dataset as a mask", argstr="-mask %s") mrange = traits.Tuple( (traits.Float(), traits.Float()), - desc='Means to further restrict the voxels from \'mset\' so that' - 'only those mask values within this range (inclusive) willbe used.', - argstr='-mrange %s %s') + desc="Means to further restrict the voxels from 'mset' so that " + "only those mask values within this range (inclusive) will be used.", + argstr="-mrange %s %s", + ) demean = traits.Bool( - desc= - 'Remove the mean from each volume prior to computing the correlation', - argstr='-demean') + desc="Remove the mean from each volume prior to computing the correlation", + argstr="-demean", + ) docor = traits.Bool( - desc='Return the correlation coefficient (default).', argstr='-docor') - dodot = traits.Bool( - desc='Return the dot product (unscaled).', argstr='-dodot') + desc="Return the correlation coefficient (default).", argstr="-docor" + ) + dodot = traits.Bool(desc="Return the dot product (unscaled).", argstr="-dodot") docoef = traits.Bool( - desc= - 'Return the least square fit coefficients {{a,b}} so that dset2 is approximately a + b*dset1', - argstr='-docoef') + desc="Return the least square fit coefficients {{a,b}} so that dset2 is approximately a + b*dset1", + argstr="-docoef", + ) dosums = traits.Bool( - desc= - 'Return the 6 numbers xbar= ybar= <(x-xbar)^2> <(y-ybar)^2> <(x-xbar)(y-ybar)> and the correlation coefficient.', - argstr='-dosums') + desc="Return the 6 numbers xbar= ybar= <(x-xbar)^2> <(y-ybar)^2> <(x-xbar)(y-ybar)> and the correlation coefficient.", + argstr="-dosums", + ) dodice = traits.Bool( - desc='Return the Dice coefficient (the Sorensen-Dice index).', - argstr='-dodice') + desc="Return the Dice coefficient (the Sorensen-Dice index).", argstr="-dodice" + ) doeta2 = traits.Bool( - desc='Return eta-squared (Cohen, NeuroImage 2008).', argstr='-doeta2') + desc="Return eta-squared (Cohen, NeuroImage 2008).", argstr="-doeta2" + ) full = traits.Bool( - desc= - 'Compute the whole matrix. A waste of time, but handy for parsing.', - argstr='-full') + desc="Compute the whole matrix. A waste of time, but handy for parsing.", + argstr="-full", + ) show_labels = traits.Bool( - desc= - 'Print sub-brick labels to help identify what is being correlated. This option is useful when' - 'you have more than 2 sub-bricks at input.', - argstr='-show_labels') - upper = traits.Bool( - desc='Compute upper triangular matrix', argstr='-upper') + desc="Print sub-brick labels to help identify what is being correlated. 
+        "you have more than 2 sub-bricks at input.",
+        argstr="-show_labels",
+    )
+    upper = traits.Bool(desc="Compute upper triangular matrix", argstr="-upper")


 class Dot(AFNICommand):
@@ -890,53 +957,60 @@ class Dot(AFNICommand):
     >>> res = copy3d.run()  # doctest: +SKIP

     """
-    _cmd = '3dDot'
+
+    _cmd = "3dDot"
     input_spec = DotInputSpec
     output_spec = AFNICommandOutputSpec


 class Edge3InputSpec(AFNICommandInputSpec):
     in_file = File(
-        desc='input file to 3dedge3',
-        argstr='-input %s',
+        desc="input file to 3dedge3",
+        argstr="-input %s",
         position=0,
         mandatory=True,
         exists=True,
-        copyfile=False)
-    out_file = File(
-        desc='output image file name', position=-1, argstr='-prefix %s')
+        copyfile=False,
+    )
+    out_file = File(desc="output image file name", position=-1, argstr="-prefix %s")
     datum = traits.Enum(
-        'byte',
-        'short',
-        'float',
-        argstr='-datum %s',
-        desc='specify data type for output. Valid types are \'byte\', '
-        '\'short\' and \'float\'.')
+        "byte",
+        "short",
+        "float",
+        argstr="-datum %s",
+        desc="specify data type for output. Valid types are 'byte', "
+        "'short' and 'float'.",
+    )
     fscale = traits.Bool(
-        desc='Force scaling of the output to the maximum integer range.',
-        argstr='-fscale',
-        xor=['gscale', 'nscale', 'scale_floats'])
+        desc="Force scaling of the output to the maximum integer range.",
+        argstr="-fscale",
+        xor=["gscale", "nscale", "scale_floats"],
+    )
     gscale = traits.Bool(
-        desc='Same as \'-fscale\', but also forces each output sub-brick to '
-        'to get the same scaling factor.',
-        argstr='-gscale',
-        xor=['fscale', 'nscale', 'scale_floats'])
+        desc="Same as '-fscale', but also forces each output sub-brick "
+        "to get the same scaling factor.",
+        argstr="-gscale",
+        xor=["fscale", "nscale", "scale_floats"],
+    )
     nscale = traits.Bool(
-        desc='Don\'t do any scaling on output to byte or short datasets.',
-        argstr='-nscale',
-        xor=['fscale', 'gscale', 'scale_floats'])
+        desc="Don't do any scaling on output to byte or short datasets.",
+        argstr="-nscale",
+        xor=["fscale", "gscale", "scale_floats"],
+    )
     scale_floats = traits.Float(
-        desc='Multiply input by VAL, but only if the input datum is '
-        'float. This is needed when the input dataset '
-        'has a small range, like 0 to 2.0 for instance. '
-        'With such a range, very few edges are detected due to '
-        'what I suspect to be truncation problems. '
-        'Multiplying such a dataset by 10000 fixes the problem '
-        'and the scaling is undone at the output.',
-        argstr='-scale_floats %f',
-        xor=['fscale', 'gscale', 'nscale'])
+        desc="Multiply input by VAL, but only if the input datum is "
+        "float. This is needed when the input dataset "
+        "has a small range, like 0 to 2.0 for instance. "
+        "With such a range, very few edges are detected due to "
+        "what I suspect to be truncation problems. 
" + "Multiplying such a dataset by 10000 fixes the problem " + "and the scaling is undone at the output.", + argstr="-scale_floats %f", + xor=["fscale", "gscale", "nscale"], + ) verbose = traits.Bool( - desc='Print out some information along the way.', argstr='-verbose') + desc="Print out some information along the way.", argstr="-verbose" + ) class Edge3(AFNICommand): @@ -982,35 +1056,37 @@ class Edge3(AFNICommand): """ - _cmd = '3dedge3' + _cmd = "3dedge3" input_spec = Edge3InputSpec output_spec = AFNICommandOutputSpec class EvalInputSpec(AFNICommandInputSpec): in_file_a = File( - desc='input file to 1deval', - argstr='-a %s', + desc="input file to 1deval", + argstr="-a %s", position=0, mandatory=True, - exists=True) + exists=True, + ) in_file_b = File( - desc='operand file to 1deval', argstr='-b %s', position=1, exists=True) + desc="operand file to 1deval", argstr="-b %s", position=1, exists=True + ) in_file_c = File( - desc='operand file to 1deval', argstr='-c %s', position=2, exists=True) + desc="operand file to 1deval", argstr="-c %s", position=2, exists=True + ) out_file = File( - name_template='%s_calc', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file_a') - out1D = traits.Bool(desc='output in 1D', argstr='-1D') - expr = Str(desc='expr', argstr='-expr "%s"', position=3, mandatory=True) - start_idx = traits.Int( - desc='start index for in_file_a', requires=['stop_idx']) - stop_idx = traits.Int( - desc='stop index for in_file_a', requires=['start_idx']) - single_idx = traits.Int(desc='volume index for in_file_a') - other = File(desc='other options', argstr='') + name_template="%s_calc", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file_a", + ) + out1D = traits.Bool(desc="output in 1D", argstr="-1D") + expr = Str(desc="expr", argstr='-expr "%s"', position=3, mandatory=True) + start_idx = traits.Int(desc="start index for in_file_a", requires=["stop_idx"]) + stop_idx = traits.Int(desc="stop index for in_file_a", requires=["start_idx"]) + single_idx = traits.Int(desc="volume index for in_file_a") + other = File(desc="other options", argstr="") class Eval(AFNICommand): @@ -1036,125 +1112,132 @@ class Eval(AFNICommand): """ - _cmd = '1deval' + _cmd = "1deval" input_spec = EvalInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, trait_spec, value): - if name == 'in_file_a': + if name == "in_file_a": arg = trait_spec.argstr % value if isdefined(self.inputs.start_idx): - arg += '[%d..%d]' % (self.inputs.start_idx, - self.inputs.stop_idx) + arg += "[%d..%d]" % (self.inputs.start_idx, self.inputs.stop_idx) if isdefined(self.inputs.single_idx): - arg += '[%d]' % (self.inputs.single_idx) + arg += "[%d]" % (self.inputs.single_idx) return arg return super(Eval, self)._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): """Skip the arguments without argstr metadata """ - return super( - Eval, self)._parse_inputs(skip=('start_idx', 'stop_idx', 'other')) + return super(Eval, self)._parse_inputs(skip=("start_idx", "stop_idx", "other")) class FWHMxInputSpec(CommandLineInputSpec): in_file = File( - desc='input dataset', argstr='-input %s', mandatory=True, exists=True) + desc="input dataset", argstr="-input %s", mandatory=True, exists=True + ) out_file = File( - argstr='> %s', - name_source='in_file', - name_template='%s_fwhmx.out', + argstr="> %s", + name_source="in_file", + name_template="%s_fwhmx.out", position=-1, keep_extension=False, - desc='output file') + desc="output file", + ) out_subbricks = 
File( - argstr='-out %s', - name_source='in_file', - name_template='%s_subbricks.out', + argstr="-out %s", + name_source="in_file", + name_template="%s_subbricks.out", keep_extension=False, - desc='output file listing the subbricks FWHM') + desc="output file listing the subbricks FWHM", + ) mask = File( - desc='use only voxels that are nonzero in mask', - argstr='-mask %s', - exists=True) + desc="use only voxels that are nonzero in mask", argstr="-mask %s", exists=True + ) automask = traits.Bool( False, usedefault=True, - argstr='-automask', - desc='compute a mask from THIS dataset, a la 3dAutomask') + argstr="-automask", + desc="compute a mask from THIS dataset, a la 3dAutomask", + ) detrend = traits.Either( traits.Bool(), traits.Int(), default=False, - argstr='-detrend', - xor=['demed'], + argstr="-detrend", + xor=["demed"], usedefault=True, - desc='instead of demed (0th order detrending), detrend to the ' - 'specified order. If order is not given, the program picks ' - 'q=NT/30. -detrend disables -demed, and includes -unif.') + desc="instead of demed (0th order detrending), detrend to the " + "specified order. If order is not given, the program picks " + "q=NT/30. -detrend disables -demed, and includes -unif.", + ) demed = traits.Bool( False, - argstr='-demed', - xor=['detrend'], - desc='If the input dataset has more than one sub-brick (e.g., has a ' - 'time axis), then subtract the median of each voxel\'s time ' - 'series before processing FWHM. This will tend to remove ' - 'intrinsic spatial structure and leave behind the noise.') + argstr="-demed", + xor=["detrend"], + desc="If the input dataset has more than one sub-brick (e.g., has a " + "time axis), then subtract the median of each voxel's time " + "series before processing FWHM. This will tend to remove " + "intrinsic spatial structure and leave behind the noise.", + ) unif = traits.Bool( False, - argstr='-unif', - desc='If the input dataset has more than one sub-brick, then ' - 'normalize each voxel\'s time series to have the same MAD before ' - 'processing FWHM.') + argstr="-unif", + desc="If the input dataset has more than one sub-brick, then " + "normalize each voxel's time series to have the same MAD before " + "processing FWHM.", + ) out_detrend = File( - argstr='-detprefix %s', - name_source='in_file', - name_template='%s_detrend', + argstr="-detprefix %s", + name_source="in_file", + name_template="%s_detrend", keep_extension=False, - desc='Save the detrended file into a dataset') + desc="Save the detrended file into a dataset", + ) geom = traits.Bool( - argstr='-geom', - xor=['arith'], - desc='if in_file has more than one sub-brick, compute the final ' - 'estimate as the geometric mean of the individual sub-brick FWHM ' - 'estimates') + argstr="-geom", + xor=["arith"], + desc="if in_file has more than one sub-brick, compute the final " + "estimate as the geometric mean of the individual sub-brick FWHM " + "estimates", + ) arith = traits.Bool( - argstr='-arith', - xor=['geom'], - desc='if in_file has more than one sub-brick, compute the final ' - 'estimate as the arithmetic mean of the individual sub-brick ' - 'FWHM estimates') + argstr="-arith", + xor=["geom"], + desc="if in_file has more than one sub-brick, compute the final " + "estimate as the arithmetic mean of the individual sub-brick " + "FWHM estimates", + ) combine = traits.Bool( - argstr='-combine', - desc='combine the final measurements along each axis') - compat = traits.Bool( - argstr='-compat', desc='be compatible with the older 3dFWHM') + argstr="-combine", 
desc="combine the final measurements along each axis" + ) + compat = traits.Bool(argstr="-compat", desc="be compatible with the older 3dFWHM") acf = traits.Either( traits.Bool(), File(), traits.Tuple(File(exists=True), traits.Float()), default=False, usedefault=True, - argstr='-acf', - desc='computes the spatial autocorrelation') + argstr="-acf", + desc="computes the spatial autocorrelation", + ) class FWHMxOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='output file') - out_subbricks = File(exists=True, desc='output file (subbricks)') - out_detrend = File(desc='output file, detrended') + out_file = File(exists=True, desc="output file") + out_subbricks = File(exists=True, desc="output file (subbricks)") + out_detrend = File(desc="output file, detrended") fwhm = traits.Either( traits.Tuple(traits.Float(), traits.Float(), traits.Float()), - traits.Tuple(traits.Float(), traits.Float(), traits.Float(), - traits.Float()), - desc='FWHM along each axis') + traits.Tuple(traits.Float(), traits.Float(), traits.Float(), traits.Float()), + desc="FWHM along each axis", + ) acf_param = traits.Either( traits.Tuple(traits.Float(), traits.Float(), traits.Float()), - traits.Tuple(traits.Float(), traits.Float(), traits.Float(), - traits.Float()), - desc='fitted ACF model parameters') - out_acf = File(exists=True, desc='output acf file') + traits.Tuple(traits.Float(), traits.Float(), traits.Float(), traits.Float()), + desc="fitted ACF model parameters", + ) + out_acf = File(exists=True, desc="output acf file") class FWHMx(AFNICommandBase): @@ -1259,20 +1342,22 @@ class FWHMx(AFNICommandBase): """ - _cmd = '3dFWHMx' + + _cmd = "3dFWHMx" input_spec = FWHMxInputSpec output_spec = FWHMxOutputSpec references_ = [ { - 'entry': - BibTeX('@article{CoxReynoldsTaylor2016,' - 'author={R.W. Cox, R.C. Reynolds, and P.A. Taylor},' - 'title={AFNI and clustering: false positive rates redux},' - 'journal={bioRxiv},' - 'year={2016},' - '}'), - 'tags': ['method'], + "entry": BibTeX( + "@article{CoxReynoldsTaylor2016," + "author={R.W. Cox, R.C. Reynolds, and P.A. 
Taylor}," + "title={AFNI and clustering: false positive rates redux}," + "journal={bioRxiv}," + "year={2016}," + "}" + ), + "tags": ["method"], }, ] _acf = True @@ -1281,20 +1366,20 @@ def _parse_inputs(self, skip=None): if not self.inputs.detrend: if skip is None: skip = [] - skip += ['out_detrend'] + skip += ["out_detrend"] return super(FWHMx, self)._parse_inputs(skip=skip) def _format_arg(self, name, trait_spec, value): - if name == 'detrend': + if name == "detrend": if isinstance(value, bool): if value: return trait_spec.argstr else: return None elif isinstance(value, int): - return trait_spec.argstr + ' %d' % value + return trait_spec.argstr + " %d" % value - if name == 'acf': + if name == "acf": if isinstance(value, bool): if value: return trait_spec.argstr @@ -1302,9 +1387,9 @@ def _format_arg(self, name, trait_spec, value): self._acf = False return None elif isinstance(value, tuple): - return trait_spec.argstr + ' %s %f' % value + return trait_spec.argstr + " %s %f" % value elif isinstance(value, (str, bytes)): - return trait_spec.argstr + ' ' + value + return trait_spec.argstr + " " + value return super(FWHMx, self)._format_arg(name, trait_spec, value) def _list_outputs(self): @@ -1312,28 +1397,28 @@ def _list_outputs(self): if self.inputs.detrend: fname, ext = op.splitext(self.inputs.in_file) - if '.gz' in ext: + if ".gz" in ext: _, ext2 = op.splitext(fname) ext = ext2 + ext - outputs['out_detrend'] += ext + outputs["out_detrend"] += ext else: - outputs['out_detrend'] = Undefined + outputs["out_detrend"] = Undefined - sout = np.loadtxt(outputs['out_file']) + sout = np.loadtxt(outputs["out_file"]) # handle newer versions of AFNI if sout.size == 8: - outputs['fwhm'] = tuple(sout[0, :]) + outputs["fwhm"] = tuple(sout[0, :]) else: - outputs['fwhm'] = tuple(sout) + outputs["fwhm"] = tuple(sout) if self._acf: assert sout.size == 8, "Wrong number of elements in %s" % str(sout) - outputs['acf_param'] = tuple(sout[1]) + outputs["acf_param"] = tuple(sout[1]) - outputs['out_acf'] = op.abspath('3dFWHMx.1D') + outputs["out_acf"] = op.abspath("3dFWHMx.1D") if isinstance(self.inputs.acf, (str, bytes)): - outputs['out_acf'] = op.abspath(self.inputs.acf) + outputs["out_acf"] = op.abspath(self.inputs.acf) return outputs @@ -1342,81 +1427,103 @@ class LocalBistatInputSpec(AFNICommandInputSpec): in_file1 = File( exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-2, - desc='Filename of the first image') + desc="Filename of the first image", + ) in_file2 = File( exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-1, - desc='Filename of the second image') + desc="Filename of the second image", + ) neighborhood = traits.Either( - traits.Tuple(traits.Enum('SPHERE', 'RHDD', 'TOHD'), traits.Float()), - traits.Tuple(traits.Enum('RECT'), traits.Tuple(traits.Float(), - traits.Float(), - traits.Float())), + traits.Tuple(traits.Enum("SPHERE", "RHDD", "TOHD"), traits.Float()), + traits.Tuple( + traits.Enum("RECT"), + traits.Tuple(traits.Float(), traits.Float(), traits.Float()), + ), mandatory=True, - desc='The region around each voxel that will be extracted for ' - 'the statistics calculation. 
Possible regions are: '
-        '\'SPHERE\', \'RHDD\' (rhombic dodecahedron), \'TOHD\' '
-        '(truncated octahedron) with a given radius in mm or '
-        '\'RECT\' (rectangular block) with dimensions to specify in mm.',
-        argstr="-nbhd '%s(%s)'")
-    _stat_names = ['pearson', 'spearman', 'quadrant', 'mutinfo', 'normuti',
-                   'jointent', 'hellinger', 'crU', 'crM', 'crA', 'L2slope',
-                   'L1slope', 'num', 'ALL']
+        desc="The region around each voxel that will be extracted for "
+        "the statistics calculation. Possible regions are: "
+        "'SPHERE', 'RHDD' (rhombic dodecahedron), 'TOHD' "
+        "(truncated octahedron) with a given radius in mm or "
+        "'RECT' (rectangular block) with dimensions to specify in mm.",
+        argstr="-nbhd '%s(%s)'",
+    )
+    _stat_names = [
+        "pearson",
+        "spearman",
+        "quadrant",
+        "mutinfo",
+        "normuti",
+        "jointent",
+        "hellinger",
+        "crU",
+        "crM",
+        "crA",
+        "L2slope",
+        "L1slope",
+        "num",
+        "ALL",
+    ]
     stat = InputMultiPath(
         traits.Enum(_stat_names),
         mandatory=True,
-        desc='statistics to compute. Possible names are :'
-        ' * pearson  = Pearson correlation coefficient'
-        ' * spearman = Spearman correlation coefficient'
-        ' * quadrant = Quadrant correlation coefficient'
-        ' * mutinfo  = Mutual Information'
-        ' * normuti  = Normalized Mutual Information'
-        ' * jointent = Joint entropy'
-        ' * hellinger= Hellinger metric'
-        ' * crU      = Correlation ratio (Unsymmetric)'
-        ' * crM      = Correlation ratio (symmetrized by Multiplication)'
-        ' * crA      = Correlation ratio (symmetrized by Addition)'
-        ' * L2slope  = slope of least-squares (L2) linear regression of '
-        '              the data from dataset1 vs. the dataset2 '
-        '              (i.e., d2 = a + b*d1 ==> this is \'b\')'
-        ' * L1slope  = slope of least-absolute-sum (L1) linear '
-        '              regression of the data from dataset1 vs. '
-        '              the dataset2'
-        ' * num      = number of the values in the region: '
-        '              with the use of -mask or -automask, '
-        '              the size of the region around any given '
-        '              voxel will vary; this option lets you '
-        '              map that size.'
-        ' * ALL      = all of the above, in that order'
-        'More than one option can be used.',
-        argstr='-stat %s...')
+        desc="statistics to compute. Possible names are:"
+        " * pearson  = Pearson correlation coefficient"
+        " * spearman = Spearman correlation coefficient"
+        " * quadrant = Quadrant correlation coefficient"
+        " * mutinfo  = Mutual Information"
+        " * normuti  = Normalized Mutual Information"
+        " * jointent = Joint entropy"
+        " * hellinger= Hellinger metric"
+        " * crU      = Correlation ratio (Unsymmetric)"
+        " * crM      = Correlation ratio (symmetrized by Multiplication)"
+        " * crA      = Correlation ratio (symmetrized by Addition)"
+        " * L2slope  = slope of least-squares (L2) linear regression of "
+        "              the data from dataset1 vs. the dataset2 "
+        "              (i.e., d2 = a + b*d1 ==> this is 'b')"
+        " * L1slope  = slope of least-absolute-sum (L1) linear "
+        "              regression of the data from dataset1 vs. "
+        "              the dataset2"
+        " * num      = number of the values in the region: "
+        "              with the use of -mask or -automask, "
+        "              the size of the region around any given "
+        "              voxel will vary; this option lets you "
+        "              map that size."
+        " * ALL      = all of the above, in that order. "
+        "More than one option can be used.",
+        argstr="-stat %s...",
+    )
     mask_file = File(
         exists=True,
-        desc='mask image file name. Voxels NOT in the mask will not be used '
-        'in the neighborhood of any voxel. Also, a voxel NOT in the mask '
-        'will have its statistic(s) computed as zero (0).',
-        argstr='-mask %s')
+        desc="mask image file name. Voxels NOT in the mask will not be used "
+        "in the neighborhood of any voxel. 
Also, a voxel NOT in the mask " + "will have its statistic(s) computed as zero (0).", + argstr="-mask %s", + ) automask = traits.Bool( - desc='Compute the mask as in program 3dAutomask.', - argstr='-automask', - xor=['weight_file']) + desc="Compute the mask as in program 3dAutomask.", + argstr="-automask", + xor=["weight_file"], + ) weight_file = File( exists=True, - desc='File name of an image to use as a weight. Only applies to ' - '\'pearson\' statistics.', - argstr='-weight %s', - xor=['automask']) + desc="File name of an image to use as a weight. Only applies to " + "'pearson' statistics.", + argstr="-weight %s", + xor=["automask"], + ) out_file = File( - desc='Output dataset.', - argstr='-prefix %s', - name_source='in_file1', - name_template='%s_bistat', + desc="Output dataset.", + argstr="-prefix %s", + name_source="in_file1", + name_template="%s_bistat", keep_extension=True, - position=0) + position=0, + ) class LocalBistat(AFNICommand): @@ -1442,157 +1549,184 @@ class LocalBistat(AFNICommand): """ - _cmd = '3dLocalBistat' + _cmd = "3dLocalBistat" input_spec = LocalBistatInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, spec, value): - if name == 'neighborhood' and value[0] == 'RECT': - value = ('RECT', '%s,%s,%s' % value[1]) + if name == "neighborhood" and value[0] == "RECT": + value = ("RECT", "%s,%s,%s" % value[1]) return super(LocalBistat, self)._format_arg(name, spec, value) class LocalstatInputSpec(AFNICommandInputSpec): in_file = File( - exists=True, - mandatory=True, - argstr='%s', - position=-1, - desc='input dataset') + exists=True, mandatory=True, argstr="%s", position=-1, desc="input dataset" + ) neighborhood = traits.Either( - traits.Tuple(traits.Enum('SPHERE', 'RHDD', 'TOHD'), traits.Float()), - traits.Tuple(traits.Enum('RECT'), traits.Tuple(traits.Float(), - traits.Float(), - traits.Float())), + traits.Tuple(traits.Enum("SPHERE", "RHDD", "TOHD"), traits.Float()), + traits.Tuple( + traits.Enum("RECT"), + traits.Tuple(traits.Float(), traits.Float(), traits.Float()), + ), mandatory=True, - desc='The region around each voxel that will be extracted for ' - 'the statistics calculation. Possible regions are: ' - '\'SPHERE\', \'RHDD\' (rhombic dodecahedron), \'TOHD\' ' - '(truncated octahedron) with a given radius in mm or ' - '\'RECT\' (rectangular block) with dimensions to specify in mm.', - argstr="-nbhd '%s(%s)'") - _stat_names = ['mean', 'stdev', 'var', 'cvar', 'median', 'MAD', 'min', - 'max', 'absmax', 'num', 'sum', 'FWHM', 'FWHMbar', 'rank', - 'frank', 'P2skew', 'ALL', 'mMP2s', 'mmMP2s'] + desc="The region around each voxel that will be extracted for " + "the statistics calculation. Possible regions are: " + "'SPHERE', 'RHDD' (rhombic dodecahedron), 'TOHD' " + "(truncated octahedron) with a given radius in mm or " + "'RECT' (rectangular block) with dimensions to specify in mm.", + argstr="-nbhd '%s(%s)'", + ) + _stat_names = [ + "mean", + "stdev", + "var", + "cvar", + "median", + "MAD", + "min", + "max", + "absmax", + "num", + "sum", + "FWHM", + "FWHMbar", + "rank", + "frank", + "P2skew", + "ALL", + "mMP2s", + "mmMP2s", + ] stat = InputMultiObject( traits.Either( - traits.Enum(_stat_names), - traits.Tuple(traits.Enum('perc'), - traits.Tuple(traits.Float, traits.Float, traits.Float))), + traits.Enum(_stat_names), + traits.Tuple( + traits.Enum("perc"), + traits.Tuple(traits.Float, traits.Float, traits.Float), + ), + ), mandatory=True, - desc='statistics to compute. 
Possible names are :\n'
-        ' * mean   = average of the values\n'
-        ' * stdev  = standard deviation\n'
-        ' * var    = variance (stdev*stdev)\n'
-        ' * cvar   = coefficient of variation = stdev/fabs(mean)\n'
-        ' * median = median of the values\n'
-        ' * MAD    = median absolute deviation\n'
-        ' * min    = minimum\n'
-        ' * max    = maximum\n'
-        ' * absmax = maximum of the absolute values\n'
-        ' * num    = number of the values in the region:\n'
-        '            with the use of -mask or -automask,'
-        '            the size of the region around any given'
-        '            voxel will vary; this option lets you'
-        '            map that size. It may be useful if you'
-        '            plan to compute a t-statistic (say) from'
-        '            the mean and stdev outputs.\n'
-        ' * sum    = sum of the values in the region\n'
-        ' * FWHM   = compute (like 3dFWHM) image smoothness'
-        '            inside each voxel\'s neighborhood. Results'
-        '            are in 3 sub-bricks: FWHMx, FHWMy, and FWHMz.'
-        '            Places where an output is -1 are locations'
-        '            where the FWHM value could not be computed'
-        '            (e.g., outside the mask).\n'
-        ' * FWHMbar= Compute just the average of the 3 FWHM values'
-        '            (normally would NOT do this with FWHM also).\n'
-        ' * perc:P0:P1:Pstep = \n'
-        '            Compute percentiles between P0 and P1 with a '
-        '            step of Pstep.\n'
-        '            Default P1 is equal to P0 and default P2 = 1\n'
-        ' * rank   = rank of the voxel\'s intensity\n'
-        ' * frank  = rank / number of voxels in neighborhood\n'
-        ' * P2skew = Pearson\'s second skewness coefficient'
-        '            3 * (mean - median) / stdev\n'
-        ' * ALL    = all of the above, in that order '
-        '            (except for FWHMbar and perc).\n'
-        ' * mMP2s  = Exactly the same output as:'
-        '            median, MAD, P2skew,'
-        '            but a little faster\n'
-        ' * mmMP2s = Exactly the same output as:'
-        '            mean, median, MAD, P2skew\n'
-        'More than one option can be used.',
-        argstr='-stat %s...')
+        desc="statistics to compute. Possible names are:\n"
+        " * mean   = average of the values\n"
+        " * stdev  = standard deviation\n"
+        " * var    = variance (stdev*stdev)\n"
+        " * cvar   = coefficient of variation = stdev/fabs(mean)\n"
+        " * median = median of the values\n"
+        " * MAD    = median absolute deviation\n"
+        " * min    = minimum\n"
+        " * max    = maximum\n"
+        " * absmax = maximum of the absolute values\n"
+        " * num    = number of the values in the region:\n"
+        "            with the use of -mask or -automask,"
+        "            the size of the region around any given"
+        "            voxel will vary; this option lets you"
+        "            map that size. It may be useful if you"
+        "            plan to compute a t-statistic (say) from"
+        "            the mean and stdev outputs.\n"
+        " * sum    = sum of the values in the region\n"
+        " * FWHM   = compute (like 3dFWHM) image smoothness"
+        "            inside each voxel's neighborhood. Results"
+        "            are in 3 sub-bricks: FWHMx, FWHMy, and FWHMz."
+ " Places where an output is -1 are locations" + " where the FWHM value could not be computed" + " (e.g., outside the mask).\n" + " * FWHMbar= Compute just the average of the 3 FWHM values" + " (normally would NOT do this with FWHM also).\n" + " * perc:P0:P1:Pstep = \n" + " Compute percentiles between P0 and P1 with a " + " step of Pstep.\n" + " Default P1 is equal to P0 and default P2 = 1\n" + " * rank = rank of the voxel's intensity\n" + " * frank = rank / number of voxels in neighborhood\n" + " * P2skew = Pearson's second skewness coefficient" + " 3 * (mean - median) / stdev\n" + " * ALL = all of the above, in that order " + " (except for FWHMbar and perc).\n" + " * mMP2s = Exactly the same output as:" + " median, MAD, P2skew," + " but a little faster\n" + " * mmMP2s = Exactly the same output as:" + " mean, median, MAD, P2skew\n" + "More than one option can be used.", + argstr="-stat %s...", + ) mask_file = File( exists=True, - desc='Mask image file name. Voxels NOT in the mask will not be used ' - 'in the neighborhood of any voxel. Also, a voxel NOT in the ' - 'mask will have its statistic(s) computed as zero (0) unless ' - 'the parameter \'nonmask\' is set to true.', - argstr='-mask %s') + desc="Mask image file name. Voxels NOT in the mask will not be used " + "in the neighborhood of any voxel. Also, a voxel NOT in the " + "mask will have its statistic(s) computed as zero (0) unless " + "the parameter 'nonmask' is set to true.", + argstr="-mask %s", + ) automask = traits.Bool( - desc='Compute the mask as in program 3dAutomask.', - argstr='-automask') + desc="Compute the mask as in program 3dAutomask.", argstr="-automask" + ) nonmask = traits.Bool( - desc='Voxels not in the mask WILL have their local statistics ' - 'computed from all voxels in their neighborhood that ARE in ' - 'the mask.\n' - ' * For instance, this option can be used to compute the ' - ' average local white matter time series, even at non-WM ' - ' voxels.', - argstr='-use_nonmask') + desc="Voxels not in the mask WILL have their local statistics " + "computed from all voxels in their neighborhood that ARE in " + "the mask.\n" + " * For instance, this option can be used to compute the " + " average local white matter time series, even at non-WM " + " voxels.", + argstr="-use_nonmask", + ) reduce_grid = traits.Either( traits.Float, traits.Tuple(traits.Float, traits.Float, traits.Float), - argstr='-reduce_grid %s', - xor=['reduce_restore_grid', 'reduce_max_vox'], - desc='Compute output on a grid that is reduced by the specified ' - 'factors. If a single value is passed, output is resampled ' - 'to the specified isotropic grid. Otherwise, the 3 inputs ' - 'describe the reduction in the X, Y, and Z directions. This ' - 'option speeds up computations at the expense of resolution. ' - 'It should only be used when the nbhd is quite large with ' - 'respect to the input\'s resolution, and the resultant stats ' - 'are expected to be smooth.') + argstr="-reduce_grid %s", + xor=["reduce_restore_grid", "reduce_max_vox"], + desc="Compute output on a grid that is reduced by the specified " + "factors. If a single value is passed, output is resampled " + "to the specified isotropic grid. Otherwise, the 3 inputs " + "describe the reduction in the X, Y, and Z directions. This " + "option speeds up computations at the expense of resolution. 
" + "It should only be used when the nbhd is quite large with " + "respect to the input's resolution, and the resultant stats " + "are expected to be smooth.", + ) reduce_restore_grid = traits.Either( traits.Float, traits.Tuple(traits.Float, traits.Float, traits.Float), - argstr='-reduce_restore_grid %s', - xor=['reduce_max_vox', 'reduce_grid'], - desc='Like reduce_grid, but also resample output back to input' - 'grid.') + argstr="-reduce_restore_grid %s", + xor=["reduce_max_vox", "reduce_grid"], + desc="Like reduce_grid, but also resample output back to input" "grid.", + ) reduce_max_vox = traits.Float( - argstr='-reduce_max_vox %s', - xor=['reduce_restore_grid', 'reduce_grid'], - desc='Like reduce_restore_grid, but automatically set Rx Ry Rz so' - 'that the computation grid is at a resolution of nbhd/MAX_VOX' - 'voxels.') + argstr="-reduce_max_vox %s", + xor=["reduce_restore_grid", "reduce_grid"], + desc="Like reduce_restore_grid, but automatically set Rx Ry Rz so" + "that the computation grid is at a resolution of nbhd/MAX_VOX" + "voxels.", + ) grid_rmode = traits.Enum( - 'NN', - 'Li', - 'Cu', - 'Bk', - argstr='-grid_rmode %s', - requires=['reduce_restore_grid'], - desc='Interpolant to use when resampling the output with the' - 'reduce_restore_grid option. The resampling method string ' - 'RESAM should come from the set {\'NN\', \'Li\', \'Cu\', ' - '\'Bk\'}. These stand for \'Nearest Neighbor\', \'Linear\', ' - '\'Cubic\', and \'Blocky\' interpolation, respectively.') + "NN", + "Li", + "Cu", + "Bk", + argstr="-grid_rmode %s", + requires=["reduce_restore_grid"], + desc="Interpolant to use when resampling the output with the" + "reduce_restore_grid option. The resampling method string " + "RESAM should come from the set {'NN', 'Li', 'Cu', " + "'Bk'}. These stand for 'Nearest Neighbor', 'Linear', " + "'Cubic', and 'Blocky' interpolation, respectively.", + ) quiet = traits.Bool( - argstr='-quiet', - desc='Stop the highly informative progress reports.') + argstr="-quiet", desc="Stop the highly informative progress reports." 
+    )
     overwrite = traits.Bool(
-        desc='overwrite output file if it already exists',
-        argstr='-overwrite')
+        desc="overwrite output file if it already exists", argstr="-overwrite"
+    )
     out_file = File(
-        desc='Output dataset.',
-        argstr='-prefix %s',
-        name_source='in_file',
-        name_template='%s_localstat',
+        desc="Output dataset.",
+        argstr="-prefix %s",
+        name_source="in_file",
+        name_template="%s_localstat",
         keep_extension=True,
-        position=0)
+        position=0,
+    )


 class Localstat(AFNICommand):
@@ -1618,18 +1752,19 @@ class Localstat(AFNICommand):
     >>> res = localstat.run()  # doctest: +SKIP

     """
-    _cmd = '3dLocalstat'
+
+    _cmd = "3dLocalstat"
     input_spec = LocalstatInputSpec
     output_spec = AFNICommandOutputSpec

     def _format_arg(self, name, spec, value):
-        if name == 'neighborhood' and value[0] == 'RECT':
-            value = ('RECT', '%s,%s,%s' % value[1])
-        if name == 'stat':
-            value = ['perc:%s:%s:%s' % v[1] if len(v) == 2 else v for v in value]
-        if name == 'reduce_grid' or name == 'reduce_restore_grid':
+        if name == "neighborhood" and value[0] == "RECT":
+            value = ("RECT", "%s,%s,%s" % value[1])
+        if name == "stat":
+            value = ["perc:%s:%s:%s" % v[1] if len(v) == 2 else v for v in value]
+        if name == "reduce_grid" or name == "reduce_restore_grid":
             if len(value) == 3:
-                value = '%s %s %s' % value
+                value = "%s %s %s" % value
         return super(Localstat, self)._format_arg(name, spec, value)


@@ -1637,61 +1772,68 @@ def _format_arg(self, name, spec, value):
 class MaskToolInputSpec(AFNICommandInputSpec):
     in_file = InputMultiPath(
         File(exists=True),
-        desc='input file or files to 3dmask_tool',
-        argstr='-input %s',
+        desc="input file or files to 3dmask_tool",
+        argstr="-input %s",
         position=-1,
         mandatory=True,
-        copyfile=False)
+        copyfile=False,
+    )
     out_file = File(
-        name_template='%s_mask',
-        desc='output image file name',
-        argstr='-prefix %s',
-        name_source='in_file')
+        name_template="%s_mask",
+        desc="output image file name",
+        argstr="-prefix %s",
+        name_source="in_file",
+    )
     count = traits.Bool(
-        desc='Instead of created a binary 0/1 mask dataset, create one with '
-        'counts of voxel overlap, i.e., each voxel will contain the '
-        'number of masks that it is set in.',
-        argstr='-count',
-        position=2)
+        desc="Instead of creating a binary 0/1 mask dataset, create one with "
+        "counts of voxel overlap, i.e., each voxel will contain the "
+        "number of masks that it is set in.",
+        argstr="-count",
+        position=2,
+    )
     datum = traits.Enum(
-        'byte',
-        'short',
-        'float',
-        argstr='-datum %s',
-        desc='specify data type for output. Valid types are \'byte\', '
-        '\'short\' and \'float\'.')
+        "byte",
+        "short",
+        "float",
+        argstr="-datum %s",
+        desc="specify data type for output. Valid types are 'byte', "
+        "'short' and 'float'.",
+    )
     dilate_inputs = Str(
-        desc='Use this option to dilate and/or erode datasets as they are '
-        'read. ex. \'5 -5\' to dilate and erode 5 times',
-        argstr='-dilate_inputs %s')
+        desc="Use this option to dilate and/or erode datasets as they are "
+        "read. ex. '5 -5' to dilate and erode 5 times",
+        argstr="-dilate_inputs %s",
+    )
     dilate_results = Str(
-        desc='dilate and/or erode combined mask at the given levels.',
-        argstr='-dilate_results %s')
+        desc="dilate and/or erode combined mask at the given levels.",
+        argstr="-dilate_results %s",
+    )
     frac = traits.Float(
-        desc='When combining masks (across datasets and sub-bricks), use '
-        'this option to restrict the result to a certain fraction of the '
-        'set of volumes',
-        argstr='-frac %s')
-    inter = traits.Bool(
-        desc='intersection, this means -frac 1.0', argstr='-inter')
-    union = traits.Bool(desc='union, this means -frac 0', argstr='-union')
+        desc="When combining masks (across datasets and sub-bricks), use "
+        "this option to restrict the result to a certain fraction of the "
+        "set of volumes",
+        argstr="-frac %s",
+    )
+    inter = traits.Bool(desc="intersection, this means -frac 1.0", argstr="-inter")
+    union = traits.Bool(desc="union, this means -frac 0", argstr="-union")
     fill_holes = traits.Bool(
-        desc='This option can be used to fill holes in the resulting mask, '
-        'i.e. after all other processing has been done.',
-        argstr='-fill_holes')
+        desc="This option can be used to fill holes in the resulting mask, "
+        "i.e. after all other processing has been done.",
+        argstr="-fill_holes",
+    )
     fill_dirs = Str(
-        desc='fill holes only in the given directions. This option is for use '
-        'with -fill holes. should be a single string that specifies '
-        '1-3 of the axes using {x,y,z} labels (i.e. dataset axis order), '
-        'or using the labels in {R,L,A,P,I,S}.',
-        argstr='-fill_dirs %s',
-        requires=['fill_holes'])
-    verbose = traits.Int(
-        desc='specify verbosity level, for 0 to 3', argstr='-verb %s')
+        desc="fill holes only in the given directions. This option is for use "
+        "with -fill_holes. Should be a single string that specifies "
+        "1-3 of the axes using {x,y,z} labels (i.e. 
dataset axis order), " + "or using the labels in {R,L,A,P,I,S}.", + argstr="-fill_dirs %s", + requires=["fill_holes"], + ) + verbose = traits.Int(desc="specify verbosity level, for 0 to 3", argstr="-verb %s") class MaskToolOutputSpec(TraitedSpec): - out_file = File(desc='mask file', exists=True) + out_file = File(desc="mask file", exists=True) class MaskTool(AFNICommand): @@ -1713,27 +1855,31 @@ class MaskTool(AFNICommand): """ - _cmd = '3dmask_tool' + _cmd = "3dmask_tool" input_spec = MaskToolInputSpec output_spec = MaskToolOutputSpec class MergeInputSpec(AFNICommandInputSpec): in_files = InputMultiPath( - File(desc='input file to 3dmerge', exists=True), - argstr='%s', + File(desc="input file to 3dmerge", exists=True), + argstr="%s", position=-1, mandatory=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_merge', - desc='output image file name', - argstr='-prefix %s', - name_source='in_files') + name_template="%s_merge", + desc="output image file name", + argstr="-prefix %s", + name_source="in_files", + ) doall = traits.Bool( - desc='apply options to all sub-bricks in dataset', argstr='-doall') + desc="apply options to all sub-bricks in dataset", argstr="-doall" + ) blurfwhm = traits.Int( - desc='FWHM blur value (mm)', argstr='-1blur_fwhm %d', units='mm') + desc="FWHM blur value (mm)", argstr="-1blur_fwhm %d", units="mm" + ) class Merge(AFNICommand): @@ -1757,29 +1903,32 @@ class Merge(AFNICommand): """ - _cmd = '3dmerge' + _cmd = "3dmerge" input_spec = MergeInputSpec output_spec = AFNICommandOutputSpec class NotesInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dNotes', - argstr='%s', + desc="input file to 3dNotes", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) - add = Str(desc='note to add', argstr='-a "%s"') + copyfile=False, + ) + add = Str(desc="note to add", argstr='-a "%s"') add_history = Str( - desc='note to add to history', argstr='-h "%s"', xor=['rep_history']) + desc="note to add to history", argstr='-h "%s"', xor=["rep_history"] + ) rep_history = Str( - desc='note with which to replace history', + desc="note with which to replace history", argstr='-HH "%s"', - xor=['add_history']) - delete = traits.Int(desc='delete note number num', argstr='-d %d') - ses = traits.Bool(desc='print to stdout the expanded notes', argstr='-ses') - out_file = File(desc='output image file name', argstr='%s') + xor=["add_history"], + ) + delete = traits.Int(desc="delete note number num", argstr="-d %d") + ses = traits.Bool(desc="print to stdout the expanded notes", argstr="-ses") + out_file = File(desc="output image file name", argstr="%s") class Notes(CommandLine): @@ -1801,13 +1950,13 @@ class Notes(CommandLine): >>> res = notes.run() # doctest: +SKIP """ - _cmd = '3dNotes' + _cmd = "3dNotes" input_spec = NotesInputSpec output_spec = AFNICommandOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.in_file) + outputs["out_file"] = os.path.abspath(self.inputs.in_file) return outputs @@ -1816,24 +1965,27 @@ class NwarpAdjustInputSpec(AFNICommandInputSpec): File(exists=True), minlen=5, mandatory=True, - argstr='-nwarp %s', - desc='List of input 3D warp datasets') + argstr="-nwarp %s", + desc="List of input 3D warp datasets", + ) in_files = InputMultiPath( File(exists=True), minlen=5, - argstr='-source %s', - desc='List of input 3D datasets to be warped by the adjusted warp ' - 'datasets. 
There must be exactly as many of these datasets as ' - 'there are input warps.') + argstr="-source %s", + desc="List of input 3D datasets to be warped by the adjusted warp " + "datasets. There must be exactly as many of these datasets as " + "there are input warps.", + ) out_file = File( - desc='Output mean dataset, only needed if in_files are also given. ' - 'The output dataset will be on the common grid shared by the ' - 'source datasets.', - argstr='-prefix %s', - name_source='in_files', - name_template='%s_NwarpAdjust', + desc="Output mean dataset, only needed if in_files are also given. " + "The output dataset will be on the common grid shared by the " + "source datasets.", + argstr="-prefix %s", + name_source="in_files", + name_template="%s_NwarpAdjust", keep_extension=True, - requires=['in_files']) + requires=["in_files"], + ) class NwarpAdjust(AFNICommandBase): @@ -1858,7 +2010,8 @@ class NwarpAdjust(AFNICommandBase): >>> res = adjust.run() # doctest: +SKIP """ - _cmd = '3dNwarpAdjust' + + _cmd = "3dNwarpAdjust" input_spec = NwarpAdjustInputSpec output_spec = AFNICommandOutputSpec @@ -1866,7 +2019,7 @@ def _parse_inputs(self, skip=None): if not self.inputs.in_files: if skip is None: skip = [] - skip += ['out_file'] + skip += ["out_file"] return super(NwarpAdjust, self)._parse_inputs(skip=skip) def _list_outputs(self): @@ -1874,15 +2027,16 @@ def _list_outputs(self): if self.inputs.in_files: if self.inputs.out_file: - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) else: basename = os.path.basename(self.inputs.in_files[0]) basename_noext, ext = op.splitext(basename) - if '.gz' in ext: + if ".gz" in ext: basename_noext, ext2 = op.splitext(basename_noext) ext = ext2 + ext - outputs['out_file'] = os.path.abspath( - basename_noext + '_NwarpAdjust' + ext) + outputs["out_file"] = os.path.abspath( + basename_noext + "_NwarpAdjust" + ext + ) return outputs @@ -1891,62 +2045,67 @@ class NwarpApplyInputSpec(CommandLineInputSpec): File(exists=True), traits.List(File(exists=True)), mandatory=True, - argstr='-source %s', - desc='the name of the dataset to be warped ' - 'can be multiple datasets') + argstr="-source %s", + desc="the name of the dataset to be warped " "can be multiple datasets", + ) warp = traits.String( - desc='the name of the warp dataset. ' - 'multiple warps can be concatenated (make sure they exist)', - argstr='-nwarp %s', - mandatory=True) + desc="the name of the warp dataset. 
" + "multiple warps can be concatenated (make sure they exist)", + argstr="-nwarp %s", + mandatory=True, + ) inv_warp = traits.Bool( - desc='After the warp specified in \'-nwarp\' is computed, invert it', - argstr='-iwarp') + desc="After the warp specified in '-nwarp' is computed, invert it", + argstr="-iwarp", + ) master = File( exists=True, - desc='the name of the master dataset, which defines the output grid', - argstr='-master %s') + desc="the name of the master dataset, which defines the output grid", + argstr="-master %s", + ) interp = traits.Enum( - 'wsinc5', - 'NN', - 'nearestneighbour', - 'nearestneighbor', - 'linear', - 'trilinear', - 'cubic', - 'tricubic', - 'quintic', - 'triquintic', - desc='defines interpolation method to use during warp', - argstr='-interp %s', - usedefault=True) + "wsinc5", + "NN", + "nearestneighbour", + "nearestneighbor", + "linear", + "trilinear", + "cubic", + "tricubic", + "quintic", + "triquintic", + desc="defines interpolation method to use during warp", + argstr="-interp %s", + usedefault=True, + ) ainterp = traits.Enum( - 'NN', - 'nearestneighbour', - 'nearestneighbor', - 'linear', - 'trilinear', - 'cubic', - 'tricubic', - 'quintic', - 'triquintic', - 'wsinc5', - desc='specify a different interpolation method than might ' - 'be used for the warp', - argstr='-ainterp %s') + "NN", + "nearestneighbour", + "nearestneighbor", + "linear", + "trilinear", + "cubic", + "tricubic", + "quintic", + "triquintic", + "wsinc5", + desc="specify a different interpolation method than might " + "be used for the warp", + argstr="-ainterp %s", + ) out_file = File( - name_template='%s_Nwarp', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') + name_template="%s_Nwarp", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) short = traits.Bool( - desc='Write output dataset using 16-bit short integers, rather than ' - 'the usual 32-bit floats.', - argstr='-short') - quiet = traits.Bool( - desc='don\'t be verbose :(', argstr='-quiet', xor=['verb']) - verb = traits.Bool( - desc='be extra verbose :)', argstr='-verb', xor=['quiet']) + desc="Write output dataset using 16-bit short integers, rather than " + "the usual 32-bit floats.", + argstr="-short", + ) + quiet = traits.Bool(desc="don't be verbose :(", argstr="-quiet", xor=["verb"]) + verb = traits.Bool(desc="be extra verbose :)", argstr="-verb", xor=["quiet"]) class NwarpApply(AFNICommandBase): @@ -1970,46 +2129,49 @@ class NwarpApply(AFNICommandBase): >>> res = nwarp.run() # doctest: +SKIP """ - _cmd = '3dNwarpApply' + + _cmd = "3dNwarpApply" input_spec = NwarpApplyInputSpec output_spec = AFNICommandOutputSpec class NwarpCatInputSpec(AFNICommandInputSpec): in_files = traits.List( - traits.Either(File(), - traits.Tuple( - traits.Enum('IDENT', 'INV', 'SQRT', 'SQRTINV'), - File())), + traits.Either( + File(), traits.Tuple(traits.Enum("IDENT", "INV", "SQRT", "SQRTINV"), File()) + ), desc="list of tuples of 3D warps and associated functions", mandatory=True, argstr="%s", - position=-1) + position=-1, + ) space = traits.String( - desc='string to attach to the output dataset as its atlas space ' - 'marker.', - argstr='-space %s') - inv_warp = traits.Bool( - desc='invert the final warp before output', argstr='-iwarp') + desc="string to attach to the output dataset as its atlas space " "marker.", + argstr="-space %s", + ) + inv_warp = traits.Bool(desc="invert the final warp before output", argstr="-iwarp") interp = traits.Enum( - 'wsinc5', - 'linear', - 'quintic', - 
desc='specify a different interpolation method than might '
-        'be used for the warp',
-        argstr='-interp %s',
-        usedefault=True)
+        "wsinc5",
+        "linear",
+        "quintic",
+        desc="specify a different interpolation method than might "
+        "be used for the warp",
+        argstr="-interp %s",
+        usedefault=True,
+    )
     expad = traits.Int(
-        desc='Pad the nonlinear warps by the given number of voxels voxels in '
-        'all directions. The warp displacements are extended by linear '
-        'extrapolation from the faces of the input grid..',
-        argstr='-expad %d')
+        desc="Pad the nonlinear warps by the given number of voxels in "
+        "all directions. The warp displacements are extended by linear "
+        "extrapolation from the faces of the input grid.",
+        argstr="-expad %d",
+    )
     out_file = File(
-        name_template='%s_NwarpCat',
-        desc='output image file name',
-        argstr='-prefix %s',
-        name_source='in_files')
-    verb = traits.Bool(desc='be verbose', argstr='-verb')
+        name_template="%s_NwarpCat",
+        desc="output image file name",
+        argstr="-prefix %s",
+        name_source="in_files",
+    )
+    verb = traits.Bool(desc="be verbose", argstr="-verb")


 class NwarpCat(AFNICommand):
@@ -2064,91 +2226,99 @@ class NwarpCat(AFNICommand):
     >>> res = nwarpcat.run()  # doctest: +SKIP

     """
-    _cmd = '3dNwarpCat'
+
+    _cmd = "3dNwarpCat"
    input_spec = NwarpCatInputSpec
    output_spec = AFNICommandOutputSpec

    def _format_arg(self, name, spec, value):
-        if name == 'in_files':
-            return spec.argstr % (' '.join([
-                "'" + v[0] + "(" + v[1] + ")'" if isinstance(v, tuple) else v
-                for v in value
-            ]))
+        if name == "in_files":
+            return spec.argstr % (
+                " ".join(
+                    [
+                        "'" + v[0] + "(" + v[1] + ")'" if isinstance(v, tuple) else v
+                        for v in value
+                    ]
+                )
+            )
        return super(NwarpCat, self)._format_arg(name, spec, value)

    def _gen_filename(self, name):
-        if name == 'out_file':
-            return self._gen_fname(
-                self.inputs.in_files[0][0], suffix='_NwarpCat')
+        if name == "out_file":
+            return self._gen_fname(self.inputs.in_files[0][0], suffix="_NwarpCat")

    def _list_outputs(self):
        outputs = self.output_spec().get()
        if isdefined(self.inputs.out_file):
-            outputs['out_file'] = os.path.abspath(self.inputs.out_file)
+            outputs["out_file"] = os.path.abspath(self.inputs.out_file)
        else:
-            outputs['out_file'] = os.path.abspath(
+            outputs["out_file"] = os.path.abspath(
                self._gen_fname(
-                    self.inputs.in_files[0],
-                    suffix='_NwarpCat+tlrc',
-                    ext='.HEAD'))
+                    self.inputs.in_files[0], suffix="_NwarpCat+tlrc", ext=".HEAD"
+                )
+            )
        return outputs


 class OneDToolPyInputSpec(AFNIPythonCommandInputSpec):
    in_file = File(
-        desc='input file to OneDTool',
-        argstr='-infile %s',
-        mandatory=True,
-        exists=True)
+        desc="input file to OneDTool", argstr="-infile %s", mandatory=True, exists=True
+    )
    set_nruns = traits.Int(
-        desc='treat the input data as if it has nruns', argstr='-set_nruns %d')
+        desc="treat the input data as if it has nruns", argstr="-set_nruns %d"
+    )
    derivative = traits.Bool(
-        desc=
-        'take the temporal derivative of each vector (done as first backward difference)',
-        argstr='-derivative')
+        desc="take the temporal derivative of each vector (done as first backward difference)",
+        argstr="-derivative",
+    )
    demean = traits.Bool(
-        desc='demean each run (new mean of each run = 0.0)', argstr='-demean')
+        desc="demean each run (new mean of each run = 0.0)", argstr="-demean"
+    )
    out_file = File(
-        desc='write the current 1D data to FILE',
-        argstr='-write %s',
-        xor=['show_cormat_warnings'])
+        desc="write the current 1D data to FILE",
+        argstr="-write %s",
+        xor=["show_cormat_warnings"],
+    )
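# --- Editor's aside, not part of the patch: a minimal, self-contained sketch of
# the quoting logic that NwarpCat._format_arg (reformatted above) applies to its
# 'in_files' entries. The warp file names below are hypothetical and chosen only
# to show how (operation, file) tuples are rendered for the 3dNwarpCat command.
def _render_nwarpcat_in_files(value):
    # Tuples become 'OP(file)', single-quoted for the shell; bare paths pass through.
    return " ".join(
        "'" + v[0] + "(" + v[1] + ")'" if isinstance(v, tuple) else v for v in value
    )

# _render_nwarpcat_in_files([("INV", "warp1.nii"), "warp2.nii"])
# -> "'INV(warp1.nii)' warp2.nii"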
    show_censor_count = traits.Bool(
-        desc=
-        'display the total number of censored TRs Note : if input is a valid xmat.1D dataset, '
-        'then the count will come from the header. Otherwise the input is assumed to be a binary censor'
-        'file, and zeros are simply counted.',
-        argstr="-show_censor_count")
+        desc="display the total number of censored TRs. Note: if input is a valid xmat.1D dataset, "
+        "then the count will come from the header. Otherwise the input is assumed to be a binary censor "
+        "file, and zeros are simply counted.",
+        argstr="-show_censor_count",
+    )
     censor_motion = traits.Tuple(
         (traits.Float(), File()),
-        desc=
-        'Tuple of motion limit and outfile prefix. need to also set set_nruns -r set_run_lengths',
-        argstr="-censor_motion %f %s")
+        desc="Tuple of motion limit and outfile prefix. Need to also set set_nruns or set_run_lengths",
+        argstr="-censor_motion %f %s",
+    )
     censor_prev_TR = traits.Bool(
-        desc='for each censored TR, also censor previous',
-        argstr='-censor_prev_TR')
+        desc="for each censored TR, also censor previous", argstr="-censor_prev_TR"
+    )
     show_trs_uncensored = traits.Enum(
-        'comma',
-        'space',
-        'encoded',
-        'verbose',
-        desc=
-        'display a list of TRs which were not censored in the specified style',
-        argstr='-show_trs_uncensored %s')
+        "comma",
+        "space",
+        "encoded",
+        "verbose",
+        desc="display a list of TRs which were not censored in the specified style",
+        argstr="-show_trs_uncensored %s",
+    )
     show_cormat_warnings = File(
-        desc='Write cormat warnings to a file',
+        desc="Write cormat warnings to a file",
         argstr="-show_cormat_warnings |& tee %s",
         position=-1,
-        xor=['out_file'])
+        xor=["out_file"],
+    )
     show_indices_interest = traits.Bool(
         desc="display column indices for regs of interest",
-        argstr="-show_indices_interest")
+        argstr="-show_indices_interest",
+    )
     show_trs_run = traits.Int(
         desc="restrict -show_trs_[un]censored to the given 1-based run",
-        argstr="-show_trs_run %d")
+        argstr="-show_trs_run %d",
+    )


 class OneDToolPyOutputSpec(AFNICommandOutputSpec):
-    out_file = File(desc='output of 1D_tool.py')
+    out_file = File(desc="output of 1D_tool.py")


 class OneDToolPy(AFNIPythonCommand):
@@ -2166,7 +2336,7 @@ class OneDToolPy(AFNIPythonCommand):
     >>> res = odt.run()  # doctest: +SKIP
     """

-    _cmd = '1d_tool.py'
+    _cmd = "1d_tool.py"
     input_spec = OneDToolPyInputSpec
     output_spec = OneDToolPyOutputSpec

@@ -2175,95 +2345,102 @@ def _list_outputs(self):
         outputs = self.output_spec().get()

         if isdefined(self.inputs.out_file):
-            outputs['out_file'] = os.path.join(os.getcwd(),
-                                               self.inputs.out_file)
+            outputs["out_file"] = os.path.join(os.getcwd(), self.inputs.out_file)
         if isdefined(self.inputs.show_cormat_warnings):
-            outputs['out_file'] = os.path.join(
-                os.getcwd(), self.inputs.show_cormat_warnings)
+            outputs["out_file"] = os.path.join(
+                os.getcwd(), self.inputs.show_cormat_warnings
+            )
         if isdefined(self.inputs.censor_motion):
-            outputs['out_file'] = os.path.join(os.getcwd(),
-                                               self.inputs.censor_motion[1] +
-                                               '_censor.1D')
+            outputs["out_file"] = os.path.join(
+                os.getcwd(), self.inputs.censor_motion[1] + "_censor.1D"
+            )
         return outputs


 class RefitInputSpec(CommandLineInputSpec):
     in_file = File(
-        desc='input file to 3drefit',
-        argstr='%s',
+        desc="input file to 3drefit",
+        argstr="%s",
         position=-1,
         mandatory=True,
         exists=True,
-        copyfile=True)
+        copyfile=True,
+    )
     deoblique = traits.Bool(
-        desc='replace current transformation matrix with cardinal matrix',
-        argstr='-deoblique')
-    xorigin = Str(
-        desc='x distance for edge voxel offset', argstr='-xorigin %s')
-    yorigin = Str(
-        desc='y distance for edge voxel offset', argstr='-yorigin %s')
-    zorigin = Str(
-        desc='z distance for edge voxel offset', argstr='-zorigin %s')
+        desc="replace current transformation matrix with cardinal matrix",
+        argstr="-deoblique",
+    )
+    xorigin = Str(desc="x distance for edge voxel offset", argstr="-xorigin %s")
+    yorigin = Str(desc="y distance for edge voxel offset", argstr="-yorigin %s")
+    zorigin = Str(desc="z distance for edge voxel offset", argstr="-zorigin %s")
     duporigin_file = File(
-        argstr='-duporigin %s',
+        argstr="-duporigin %s",
         exists=True,
-        desc='Copies the xorigin, yorigin, and zorigin values from the header '
-        'of the given dataset')
-    xdel = traits.Float(desc='new x voxel dimension in mm', argstr='-xdel %f')
-    ydel = traits.Float(desc='new y voxel dimension in mm', argstr='-ydel %f')
-    zdel = traits.Float(desc='new z voxel dimension in mm', argstr='-zdel %f')
+        desc="Copies the xorigin, yorigin, and zorigin values from the header "
+        "of the given dataset",
+    )
+    xdel = traits.Float(desc="new x voxel dimension in mm", argstr="-xdel %f")
+    ydel = traits.Float(desc="new y voxel dimension in mm", argstr="-ydel %f")
+    zdel = traits.Float(desc="new z voxel dimension in mm", argstr="-zdel %f")
     xyzscale = traits.Float(
-        desc='Scale the size of the dataset voxels by the given factor',
-        argstr='-xyzscale %f')
+        desc="Scale the size of the dataset voxels by the given factor",
+        argstr="-xyzscale %f",
+    )
     space = traits.Enum(
-        'TLRC',
-        'MNI',
-        'ORIG',
-        argstr='-space %s',
-        desc='Associates the dataset with a specific template type, e.g. '
-        'TLRC, MNI, ORIG')
+        "TLRC",
+        "MNI",
+        "ORIG",
+        argstr="-space %s",
+        desc="Associates the dataset with a specific template type, e.g. "
+        "TLRC, MNI, ORIG",
+    )
     atrcopy = traits.Tuple(
         File(exists=True),
         traits.Str(),
-        argstr='-atrcopy %s %s',
-        desc='Copy AFNI header attribute from the given file into the header '
-        'of the dataset(s) being modified. For more information on AFNI '
-        'header attributes, see documentation file README.attributes. '
-        'More than one \'-atrcopy\' option can be used. For AFNI '
-        'advanced users only. Do NOT use -atrcopy or -atrstring with '
-        'other modification options. See also -copyaux.')
+        argstr="-atrcopy %s %s",
+        desc="Copy AFNI header attribute from the given file into the header "
+        "of the dataset(s) being modified. For more information on AFNI "
+        "header attributes, see documentation file README.attributes. "
+        "More than one '-atrcopy' option can be used. For AFNI "
+        "advanced users only. Do NOT use -atrcopy or -atrstring with "
+        "other modification options. See also -copyaux.",
+    )
     atrstring = traits.Tuple(
         traits.Str(),
         traits.Str(),
-        argstr='-atrstring %s %s',
-        desc='Copy the last given string into the dataset(s) being modified, '
-        'giving it the attribute name given by the last string.'
-        'To be safe, the last string should be in quotes.')
+        argstr="-atrstring %s %s",
+        desc="Copy the last given string into the dataset(s) being modified, "
+        "giving it the attribute name given by the last string. "
+        "To be safe, the last string should be in quotes.",
+    )
     atrfloat = traits.Tuple(
         traits.Str(),
         traits.Str(),
-        argstr='-atrfloat %s %s',
-        desc='Create or modify floating point attributes. 
' - 'The input values may be specified as a single string in quotes ' - 'or as a 1D filename or string, example ' - '\'1 0.2 0 0 -0.2 1 0 0 0 0 1 0\' or ' - 'flipZ.1D or \'1D:1,0.2,2@0,-0.2,1,2@0,2@0,1,0\'') + argstr="-atrfloat %s %s", + desc="Create or modify floating point attributes. " + "The input values may be specified as a single string in quotes " + "or as a 1D filename or string, example " + "'1 0.2 0 0 -0.2 1 0 0 0 0 1 0' or " + "flipZ.1D or '1D:1,0.2,2@0,-0.2,1,2@0,2@0,1,0'", + ) atrint = traits.Tuple( traits.Str(), traits.Str(), - argstr='-atrint %s %s', - desc='Create or modify integer attributes. ' - 'The input values may be specified as a single string in quotes ' - 'or as a 1D filename or string, example ' - '\'1 0 0 0 0 1 0 0 0 0 1 0\' or ' - 'flipZ.1D or \'1D:1,0,2@0,-0,1,2@0,2@0,1,0\'') + argstr="-atrint %s %s", + desc="Create or modify integer attributes. " + "The input values may be specified as a single string in quotes " + "or as a 1D filename or string, example " + "'1 0 0 0 0 1 0 0 0 0 1 0' or " + "flipZ.1D or '1D:1,0,2@0,-0,1,2@0,2@0,1,0'", + ) saveatr = traits.Bool( - argstr='-saveatr', - desc='(default) Copy the attributes that are known to AFNI into ' - 'the dset->dblk structure thereby forcing changes to known ' - 'attributes to be present in the output. This option only makes ' - 'sense with -atrcopy.') - nosaveatr = traits.Bool(argstr='-nosaveatr', desc='Opposite of -saveatr') + argstr="-saveatr", + desc="(default) Copy the attributes that are known to AFNI into " + "the dset->dblk structure thereby forcing changes to known " + "attributes to be present in the output. This option only makes " + "sense with -atrcopy.", + ) + nosaveatr = traits.Bool(argstr="-nosaveatr", desc="Opposite of -saveatr") class Refit(AFNICommandBase): @@ -2290,93 +2467,101 @@ class Refit(AFNICommandBase): "3drefit -atrfloat IJK_TO_DICOM_REAL '1 0.2 0 0 -0.2 1 0 0 0 0 1 0' structural.nii" >>> res = refit_2.run() # doctest: +SKIP """ - _cmd = '3drefit' + + _cmd = "3drefit" input_spec = RefitInputSpec output_spec = AFNICommandOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.in_file) + outputs["out_file"] = os.path.abspath(self.inputs.in_file) return outputs class ReHoInputSpec(CommandLineInputSpec): in_file = File( - desc='input dataset', - argstr='-inset %s', + desc="input dataset", + argstr="-inset %s", position=1, mandatory=True, - exists=True) + exists=True, + ) out_file = File( - desc='Output dataset.', - argstr='-prefix %s', - name_source='in_file', - name_template='%s_reho', + desc="Output dataset.", + argstr="-prefix %s", + name_source="in_file", + name_template="%s_reho", keep_extension=True, - position=0) + position=0, + ) chi_sq = traits.Bool( - argstr='-chi_sq', - desc='Output the Friedman chi-squared value in addition to the ' - 'Kendall\'s W. This option is currently compatible only with ' - 'the AFNI (BRIK/HEAD) output type; the chi-squared value will ' - 'be the second sub-brick of the output dataset.') + argstr="-chi_sq", + desc="Output the Friedman chi-squared value in addition to the " + "Kendall's W. 
This option is currently compatible only with " + "the AFNI (BRIK/HEAD) output type; the chi-squared value will " + "be the second sub-brick of the output dataset.", + ) mask_file = File( - desc='Mask within which ReHo should be calculated voxelwise', - argstr='-mask %s') + desc="Mask within which ReHo should be calculated voxelwise", argstr="-mask %s" + ) neighborhood = traits.Enum( - 'faces', - 'edges', - 'vertices', - xor=['sphere', 'ellipsoid'], - argstr='-nneigh %s', - desc='voxels in neighborhood. can be: ' - '* faces (for voxel and 6 facewise neighbors, only),\n' - '* edges (for voxel and 18 face- and edge-wise neighbors),\n' - '* vertices (for voxel and 26 face-, edge-, and node-wise ' - 'neighbors).\n') + "faces", + "edges", + "vertices", + xor=["sphere", "ellipsoid"], + argstr="-nneigh %s", + desc="voxels in neighborhood. can be: " + "* faces (for voxel and 6 facewise neighbors, only),\n" + "* edges (for voxel and 18 face- and edge-wise neighbors),\n" + "* vertices (for voxel and 26 face-, edge-, and node-wise " + "neighbors).\n", + ) sphere = traits.Float( - argstr='-neigh_RAD %s', - xor=['neighborhood', 'ellipsoid'], - desc='for additional voxelwise neighborhood control, the ' - 'radius R of a desired neighborhood can be put in; R is ' - 'a floating point number, and must be >1. Examples of ' - 'the numbers of voxels in a given radius are as follows ' - '(you can roughly approximate with the ol\' 4*PI*(R^3)/3 ' - 'thing):\n' - ' R=2.0 -> V=33,\n' - ' R=2.3 -> V=57, \n' - ' R=2.9 -> V=93, \n' - ' R=3.1 -> V=123, \n' - ' R=3.9 -> V=251, \n' - ' R=4.5 -> V=389, \n' - ' R=6.1 -> V=949, \n' - 'but you can choose most any value.') + argstr="-neigh_RAD %s", + xor=["neighborhood", "ellipsoid"], + desc="for additional voxelwise neighborhood control, the " + "radius R of a desired neighborhood can be put in; R is " + "a floating point number, and must be >1. Examples of " + "the numbers of voxels in a given radius are as follows " + "(you can roughly approximate with the ol' 4*PI*(R^3)/3 " + "thing):\n" + " R=2.0 -> V=33,\n" + " R=2.3 -> V=57, \n" + " R=2.9 -> V=93, \n" + " R=3.1 -> V=123, \n" + " R=3.9 -> V=251, \n" + " R=4.5 -> V=389, \n" + " R=6.1 -> V=949, \n" + "but you can choose most any value.", + ) ellipsoid = traits.Tuple( traits.Float, traits.Float, traits.Float, - xor=['sphere', 'neighborhood'], - argstr='-neigh_X %s -neigh_Y %s -neigh_Z %s', - desc='Tuple indicating the x, y, and z radius of an ellipsoid ' - 'defining the neighbourhood of each voxel.\n' - 'The \'hood is then made according to the following relation:' - '(i/A)^2 + (j/B)^2 + (k/C)^2 <=1.\n' - 'which will have approx. V=4*PI*A*B*C/3. The impetus for ' - 'this freedom was for use with data having anisotropic ' - 'voxel edge lengths.') + xor=["sphere", "neighborhood"], + argstr="-neigh_X %s -neigh_Y %s -neigh_Z %s", + desc="Tuple indicating the x, y, and z radius of an ellipsoid " + "defining the neighbourhood of each voxel.\n" + "The 'hood is then made according to the following relation:" + "(i/A)^2 + (j/B)^2 + (k/C)^2 <=1.\n" + "which will have approx. V=4*PI*A*B*C/3. The impetus for " + "this freedom was for use with data having anisotropic " + "voxel edge lengths.", + ) label_set = File( exists=True, - argstr='-in_rois %s', - desc='a set of ROIs, each labelled with distinct ' - 'integers. ReHo will then be calculated per ROI.') + argstr="-in_rois %s", + desc="a set of ROIs, each labelled with distinct " + "integers. 
ReHo will then be calculated per ROI.",
+    )
     overwrite = traits.Bool(
-        desc='overwrite output file if it already exists',
-        argstr='-overwrite')
+        desc="overwrite output file if it already exists", argstr="-overwrite"
+    )


 class ReHoOutputSpec(TraitedSpec):
-    out_file = File(exists=True, desc='Voxelwise regional homogeneity map')
-    out_vals = File(desc='Table of labelwise regional homogenity values')
+    out_file = File(exists=True, desc="Voxelwise regional homogeneity map")
+    out_vals = File(desc="Table of labelwise regional homogeneity values")


 class ReHo(AFNICommandBase):
@@ -2399,23 +2584,24 @@ class ReHo(AFNICommandBase):
     >>> res = reho.run()  # doctest: +SKIP

     """
-    _cmd = '3dReHo'
+
+    _cmd = "3dReHo"
     input_spec = ReHoInputSpec
     output_spec = ReHoOutputSpec

     def _list_outputs(self):
         outputs = super(ReHo, self)._list_outputs()
         if self.inputs.label_set:
-            outputs['out_vals'] = outputs['out_file'] + '_ROI_reho.vals'
+            outputs["out_vals"] = outputs["out_file"] + "_ROI_reho.vals"
         return outputs

     def _format_arg(self, name, spec, value):
         _neigh_dict = {
-            'faces': 7,
-            'edges': 19,
-            'vertices': 27,
-        }
-        if name == 'neighborhood':
+            "faces": 7,
+            "edges": 19,
+            "vertices": 27,
+        }
+        if name == "neighborhood":
             value = _neigh_dict[value]
         return super(ReHo, self)._format_arg(name, spec, value)

@@ -2423,33 +2609,36 @@ def _format_arg(self, name, spec, value):
 class ResampleInputSpec(AFNICommandInputSpec):
     in_file = File(
-        desc='input file to 3dresample',
-        argstr='-inset %s',
+        desc="input file to 3dresample",
+        argstr="-inset %s",
         position=-1,
         mandatory=True,
         exists=True,
-        copyfile=False)
+        copyfile=False,
+    )
     out_file = File(
-        name_template='%s_resample',
-        desc='output image file name',
-        argstr='-prefix %s',
-        name_source='in_file')
-    orientation = Str(desc='new orientation code', argstr='-orient %s')
+        name_template="%s_resample",
+        desc="output image file name",
+        argstr="-prefix %s",
+        name_source="in_file",
+    )
+    orientation = Str(desc="new orientation code", argstr="-orient %s")
     resample_mode = traits.Enum(
-        'NN',
-        'Li',
-        'Cu',
-        'Bk',
-        argstr='-rmode %s',
+        "NN",
+        "Li",
+        "Cu",
+        "Bk",
+        argstr="-rmode %s",
         desc='resampling method from set {"NN", "Li", "Cu", "Bk"}. These are '
         'for "Nearest Neighbor", "Linear", "Cubic" and "Blocky"'
-        'interpolation, respectively. Default is NN.')
+        "interpolation, respectively. 
Default is NN.", + ) voxel_size = traits.Tuple( *[traits.Float()] * 3, - argstr='-dxyz %f %f %f', - desc='resample to new dx, dy and dz') - master = File( - argstr='-master %s', desc='align dataset grid to a reference file') + argstr="-dxyz %f %f %f", + desc="resample to new dx, dy and dz" + ) + master = File(argstr="-master %s", desc="align dataset grid to a reference file") class Resample(AFNICommand): @@ -2472,7 +2661,7 @@ class Resample(AFNICommand): """ - _cmd = '3dresample' + _cmd = "3dresample" input_spec = ResampleInputSpec output_spec = AFNICommandOutputSpec @@ -2480,29 +2669,33 @@ class Resample(AFNICommand): class TCatInputSpec(AFNICommandInputSpec): in_files = InputMultiPath( File(exists=True), - desc='input file to 3dTcat', - argstr=' %s', + desc="input file to 3dTcat", + argstr=" %s", position=-1, mandatory=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_tcat', - desc='output image file name', - argstr='-prefix %s', - name_source='in_files') + name_template="%s_tcat", + desc="output image file name", + argstr="-prefix %s", + name_source="in_files", + ) rlt = traits.Enum( - '', - '+', - '++', - argstr='-rlt%s', - desc='Remove linear trends in each voxel time series loaded from each ' - 'input dataset, SEPARATELY. Option -rlt removes the least squares ' - 'fit of \'a+b*t\' to each voxel time series. Option -rlt+ adds ' - 'dataset mean back in. Option -rlt++ adds overall mean of all ' - 'dataset timeseries back in.', - position=1) + "", + "+", + "++", + argstr="-rlt%s", + desc="Remove linear trends in each voxel time series loaded from each " + "input dataset, SEPARATELY. Option -rlt removes the least squares " + "fit of 'a+b*t' to each voxel time series. Option -rlt+ adds " + "dataset mean back in. Option -rlt++ adds overall mean of all " + "dataset timeseries back in.", + position=1, + ) verbose = traits.Bool( - desc='Print out some verbose output as the program', argstr='-verb') + desc="Print out some verbose output as the program", argstr="-verb" + ) class TCat(AFNICommand): @@ -2528,7 +2721,7 @@ class TCat(AFNICommand): """ - _cmd = '3dTcat' + _cmd = "3dTcat" input_spec = TCatInputSpec output_spec = AFNICommandOutputSpec @@ -2536,26 +2729,27 @@ class TCat(AFNICommand): class TCatSBInputSpec(AFNICommandInputSpec): in_files = traits.List( traits.Tuple(File(exists=True), Str()), - desc='List of tuples of file names and subbrick selectors as strings.' - 'Don\'t forget to protect the single quotes in the subbrick selector' - 'so the contents are protected from the command line interpreter.', - argstr='%s%s ...', + desc="List of tuples of file names and subbrick selectors as strings." + "Don't forget to protect the single quotes in the subbrick selector" + "so the contents are protected from the command line interpreter.", + argstr="%s%s ...", position=-1, mandatory=True, - copyfile=False) - out_file = File( - desc='output image file name', argstr='-prefix %s', genfile=True) + copyfile=False, + ) + out_file = File(desc="output image file name", argstr="-prefix %s", genfile=True) rlt = traits.Enum( - '', - '+', - '++', - argstr='-rlt%s', - desc='Remove linear trends in each voxel time series loaded from each ' - 'input dataset, SEPARATELY. Option -rlt removes the least squares ' - 'fit of \'a+b*t\' to each voxel time series. Option -rlt+ adds ' - 'dataset mean back in. 
Option -rlt++ adds overall mean of all ' - 'dataset timeseries back in.', - position=1) + "", + "+", + "++", + argstr="-rlt%s", + desc="Remove linear trends in each voxel time series loaded from each " + "input dataset, SEPARATELY. Option -rlt removes the least squares " + "fit of 'a+b*t' to each voxel time series. Option -rlt+ adds " + "dataset mean back in. Option -rlt++ adds overall mean of all " + "dataset timeseries back in.", + position=1, + ) class TCatSubBrick(AFNICommand): @@ -2579,30 +2773,32 @@ class TCatSubBrick(AFNICommand): """ - _cmd = '3dTcat' + _cmd = "3dTcat" input_spec = TCatSBInputSpec output_spec = AFNICommandOutputSpec def _gen_filename(self, name): - if name == 'out_file': - return self._gen_fname(self.inputs.in_files[0][0], suffix='_tcat') + if name == "out_file": + return self._gen_fname(self.inputs.in_files[0][0], suffix="_tcat") class TStatInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dTstat', - argstr='%s', + desc="input file to 3dTstat", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_tstat', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') - mask = File(desc='mask file', argstr='-mask %s', exists=True) - options = Str(desc='selected statistical output', argstr='%s') + name_template="%s_tstat", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) + mask = File(desc="mask file", argstr="-mask %s", exists=True) + options = Str(desc="selected statistical output", argstr="%s") class TStat(AFNICommand): @@ -2625,64 +2821,66 @@ class TStat(AFNICommand): """ - _cmd = '3dTstat' + _cmd = "3dTstat" input_spec = TStatInputSpec output_spec = AFNICommandOutputSpec class To3DInputSpec(AFNICommandInputSpec): out_file = File( - name_template='%s', - desc='output image file name', - argstr='-prefix %s', - name_source=['in_folder']) + name_template="%s", + desc="output image file name", + argstr="-prefix %s", + name_source=["in_folder"], + ) in_folder = Directory( - desc='folder with DICOM images to convert', - argstr='%s/*.dcm', + desc="folder with DICOM images to convert", + argstr="%s/*.dcm", position=-1, mandatory=True, - exists=True) + exists=True, + ) filetype = traits.Enum( - 'spgr', - 'fse', - 'epan', - 'anat', - 'ct', - 'spct', - 'pet', - 'mra', - 'bmap', - 'diff', - 'omri', - 'abuc', - 'fim', - 'fith', - 'fico', - 'fitt', - 'fift', - 'fizt', - 'fict', - 'fibt', - 'fibn', - 'figt', - 'fipt', - 'fbuc', - argstr='-%s', - desc='type of datafile being converted') - skipoutliers = traits.Bool( - desc='skip the outliers check', argstr='-skip_outliers') + "spgr", + "fse", + "epan", + "anat", + "ct", + "spct", + "pet", + "mra", + "bmap", + "diff", + "omri", + "abuc", + "fim", + "fith", + "fico", + "fitt", + "fift", + "fizt", + "fict", + "fibt", + "fibn", + "figt", + "fipt", + "fbuc", + argstr="-%s", + desc="type of datafile being converted", + ) + skipoutliers = traits.Bool(desc="skip the outliers check", argstr="-skip_outliers") assumemosaic = traits.Bool( - desc='assume that Siemens image is mosaic', - argstr='-assume_dicom_mosaic') + desc="assume that Siemens image is mosaic", argstr="-assume_dicom_mosaic" + ) datatype = traits.Enum( - 'short', - 'float', - 'byte', - 'complex', - desc='set output file datatype', - argstr='-datum %s') - funcparams = Str( - desc='parameters for functional data', argstr='-time:zt %s alt+z2') + "short", + "float", + "byte", + "complex", + desc="set output file datatype", + 
argstr="-datum %s", + ) + funcparams = Str(desc="parameters for functional data", argstr="-time:zt %s alt+z2") class To3D(AFNICommand): @@ -2706,76 +2904,80 @@ class To3D(AFNICommand): """ - _cmd = 'to3d' + _cmd = "to3d" input_spec = To3DInputSpec output_spec = AFNICommandOutputSpec class UndumpInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dUndump, whose geometry will determine' - 'the geometry of the output', - argstr='-master %s', + desc="input file to 3dUndump, whose geometry will determine" + "the geometry of the output", + argstr="-master %s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') + desc="output image file name", argstr="-prefix %s", name_source="in_file" + ) mask_file = File( - desc='mask image file name. Only voxels that are nonzero in the mask ' - 'can be set.', - argstr='-mask %s') + desc="mask image file name. Only voxels that are nonzero in the mask " + "can be set.", + argstr="-mask %s", + ) datatype = traits.Enum( - 'short', - 'float', - 'byte', - desc='set output file datatype', - argstr='-datum %s') + "short", "float", "byte", desc="set output file datatype", argstr="-datum %s" + ) default_value = traits.Float( - desc='default value stored in each input voxel that does not have ' - 'a value supplied in the input file', - argstr='-dval %f') + desc="default value stored in each input voxel that does not have " + "a value supplied in the input file", + argstr="-dval %f", + ) fill_value = traits.Float( - desc='value, used for each voxel in the output dataset that is NOT ' - 'listed in the input file', - argstr='-fval %f') + desc="value, used for each voxel in the output dataset that is NOT " + "listed in the input file", + argstr="-fval %f", + ) coordinates_specification = traits.Enum( - 'ijk', - 'xyz', - desc='Coordinates in the input file as index triples (i, j, k) ' - 'or spatial coordinates (x, y, z) in mm', - argstr='-%s') + "ijk", + "xyz", + desc="Coordinates in the input file as index triples (i, j, k) " + "or spatial coordinates (x, y, z) in mm", + argstr="-%s", + ) srad = traits.Float( - desc='radius in mm of the sphere that will be filled about each input ' - '(x,y,z) or (i,j,k) voxel. If the radius is not given, or is 0, ' - 'then each input data line sets the value in only one voxel.', - argstr='-srad %f') + desc="radius in mm of the sphere that will be filled about each input " + "(x,y,z) or (i,j,k) voxel. If the radius is not given, or is 0, " + "then each input data line sets the value in only one voxel.", + argstr="-srad %f", + ) orient = traits.Tuple( - traits.Enum('R', 'L'), - traits.Enum('A', 'P'), - traits.Enum('I', 'S'), - desc='Specifies the coordinate order used by -xyz. ' - 'The code must be 3 letters, one each from the pairs ' - '{R,L} {A,P} {I,S}. The first letter gives the ' - 'orientation of the x-axis, the second the orientation ' - 'of the y-axis, the third the z-axis: ' - 'R = right-to-left L = left-to-right ' - 'A = anterior-to-posterior P = posterior-to-anterior ' - 'I = inferior-to-superior S = superior-to-inferior ' - 'If -orient isn\'t used, then the coordinate order of the ' - '-master (in_file) dataset is used to interpret (x,y,z) inputs.', - argstr='-orient %s') + traits.Enum("R", "L"), + traits.Enum("A", "P"), + traits.Enum("I", "S"), + desc="Specifies the coordinate order used by -xyz. " + "The code must be 3 letters, one each from the pairs " + "{R,L} {A,P} {I,S}. 
The first letter gives the " + "orientation of the x-axis, the second the orientation " + "of the y-axis, the third the z-axis: " + "R = right-to-left L = left-to-right " + "A = anterior-to-posterior P = posterior-to-anterior " + "I = inferior-to-superior S = superior-to-inferior " + "If -orient isn't used, then the coordinate order of the " + "-master (in_file) dataset is used to interpret (x,y,z) inputs.", + argstr="-orient %s", + ) head_only = traits.Bool( - desc='create only the .HEAD file which gets exploited by ' - 'the AFNI matlab library function New_HEAD.m', - argstr='-head_only') + desc="create only the .HEAD file which gets exploited by " + "the AFNI matlab library function New_HEAD.m", + argstr="-head_only", + ) class UndumpOutputSpec(TraitedSpec): - out_file = File(desc='assembled file', exists=True) + out_file = File(desc="assembled file", exists=True) class Undump(AFNICommand): @@ -2815,89 +3017,99 @@ class Undump(AFNICommand): """ - _cmd = '3dUndump' + _cmd = "3dUndump" input_spec = UndumpInputSpec output_spec = UndumpOutputSpec class UnifizeInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dUnifize', - argstr='-input %s', + desc="input file to 3dUnifize", + argstr="-input %s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_unifized', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') + name_template="%s_unifized", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) t2 = traits.Bool( - desc='Treat the input as if it were T2-weighted, rather than ' - 'T1-weighted. This processing is done simply by inverting ' - 'the image contrast, processing it as if that result were ' - 'T1-weighted, and then re-inverting the results ' - 'counts of voxel overlap, i.e., each voxel will contain the ' - 'number of masks that it is set in.', - argstr='-T2') + desc="Treat the input as if it were T2-weighted, rather than " + "T1-weighted. This processing is done simply by inverting " + "the image contrast, processing it as if that result were " + "T1-weighted, and then re-inverting the results " + "counts of voxel overlap, i.e., each voxel will contain the " + "number of masks that it is set in.", + argstr="-T2", + ) gm = traits.Bool( - desc='Also scale to unifize \'gray matter\' = lower intensity voxels ' - '(to aid in registering images from different scanners).', - argstr='-GM') + desc="Also scale to unifize 'gray matter' = lower intensity voxels " + "(to aid in registering images from different scanners).", + argstr="-GM", + ) urad = traits.Float( - desc='Sets the radius (in voxels) of the ball used for the sneaky ' - 'trick. Default value is 18.3, and should be changed ' - 'proportionally if the dataset voxel size differs significantly ' - 'from 1 mm.', - argstr='-Urad %s') + desc="Sets the radius (in voxels) of the ball used for the sneaky " + "trick. 
Default value is 18.3, and should be changed " + "proportionally if the dataset voxel size differs significantly " + "from 1 mm.", + argstr="-Urad %s", + ) scale_file = File( - desc='output file name to save the scale factor used at each voxel ', - argstr='-ssave %s') + desc="output file name to save the scale factor used at each voxel ", + argstr="-ssave %s", + ) no_duplo = traits.Bool( - desc='Do NOT use the \'duplo down\' step; this can be useful for ' - 'lower resolution datasets.', - argstr='-noduplo') + desc="Do NOT use the 'duplo down' step; this can be useful for " + "lower resolution datasets.", + argstr="-noduplo", + ) epi = traits.Bool( - desc='Assume the input dataset is a T2 (or T2*) weighted EPI time ' - 'series. After computing the scaling, apply it to ALL volumes ' - '(TRs) in the input dataset. That is, a given voxel will be ' - 'scaled by the same factor at each TR. ' - 'This option also implies \'-noduplo\' and \'-T2\'.' - 'This option turns off \'-GM\' if you turned it on.', - argstr='-EPI', - requires=['no_duplo', 't2'], - xor=['gm']) + desc="Assume the input dataset is a T2 (or T2*) weighted EPI time " + "series. After computing the scaling, apply it to ALL volumes " + "(TRs) in the input dataset. That is, a given voxel will be " + "scaled by the same factor at each TR. " + "This option also implies '-noduplo' and '-T2'." + "This option turns off '-GM' if you turned it on.", + argstr="-EPI", + requires=["no_duplo", "t2"], + xor=["gm"], + ) rbt = traits.Tuple( traits.Float(), traits.Float(), traits.Float(), - desc='Option for AFNI experts only.' - 'Specify the 3 parameters for the algorithm:\n' - 'R = radius; same as given by option \'-Urad\', [default=18.3]\n' - 'b = bottom percentile of normalizing data range, [default=70.0]\n' - 'r = top percentile of normalizing data range, [default=80.0]\n', - argstr='-rbt %f %f %f') + desc="Option for AFNI experts only." + "Specify the 3 parameters for the algorithm:\n" + "R = radius; same as given by option '-Urad', [default=18.3]\n" + "b = bottom percentile of normalizing data range, [default=70.0]\n" + "r = top percentile of normalizing data range, [default=80.0]\n", + argstr="-rbt %f %f %f", + ) t2_up = traits.Float( - desc='Option for AFNI experts only.' - 'Set the upper percentile point used for T2-T1 inversion. ' - 'Allowed to be anything between 90 and 100 (inclusive), with ' - 'default to 98.5 (for no good reason).', - argstr='-T2up %f') + desc="Option for AFNI experts only." + "Set the upper percentile point used for T2-T1 inversion. " + "Allowed to be anything between 90 and 100 (inclusive), with " + "default to 98.5 (for no good reason).", + argstr="-T2up %f", + ) cl_frac = traits.Float( - desc='Option for AFNI experts only.' - 'Set the automask \'clip level fraction\'. Must be between ' - '0.1 and 0.9. A small fraction means to make the initial ' - 'threshold for clipping (a la 3dClipLevel) smaller, which ' - 'will tend to make the mask larger. [default=0.1]', - argstr='-clfrac %f') - quiet = traits.Bool( - desc='Don\'t print the progress messages.', argstr='-quiet') + desc="Option for AFNI experts only." + "Set the automask 'clip level fraction'. Must be between " + "0.1 and 0.9. A small fraction means to make the initial " + "threshold for clipping (a la 3dClipLevel) smaller, which " + "will tend to make the mask larger. 
[default=0.1]", + argstr="-clfrac %f", + ) + quiet = traits.Bool(desc="Don't print the progress messages.", argstr="-quiet") class UnifizeOutputSpec(TraitedSpec): - scale_file = File(desc='scale factor file') - out_file = File(desc='unifized file', exists=True) + scale_file = File(desc="scale factor file") + out_file = File(desc="unifized file", exists=True) class Unifize(AFNICommand): @@ -2944,25 +3156,27 @@ class Unifize(AFNICommand): """ - _cmd = '3dUnifize' + _cmd = "3dUnifize" input_spec = UnifizeInputSpec output_spec = UnifizeOutputSpec class ZCutUpInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3dZcutup', - argstr='%s', + desc="input file to 3dZcutup", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_zcutup', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') - keep = Str(desc='slice range to keep in output', argstr='-keep %s') + name_template="%s_zcutup", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) + keep = Str(desc="slice range to keep in output", argstr="-keep %s") class ZCutUp(AFNICommand): @@ -2985,36 +3199,38 @@ class ZCutUp(AFNICommand): """ - _cmd = '3dZcutup' + _cmd = "3dZcutup" input_spec = ZCutUpInputSpec output_spec = AFNICommandOutputSpec class GCORInputSpec(CommandLineInputSpec): in_file = File( - desc='input dataset to compute the GCOR over', - argstr='-input %s', + desc="input dataset to compute the GCOR over", + argstr="-input %s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) mask = File( - desc='mask dataset, for restricting the computation', - argstr='-mask %s', + desc="mask dataset, for restricting the computation", + argstr="-mask %s", exists=True, - copyfile=False) + copyfile=False, + ) nfirst = traits.Int( - 0, argstr='-nfirst %d', desc='specify number of initial TRs to ignore') + 0, argstr="-nfirst %d", desc="specify number of initial TRs to ignore" + ) no_demean = traits.Bool( - False, - argstr='-no_demean', - desc='do not (need to) demean as first step') + False, argstr="-no_demean", desc="do not (need to) demean as first step" + ) class GCOROutputSpec(TraitedSpec): - out = traits.Float(desc='global correlation value') + out = traits.Float(desc="global correlation value") class GCOR(CommandLine): @@ -3039,7 +3255,7 @@ class GCOR(CommandLine): """ - _cmd = '@compute_gcor' + _cmd = "@compute_gcor" input_spec = GCORInputSpec output_spec = GCOROutputSpec @@ -3047,48 +3263,54 @@ def _run_interface(self, runtime): runtime = super(GCOR, self)._run_interface(runtime) gcor_line = [ - line.strip() for line in runtime.stdout.split('\n') - if line.strip().startswith('GCOR = ') + line.strip() + for line in runtime.stdout.split("\n") + if line.strip().startswith("GCOR = ") ][-1] - setattr(self, '_gcor', float(gcor_line[len('GCOR = '):])) + setattr(self, "_gcor", float(gcor_line[len("GCOR = ") :])) return runtime def _list_outputs(self): - return {'out': getattr(self, '_gcor')} + return {"out": getattr(self, "_gcor")} class AxializeInputSpec(AFNICommandInputSpec): in_file = File( - desc='input file to 3daxialize', - argstr='%s', + desc="input file to 3daxialize", + argstr="%s", position=-2, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_axialize', - desc='output image file name', - argstr='-prefix %s', - name_source='in_file') - verb = traits.Bool(desc='Print out a progerss report', argstr='-verb') + 
name_template="%s_axialize", + desc="output image file name", + argstr="-prefix %s", + name_source="in_file", + ) + verb = traits.Bool(desc="Print out a progerss report", argstr="-verb") sagittal = traits.Bool( - desc='Do sagittal slice order [-orient ASL]', - argstr='-sagittal', - xor=['coronal', 'axial']) + desc="Do sagittal slice order [-orient ASL]", + argstr="-sagittal", + xor=["coronal", "axial"], + ) coronal = traits.Bool( - desc='Do coronal slice order [-orient RSA]', - argstr='-coronal', - xor=['sagittal', 'axial']) + desc="Do coronal slice order [-orient RSA]", + argstr="-coronal", + xor=["sagittal", "axial"], + ) axial = traits.Bool( - desc='Do axial slice order [-orient RAI]' - 'This is the default AFNI axial order, and' - 'is the one currently required by the' - 'volume rendering plugin; this is also' - 'the default orientation output by this' + desc="Do axial slice order [-orient RAI]" + "This is the default AFNI axial order, and" + "is the one currently required by the" + "volume rendering plugin; this is also" + "the default orientation output by this" "program (hence the program's name).", - argstr='-axial', - xor=['coronal', 'sagittal']) - orientation = Str(desc='new orientation code', argstr='-orient %s') + argstr="-axial", + xor=["coronal", "sagittal"], + ) + orientation = Str(desc="new orientation code", argstr="-orient %s") class Axialize(AFNICommand): @@ -3110,48 +3332,53 @@ class Axialize(AFNICommand): """ - _cmd = '3daxialize' + _cmd = "3daxialize" input_spec = AxializeInputSpec output_spec = AFNICommandOutputSpec class ZcatInputSpec(AFNICommandInputSpec): in_files = InputMultiPath( - File(desc='input files to 3dZcat', exists=True), - argstr='%s', + File(desc="input files to 3dZcat", exists=True), + argstr="%s", position=-1, mandatory=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='%s_zcat', - desc='output dataset prefix name (default \'zcat\')', - argstr='-prefix %s', - name_source='in_files') + name_template="%s_zcat", + desc="output dataset prefix name (default 'zcat')", + argstr="-prefix %s", + name_source="in_files", + ) datum = traits.Enum( - 'byte', - 'short', - 'float', - argstr='-datum %s', - desc='specify data type for output. Valid types are \'byte\', ' - '\'short\' and \'float\'.') + "byte", + "short", + "float", + argstr="-datum %s", + desc="specify data type for output. Valid types are 'byte', " + "'short' and 'float'.", + ) verb = traits.Bool( - desc='print out some verbositiness as the program proceeds.', - argstr='-verb') + desc="print out some verbositiness as the program proceeds.", argstr="-verb" + ) fscale = traits.Bool( - desc='Force scaling of the output to the maximum integer ' - 'range. This only has effect if the output datum is ' - 'byte or short (either forced or defaulted). This ' - 'option is sometimes necessary to eliminate ' - 'unpleasant truncation artifacts.', - argstr='-fscale', - xor=['nscale']) + desc="Force scaling of the output to the maximum integer " + "range. This only has effect if the output datum is " + "byte or short (either forced or defaulted). This " + "option is sometimes necessary to eliminate " + "unpleasant truncation artifacts.", + argstr="-fscale", + xor=["nscale"], + ) nscale = traits.Bool( - desc='Don\'t do any scaling on output to byte or short ' - 'datasets. 
This may be especially useful when ' - 'operating on mask datasets whose output values ' - 'are only 0\'s and 1\'s.', - argstr='-nscale', - xor=['fscale']) + desc="Don't do any scaling on output to byte or short " + "datasets. This may be especially useful when " + "operating on mask datasets whose output values " + "are only 0's and 1's.", + argstr="-nscale", + xor=["fscale"], + ) class Zcat(AFNICommand): @@ -3173,88 +3400,98 @@ class Zcat(AFNICommand): >>> res = zcat.run() # doctest: +SKIP """ - _cmd = '3dZcat' + _cmd = "3dZcat" input_spec = ZcatInputSpec output_spec = AFNICommandOutputSpec class ZeropadInputSpec(AFNICommandInputSpec): in_files = File( - desc='input dataset', - argstr='%s', + desc="input dataset", + argstr="%s", position=-1, mandatory=True, exists=True, - copyfile=False) + copyfile=False, + ) out_file = File( - name_template='zeropad', - desc='output dataset prefix name (default \'zeropad\')', - argstr='-prefix %s') + name_template="zeropad", + desc="output dataset prefix name (default 'zeropad')", + argstr="-prefix %s", + ) I = traits.Int( - desc='adds \'n\' planes of zero at the Inferior edge', - argstr='-I %i', - xor=['master']) + desc="adds 'n' planes of zero at the Inferior edge", + argstr="-I %i", + xor=["master"], + ) S = traits.Int( - desc='adds \'n\' planes of zero at the Superior edge', - argstr='-S %i', - xor=['master']) + desc="adds 'n' planes of zero at the Superior edge", + argstr="-S %i", + xor=["master"], + ) A = traits.Int( - desc='adds \'n\' planes of zero at the Anterior edge', - argstr='-A %i', - xor=['master']) + desc="adds 'n' planes of zero at the Anterior edge", + argstr="-A %i", + xor=["master"], + ) P = traits.Int( - desc='adds \'n\' planes of zero at the Posterior edge', - argstr='-P %i', - xor=['master']) + desc="adds 'n' planes of zero at the Posterior edge", + argstr="-P %i", + xor=["master"], + ) L = traits.Int( - desc='adds \'n\' planes of zero at the Left edge', - argstr='-L %i', - xor=['master']) + desc="adds 'n' planes of zero at the Left edge", argstr="-L %i", xor=["master"] + ) R = traits.Int( - desc='adds \'n\' planes of zero at the Right edge', - argstr='-R %i', - xor=['master']) + desc="adds 'n' planes of zero at the Right edge", argstr="-R %i", xor=["master"] + ) z = traits.Int( - desc='adds \'n\' planes of zero on EACH of the ' - 'dataset z-axis (slice-direction) faces', - argstr='-z %i', - xor=['master']) + desc="adds 'n' planes of zero on EACH of the " + "dataset z-axis (slice-direction) faces", + argstr="-z %i", + xor=["master"], + ) RL = traits.Int( - desc='specify that planes should be added or cut ' - 'symmetrically to make the resulting volume have' - 'N slices in the right-left direction', - argstr='-RL %i', - xor=['master']) + desc="specify that planes should be added or cut " + "symmetrically to make the resulting volume have" + "N slices in the right-left direction", + argstr="-RL %i", + xor=["master"], + ) AP = traits.Int( - desc='specify that planes should be added or cut ' - 'symmetrically to make the resulting volume have' - 'N slices in the anterior-posterior direction', - argstr='-AP %i', - xor=['master']) + desc="specify that planes should be added or cut " + "symmetrically to make the resulting volume have" + "N slices in the anterior-posterior direction", + argstr="-AP %i", + xor=["master"], + ) IS = traits.Int( - desc='specify that planes should be added or cut ' - 'symmetrically to make the resulting volume have' - 'N slices in the inferior-superior direction', - argstr='-IS %i', - xor=['master']) + 
desc="specify that planes should be added or cut " + "symmetrically to make the resulting volume have" + "N slices in the inferior-superior direction", + argstr="-IS %i", + xor=["master"], + ) mm = traits.Bool( - desc='pad counts \'n\' are in mm instead of slices, ' - 'where each \'n\' is an integer and at least \'n\' ' - 'mm of slices will be added/removed; e.g., n = 3 ' - 'and slice thickness = 2.5 mm ==> 2 slices added', - argstr='-mm', - xor=['master']) + desc="pad counts 'n' are in mm instead of slices, " + "where each 'n' is an integer and at least 'n' " + "mm of slices will be added/removed; e.g., n = 3 " + "and slice thickness = 2.5 mm ==> 2 slices added", + argstr="-mm", + xor=["master"], + ) master = File( - desc='match the volume described in dataset ' - '\'mset\', where mset must have the same ' - 'orientation and grid spacing as dataset to be ' - 'padded. the goal of -master is to make the ' - 'output dataset from 3dZeropad match the ' - 'spatial \'extents\' of mset by adding or ' - 'subtracting slices as needed. You can\'t use ' - '-I,-S,..., or -mm with -master', - argstr='-master %s', - xor=['I', 'S', 'A', 'P', 'L', 'R', 'z', 'RL', 'AP', 'IS', 'mm']) + desc="match the volume described in dataset " + "'mset', where mset must have the same " + "orientation and grid spacing as dataset to be " + "padded. the goal of -master is to make the " + "output dataset from 3dZeropad match the " + "spatial 'extents' of mset by adding or " + "subtracting slices as needed. You can't use " + "-I,-S,..., or -mm with -master", + argstr="-master %s", + xor=["I", "S", "A", "P", "L", "R", "z", "RL", "AP", "IS", "mm"], + ) class Zeropad(AFNICommand): @@ -3281,6 +3518,6 @@ class Zeropad(AFNICommand): >>> res = zeropad.run() # doctest: +SKIP """ - _cmd = '3dZeropad' + _cmd = "3dZeropad" input_spec = ZeropadInputSpec output_spec = AFNICommandOutputSpec diff --git a/nipype/interfaces/ants/__init__.py b/nipype/interfaces/ants/__init__.py index fa441944a2..389a5f1371 100644 --- a/nipype/interfaces/ants/__init__.py +++ b/nipype/interfaces/ants/__init__.py @@ -4,23 +4,44 @@ """Top-level namespace for ants.""" # Registraiton programs -from .registration import (ANTS, Registration, RegistrationSynQuick, - CompositeTransformUtil, MeasureImageSimilarity) +from .registration import ( + ANTS, + Registration, + RegistrationSynQuick, + CompositeTransformUtil, + MeasureImageSimilarity, +) # Resampling Programs -from .resampling import (ApplyTransforms, ApplyTransformsToPoints, - WarpImageMultiTransform, - WarpTimeSeriesImageMultiTransform) +from .resampling import ( + ApplyTransforms, + ApplyTransformsToPoints, + WarpImageMultiTransform, + WarpTimeSeriesImageMultiTransform, +) # Segmentation Programs -from .segmentation import (Atropos, LaplacianThickness, N4BiasFieldCorrection, - JointFusion, CorticalThickness, BrainExtraction, - DenoiseImage, AntsJointFusion) +from .segmentation import ( + Atropos, + LaplacianThickness, + N4BiasFieldCorrection, + JointFusion, + CorticalThickness, + BrainExtraction, + DenoiseImage, + AntsJointFusion, +) # Visualization Programs from .visualization import ConvertScalarImageToRGB, CreateTiledMosaic # Utility Programs -from .utils import (AverageAffineTransform, AverageImages, MultiplyImages, - CreateJacobianDeterminantImage, AffineInitializer, - ComposeMultiTransform, LabelGeometry) +from .utils import ( + AverageAffineTransform, + AverageImages, + MultiplyImages, + CreateJacobianDeterminantImage, + AffineInitializer, + ComposeMultiTransform, + LabelGeometry, +) diff --git 
a/nipype/interfaces/ants/base.py b/nipype/interfaces/ants/base.py index e27cfc0920..4b5e5ef8db 100644 --- a/nipype/interfaces/ants/base.py +++ b/nipype/interfaces/ants/base.py @@ -6,9 +6,9 @@ # Local imports from ... import logging, LooseVersion -from ..base import (CommandLine, CommandLineInputSpec, traits, isdefined, - PackageInfo) -iflogger = logging.getLogger('nipype.interface') +from ..base import CommandLine, CommandLineInputSpec, traits, isdefined, PackageInfo + +iflogger = logging.getLogger("nipype.interface") # -Using -1 gives primary responsibilty to ITKv4 to do the correct # thread limitings. @@ -22,32 +22,34 @@ # ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS takes precidence. # This behavior states that you the user explicitly specifies # num_threads, then respect that no matter what SGE tries to limit. -PREFERED_ITKv4_THREAD_LIMIT_VARIABLE = 'NSLOTS' -ALT_ITKv4_THREAD_LIMIT_VARIABLE = 'ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS' +PREFERED_ITKv4_THREAD_LIMIT_VARIABLE = "NSLOTS" +ALT_ITKv4_THREAD_LIMIT_VARIABLE = "ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS" class Info(PackageInfo): - version_cmd = os.path.join(os.getenv('ANTSPATH', ''), - 'antsRegistration') + ' --version' + version_cmd = ( + os.path.join(os.getenv("ANTSPATH", ""), "antsRegistration") + " --version" + ) @staticmethod def parse_version(raw_info): for line in raw_info.splitlines(): - if line.startswith('ANTs Version: '): + if line.startswith("ANTs Version: "): v_string = line.split()[2] break else: return None # -githash may or may not be appended - v_string = v_string.split('-')[0] + v_string = v_string.split("-")[0] # 2.2.0-equivalent version string - if 'post' in v_string and \ - LooseVersion(v_string) >= LooseVersion('2.1.0.post789'): - return '2.2.0' + if "post" in v_string and LooseVersion(v_string) >= LooseVersion( + "2.1.0.post789" + ): + return "2.2.0" else: - return '.'.join(v_string.split('.')[:3]) + return ".".join(v_string.split(".")[:3]) class ANTSCommandInputSpec(CommandLineInputSpec): @@ -58,7 +60,8 @@ class ANTSCommandInputSpec(CommandLineInputSpec): LOCAL_DEFAULT_NUMBER_OF_THREADS, usedefault=True, nohash=True, - desc="Number of ITK threads to use") + desc="Number of ITK threads to use", + ) class ANTSCommand(CommandLine): @@ -70,7 +73,7 @@ class ANTSCommand(CommandLine): def __init__(self, **inputs): super(ANTSCommand, self).__init__(**inputs) - self.inputs.on_trait_change(self._num_threads_update, 'num_threads') + self.inputs.on_trait_change(self._num_threads_update, "num_threads") if not isdefined(self.inputs.num_threads): self.inputs.num_threads = self._num_threads @@ -88,22 +91,21 @@ def _num_threads_update(self): # default behavior should be the one specified by ITKv4 rules # (i.e. 
respect SGE $NSLOTS or environmental variables of threads, or # user environmental settings) - if (self.inputs.num_threads == -1): - if (ALT_ITKv4_THREAD_LIMIT_VARIABLE in self.inputs.environ): + if self.inputs.num_threads == -1: + if ALT_ITKv4_THREAD_LIMIT_VARIABLE in self.inputs.environ: del self.inputs.environ[ALT_ITKv4_THREAD_LIMIT_VARIABLE] - if (PREFERED_ITKv4_THREAD_LIMIT_VARIABLE in self.inputs.environ): + if PREFERED_ITKv4_THREAD_LIMIT_VARIABLE in self.inputs.environ: del self.inputs.environ[PREFERED_ITKv4_THREAD_LIMIT_VARIABLE] else: - self.inputs.environ.update({ - PREFERED_ITKv4_THREAD_LIMIT_VARIABLE: - '%s' % self.inputs.num_threads - }) + self.inputs.environ.update( + {PREFERED_ITKv4_THREAD_LIMIT_VARIABLE: "%s" % self.inputs.num_threads} + ) @staticmethod def _format_xarray(val): """ Convenience method for converting input arrays [1,2,3] to commandline format '1x2x3' """ - return 'x'.join([str(x) for x in val]) + return "x".join([str(x) for x in val]) @classmethod def set_default_num_threads(cls, num_threads): diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py index 2b85ea5923..6aee26655e 100644 --- a/nipype/interfaces/ants/registration.py +++ b/nipype/interfaces/ants/registration.py @@ -11,23 +11,23 @@ class ANTSInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( - 3, - 2, - argstr='%d', - position=1, - desc='image dimension (2 or 3)') + 3, 2, argstr="%d", position=1, desc="image dimension (2 or 3)" + ) fixed_image = InputMultiPath( File(exists=True), mandatory=True, - desc=('image to which the moving image is ' - 'warped')) + desc=("image to which the moving image is " "warped"), + ) moving_image = InputMultiPath( File(exists=True), - argstr='%s', + argstr="%s", mandatory=True, - desc=('image to apply transformation to ' - '(generally a coregistered' - 'functional)')) + desc=( + "image to apply transformation to " + "(generally a coregistered" + "functional)" + ), + ) # Not all metrics are appropriate for all modalities. Also, not all metrics # are efficeint or appropriate at all resolution levels, Some metrics @@ -50,75 +50,80 @@ class ANTSInputSpec(ANTSCommandInputSpec): # # Cost = Sum_i ( metricweight[i] Metric_i ( fixedimage[i], movingimage[i]) ) metric = traits.List( - traits.Enum('CC', 'MI', 'SMI', 'PR', 'SSD', 'MSQ', 'PSE'), + traits.Enum("CC", "MI", "SMI", "PR", "SSD", "MSQ", "PSE"), mandatory=True, - desc='') + desc="", + ) metric_weight = traits.List( traits.Float(), value=[1.0], usedefault=True, - requires=['metric'], + requires=["metric"], mandatory=True, - desc='the metric weight(s) for each stage. ' - 'The weights must sum to 1 per stage.') + desc="the metric weight(s) for each stage. " + "The weights must sum to 1 per stage.", + ) radius = traits.List( traits.Int(), - requires=['metric'], + requires=["metric"], mandatory=True, - desc='radius of the region (i.e. number of layers around a voxel/pixel)' - ' that is used for computing cross correlation') + desc="radius of the region (i.e. 
number of layers around a voxel/pixel)" + " that is used for computing cross correlation", + ) output_transform_prefix = Str( - 'out', - usedefault=True, - argstr='--output-naming %s', - mandatory=True, - desc='') + "out", usedefault=True, argstr="--output-naming %s", mandatory=True, desc="" + ) transformation_model = traits.Enum( - 'Diff', - 'Elast', - 'Exp', - 'Greedy Exp', - 'SyN', - argstr='%s', + "Diff", + "Elast", + "Exp", + "Greedy Exp", + "SyN", + argstr="%s", mandatory=True, - desc='') - gradient_step_length = traits.Float( - requires=['transformation_model'], desc='') - number_of_time_steps = traits.Float( - requires=['gradient_step_length'], desc='') - delta_time = traits.Float(requires=['number_of_time_steps'], desc='') - symmetry_type = traits.Float(requires=['delta_time'], desc='') + desc="", + ) + gradient_step_length = traits.Float(requires=["transformation_model"], desc="") + number_of_time_steps = traits.Float(requires=["gradient_step_length"], desc="") + delta_time = traits.Float(requires=["number_of_time_steps"], desc="") + symmetry_type = traits.Float(requires=["delta_time"], desc="") use_histogram_matching = traits.Bool( - argstr='%s', default_value=True, usedefault=True) + argstr="%s", default_value=True, usedefault=True + ) number_of_iterations = traits.List( - traits.Int(), argstr='--number-of-iterations %s', sep='x') + traits.Int(), argstr="--number-of-iterations %s", sep="x" + ) smoothing_sigmas = traits.List( - traits.Int(), argstr='--gaussian-smoothing-sigmas %s', sep='x') + traits.Int(), argstr="--gaussian-smoothing-sigmas %s", sep="x" + ) subsampling_factors = traits.List( - traits.Int(), argstr='--subsampling-factors %s', sep='x') - affine_gradient_descent_option = traits.List(traits.Float(), argstr='%s') + traits.Int(), argstr="--subsampling-factors %s", sep="x" + ) + affine_gradient_descent_option = traits.List(traits.Float(), argstr="%s") - mi_option = traits.List(traits.Int(), argstr='--MI-option %s', sep='x') - regularization = traits.Enum('Gauss', 'DMFFD', argstr='%s', desc='') + mi_option = traits.List(traits.Int(), argstr="--MI-option %s", sep="x") + regularization = traits.Enum("Gauss", "DMFFD", argstr="%s", desc="") regularization_gradient_field_sigma = traits.Float( - requires=['regularization'], desc='') + requires=["regularization"], desc="" + ) regularization_deformation_field_sigma = traits.Float( - requires=['regularization'], desc='') + requires=["regularization"], desc="" + ) number_of_affine_iterations = traits.List( - traits.Int(), argstr='--number-of-affine-iterations %s', sep='x') + traits.Int(), argstr="--number-of-affine-iterations %s", sep="x" + ) class ANTSOutputSpec(TraitedSpec): - affine_transform = File(exists=True, desc='Affine transform file') - warp_transform = File(exists=True, desc='Warping deformation field') - inverse_warp_transform = File( - exists=True, desc='Inverse warping deformation field') - metaheader = File(exists=True, desc='VTK metaheader .mhd file') - metaheader_raw = File(exists=True, desc='VTK metaheader .raw file') + affine_transform = File(exists=True, desc="Affine transform file") + warp_transform = File(exists=True, desc="Warping deformation field") + inverse_warp_transform = File(exists=True, desc="Inverse warping deformation field") + metaheader = File(exists=True, desc="VTK metaheader .mhd file") + metaheader_raw = File(exists=True, desc="VTK metaheader .raw file") class ANTS(ANTSCommand): @@ -151,26 +156,32 @@ class ANTS(ANTSCommand): 10000x10000x10000x10000x10000 --number-of-iterations 50x35x15 
--output-naming MY --regularization Gauss[3.0,0.0] \ --transformation-model SyN[0.25] --use-Histogram-Matching 1' """ - _cmd = 'ANTS' + + _cmd = "ANTS" input_spec = ANTSInputSpec output_spec = ANTSOutputSpec def _image_metric_constructor(self): retval = [] - intensity_based = ['CC', 'MI', 'SMI', 'PR', 'SSD', 'MSQ'] - point_set_based = ['PSE', 'JTB'] + intensity_based = ["CC", "MI", "SMI", "PR", "SSD", "MSQ"] + point_set_based = ["PSE", "JTB"] for ii in range(len(self.inputs.moving_image)): if self.inputs.metric[ii] in intensity_based: retval.append( - '--image-metric %s[ %s, %s, %g, %d ]' % - (self.inputs.metric[ii], self.inputs.fixed_image[ii], - self.inputs.moving_image[ii], - self.inputs.metric_weight[ii], self.inputs.radius[ii])) + "--image-metric %s[ %s, %s, %g, %d ]" + % ( + self.inputs.metric[ii], + self.inputs.fixed_image[ii], + self.inputs.moving_image[ii], + self.inputs.metric_weight[ii], + self.inputs.radius[ii], + ) + ) elif self.inputs.metric[ii] == point_set_based: pass # retval.append('--image-metric %s[%s, %s, ...'.format(self.inputs.metric[ii], # self.inputs.fixed_image[ii], self.inputs.moving_image[ii], ...)) - return ' '.join(retval) + return " ".join(retval) def _transformation_constructor(self): model = self.inputs.transformation_model @@ -178,62 +189,67 @@ def _transformation_constructor(self): time_step = self.inputs.number_of_time_steps delta_time = self.inputs.delta_time symmetry_type = self.inputs.symmetry_type - retval = ['--transformation-model %s' % model] + retval = ["--transformation-model %s" % model] parameters = [] for elem in (step_length, time_step, delta_time, symmetry_type): if elem is not traits.Undefined: - parameters.append('%#.2g' % elem) + parameters.append("%#.2g" % elem) if len(parameters) > 0: if len(parameters) > 1: - parameters = ','.join(parameters) + parameters = ",".join(parameters) else: - parameters = ''.join(parameters) - retval.append('[%s]' % parameters) - return ''.join(retval) + parameters = "".join(parameters) + retval.append("[%s]" % parameters) + return "".join(retval) def _regularization_constructor(self): - return '--regularization {0}[{1},{2}]'.format( + return "--regularization {0}[{1},{2}]".format( self.inputs.regularization, self.inputs.regularization_gradient_field_sigma, - self.inputs.regularization_deformation_field_sigma) + self.inputs.regularization_deformation_field_sigma, + ) def _affine_gradient_descent_option_constructor(self): values = self.inputs.affine_gradient_descent_option - defaults = [0.1, 0.5, 1.e-4, 1.e-4] + defaults = [0.1, 0.5, 1.0e-4, 1.0e-4] for ii in range(len(defaults)): try: defaults[ii] = values[ii] except IndexError: break parameters = self._format_xarray( - [('%g' % defaults[index]) for index in range(4)]) - retval = ['--affine-gradient-descent-option', parameters] - return ' '.join(retval) + [("%g" % defaults[index]) for index in range(4)] + ) + retval = ["--affine-gradient-descent-option", parameters] + return " ".join(retval) def _format_arg(self, opt, spec, val): - if opt == 'moving_image': + if opt == "moving_image": return self._image_metric_constructor() - elif opt == 'transformation_model': + elif opt == "transformation_model": return self._transformation_constructor() - elif opt == 'regularization': + elif opt == "regularization": return self._regularization_constructor() - elif opt == 'affine_gradient_descent_option': + elif opt == "affine_gradient_descent_option": return self._affine_gradient_descent_option_constructor() - elif opt == 'use_histogram_matching': + elif opt == 
"use_histogram_matching": if self.inputs.use_histogram_matching: - return '--use-Histogram-Matching 1' + return "--use-Histogram-Matching 1" else: - return '--use-Histogram-Matching 0' + return "--use-Histogram-Matching 0" return super(ANTS, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() - outputs['affine_transform'] = os.path.abspath( - self.inputs.output_transform_prefix + 'Affine.txt') - outputs['warp_transform'] = os.path.abspath( - self.inputs.output_transform_prefix + 'Warp.nii.gz') - outputs['inverse_warp_transform'] = os.path.abspath( - self.inputs.output_transform_prefix + 'InverseWarp.nii.gz') + outputs["affine_transform"] = os.path.abspath( + self.inputs.output_transform_prefix + "Affine.txt" + ) + outputs["warp_transform"] = os.path.abspath( + self.inputs.output_transform_prefix + "Warp.nii.gz" + ) + outputs["inverse_warp_transform"] = os.path.abspath( + self.inputs.output_transform_prefix + "InverseWarp.nii.gz" + ) # outputs['metaheader'] = os.path.abspath(self.inputs.output_transform_prefix + 'velocity.mhd') # outputs['metaheader_raw'] = os.path.abspath(self.inputs.output_transform_prefix + 'velocity.raw') return outputs @@ -243,198 +259,231 @@ class RegistrationInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, - argstr='--dimensionality %d', + argstr="--dimensionality %d", usedefault=True, - desc='image dimension (2 or 3)') + desc="image dimension (2 or 3)", + ) fixed_image = InputMultiPath( File(exists=True), mandatory=True, - desc='Image to which the moving_image should be transformed' - '(usually a structural image)') + desc="Image to which the moving_image should be transformed" + "(usually a structural image)", + ) fixed_image_mask = File( exists=True, - argstr='%s', - max_ver='2.1.0', - xor=['fixed_image_masks'], - desc='Mask used to limit metric sampling region of the fixed image' - 'in all stages') + argstr="%s", + max_ver="2.1.0", + xor=["fixed_image_masks"], + desc="Mask used to limit metric sampling region of the fixed image" + "in all stages", + ) fixed_image_masks = InputMultiPath( - traits.Either('NULL', File(exists=True)), - min_ver='2.2.0', - xor=['fixed_image_mask'], - desc= - 'Masks used to limit metric sampling region of the fixed image, defined per registration stage' - '(Use "NULL" to omit a mask at a given stage)') + traits.Either("NULL", File(exists=True)), + min_ver="2.2.0", + xor=["fixed_image_mask"], + desc="Masks used to limit metric sampling region of the fixed image, defined per registration stage" + '(Use "NULL" to omit a mask at a given stage)', + ) moving_image = InputMultiPath( File(exists=True), mandatory=True, - desc= - 'Image that will be registered to the space of fixed_image. This is the' - 'image on which the transformations will be applied to') + desc="Image that will be registered to the space of fixed_image. 
This is the" + "image on which the transformations will be applied to", + ) moving_image_mask = File( exists=True, - requires=['fixed_image_mask'], - max_ver='2.1.0', - xor=['moving_image_masks'], - desc='mask used to limit metric sampling region of the moving image' - 'in all stages') + requires=["fixed_image_mask"], + max_ver="2.1.0", + xor=["moving_image_masks"], + desc="mask used to limit metric sampling region of the moving image" + "in all stages", + ) moving_image_masks = InputMultiPath( - traits.Either('NULL', File(exists=True)), - min_ver='2.2.0', - xor=['moving_image_mask'], - desc= - 'Masks used to limit metric sampling region of the moving image, defined per registration stage' - '(Use "NULL" to omit a mask at a given stage)') + traits.Either("NULL", File(exists=True)), + min_ver="2.2.0", + xor=["moving_image_mask"], + desc="Masks used to limit metric sampling region of the moving image, defined per registration stage" + '(Use "NULL" to omit a mask at a given stage)', + ) save_state = File( - argstr='--save-state %s', + argstr="--save-state %s", exists=False, - desc= - 'Filename for saving the internal restorable state of the registration' + desc="Filename for saving the internal restorable state of the registration", ) restore_state = File( - argstr='--restore-state %s', + argstr="--restore-state %s", exists=True, - desc= - 'Filename for restoring the internal restorable state of the registration' + desc="Filename for restoring the internal restorable state of the registration", ) initial_moving_transform = InputMultiPath( File(exists=True), - argstr='%s', - desc='A transform or a list of transforms that should be applied ' - 'before the registration begins. Note that, when a list is given, ' - 'the transformations are applied in reverse order.', - xor=['initial_moving_transform_com']) + argstr="%s", + desc="A transform or a list of transforms that should be applied " + "before the registration begins. Note that, when a list is given, " + "the transformations are applied in reverse order.", + xor=["initial_moving_transform_com"], + ) invert_initial_moving_transform = InputMultiPath( traits.Bool(), requires=["initial_moving_transform"], - desc='One boolean or a list of booleans that indicate' - 'whether the inverse(s) of the transform(s) defined' - 'in initial_moving_transform should be used.', - xor=['initial_moving_transform_com']) + desc="One boolean or a list of booleans that indicate" + "whether the inverse(s) of the transform(s) defined" + "in initial_moving_transform should be used.", + xor=["initial_moving_transform_com"], + ) initial_moving_transform_com = traits.Enum( 0, 1, 2, - argstr='%s', - xor=['initial_moving_transform'], + argstr="%s", + xor=["initial_moving_transform"], desc="Align the moving_image and fixed_image before registration using " "the geometric center of the images (=0), the image intensities (=1), " - "or the origin of the images (=2).") - metric_item_trait = traits.Enum("CC", "MeanSquares", "Demons", "GC", "MI", - "Mattes") - metric_stage_trait = traits.Either(metric_item_trait, - traits.List(metric_item_trait)) + "or the origin of the images (=2).", + ) + metric_item_trait = traits.Enum("CC", "MeanSquares", "Demons", "GC", "MI", "Mattes") + metric_stage_trait = traits.Either( + metric_item_trait, traits.List(metric_item_trait) + ) metric = traits.List( metric_stage_trait, mandatory=True, - desc='the metric(s) to use for each stage. 
' - 'Note that multiple metrics per stage are not supported ' - 'in ANTS 1.9.1 and earlier.') + desc="the metric(s) to use for each stage. " + "Note that multiple metrics per stage are not supported " + "in ANTS 1.9.1 and earlier.", + ) metric_weight_item_trait = traits.Float(1.0, usedefault=True) metric_weight_stage_trait = traits.Either( - metric_weight_item_trait, traits.List(metric_weight_item_trait)) + metric_weight_item_trait, traits.List(metric_weight_item_trait) + ) metric_weight = traits.List( metric_weight_stage_trait, value=[1.0], usedefault=True, - requires=['metric'], + requires=["metric"], mandatory=True, - desc='the metric weight(s) for each stage. ' - 'The weights must sum to 1 per stage.') + desc="the metric weight(s) for each stage. " + "The weights must sum to 1 per stage.", + ) radius_bins_item_trait = traits.Int(5, usedefault=True) radius_bins_stage_trait = traits.Either( - radius_bins_item_trait, traits.List(radius_bins_item_trait)) + radius_bins_item_trait, traits.List(radius_bins_item_trait) + ) radius_or_number_of_bins = traits.List( radius_bins_stage_trait, value=[5], usedefault=True, - requires=['metric_weight'], - desc='the number of bins in each stage for the MI and Mattes metric, ' - 'the radius for other metrics') - sampling_strategy_item_trait = traits.Enum("None", "Regular", "Random", - None) + requires=["metric_weight"], + desc="the number of bins in each stage for the MI and Mattes metric, " + "the radius for other metrics", + ) + sampling_strategy_item_trait = traits.Enum("None", "Regular", "Random", None) sampling_strategy_stage_trait = traits.Either( - sampling_strategy_item_trait, - traits.List(sampling_strategy_item_trait)) + sampling_strategy_item_trait, traits.List(sampling_strategy_item_trait) + ) sampling_strategy = traits.List( trait=sampling_strategy_stage_trait, - requires=['metric_weight'], - desc='the metric sampling strategy (strategies) for each stage') + requires=["metric_weight"], + desc="the metric sampling strategy (strategies) for each stage", + ) sampling_percentage_item_trait = traits.Either( - traits.Range(low=0.0, high=1.0), None) + traits.Range(low=0.0, high=1.0), None + ) sampling_percentage_stage_trait = traits.Either( - sampling_percentage_item_trait, - traits.List(sampling_percentage_item_trait)) + sampling_percentage_item_trait, traits.List(sampling_percentage_item_trait) + ) sampling_percentage = traits.List( trait=sampling_percentage_stage_trait, - requires=['sampling_strategy'], - desc="the metric sampling percentage(s) to use for each stage") - use_estimate_learning_rate_once = traits.List(traits.Bool(), desc='') + requires=["sampling_strategy"], + desc="the metric sampling percentage(s) to use for each stage", + ) + use_estimate_learning_rate_once = traits.List(traits.Bool(), desc="") use_histogram_matching = traits.Either( traits.Bool, - traits.List(traits.Bool(argstr='%s')), + traits.List(traits.Bool(argstr="%s")), default=True, usedefault=True, - desc='Histogram match the images before registration.') + desc="Histogram match the images before registration.", + ) interpolation = traits.Enum( - 'Linear', - 'NearestNeighbor', - 'CosineWindowedSinc', - 'WelchWindowedSinc', - 'HammingWindowedSinc', - 'LanczosWindowedSinc', - 'BSpline', - 'MultiLabel', - 'Gaussian', - argstr='%s', - usedefault=True) + "Linear", + "NearestNeighbor", + "CosineWindowedSinc", + "WelchWindowedSinc", + "HammingWindowedSinc", + "LanczosWindowedSinc", + "BSpline", + "MultiLabel", + "Gaussian", + argstr="%s", + usedefault=True, + ) 
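#   Sketch (not part of the diff hunk above): the interpolation_parameters
#   trait that follows pairs with the interpolation choice just defined.
#   Assuming nipype's documented semantics, it takes a single tuple whose
#   shape depends on the interpolator; values here are illustrative only:
#
#       from nipype.interfaces.ants import Registration
#       reg = Registration()
#       reg.inputs.interpolation = 'BSpline'
#       reg.inputs.interpolation_parameters = (3,)  # spline order
#
#   while 'Gaussian' or 'MultiLabel' would take a (sigma, alpha) float pair.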
interpolation_parameters = traits.Either( traits.Tuple(traits.Int()), # BSpline (order) traits.Tuple( - traits.Float(), # Gaussian/MultiLabel (sigma, alpha) - traits.Float())) + traits.Float(), traits.Float() # Gaussian/MultiLabel (sigma, alpha) + ), + ) write_composite_transform = traits.Bool( - argstr='--write-composite-transform %d', + argstr="--write-composite-transform %d", default_value=False, usedefault=True, - desc='') + desc="", + ) collapse_output_transforms = traits.Bool( - argstr='--collapse-output-transforms %d', + argstr="--collapse-output-transforms %d", default_value=True, usedefault=True, # This should be true for explicit completeness - desc=('Collapse output transforms. Specifically, enabling this option ' - 'combines all adjacent linear transforms and composes all ' - 'adjacent displacement field transforms before writing the ' - 'results to disk.')) + desc=( + "Collapse output transforms. Specifically, enabling this option " + "combines all adjacent linear transforms and composes all " + "adjacent displacement field transforms before writing the " + "results to disk." + ), + ) initialize_transforms_per_stage = traits.Bool( - argstr='--initialize-transforms-per-stage %d', + argstr="--initialize-transforms-per-stage %d", default_value=False, usedefault=True, # This should be true for explicit completeness - desc= - ('Initialize linear transforms from the previous stage. By enabling this option, ' - 'the current linear stage transform is directly intialized from the previous ' - 'stages linear transform; this allows multiple linear stages to be run where ' - 'each stage directly updates the estimated linear transform from the previous ' - 'stage. (e.g. Translation -> Rigid -> Affine). ')) + desc=( + "Initialize linear transforms from the previous stage. By enabling this option, " + "the current linear stage transform is directly initialized from the previous " + "stage's linear transform; this allows multiple linear stages to be run where " + "each stage directly updates the estimated linear transform from the previous " + "stage. (e.g. Translation -> Rigid -> Affine). " + ), + ) # NOTE: Even though only 0=False and 1=True are allowed, ants uses integer # values instead of booleans float = traits.Bool( - argstr='--float %d', + argstr="--float %d", default_value=False, - desc='Use float instead of double for computations.') + desc="Use float instead of double for computations.", + ) transforms = traits.List( - traits.Enum('Rigid', 'Affine', 'CompositeAffine', 'Similarity', - 'Translation', 'BSpline', 'GaussianDisplacementField', - 'TimeVaryingVelocityField', - 'TimeVaryingBSplineVelocityField', 'SyN', 'BSplineSyN', - 'Exponential', 'BSplineExponential'), - argstr='%s', - mandatory=True) + traits.Enum( + "Rigid", + "Affine", + "CompositeAffine", + "Similarity", + "Translation", + "BSpline", + "GaussianDisplacementField", + "TimeVaryingVelocityField", + "TimeVaryingBSplineVelocityField", + "SyN", + "BSplineSyN", + "Exponential", + "BSplineExponential", + ), + argstr="%s", + mandatory=True, + ) # TODO: input checking and allow defaults # All parameters must be specified for BSplineDisplacementField, TimeVaryingBSplineVelocityField, BSplineSyN, # Exponential, and BSplineExponential. EVEN DEFAULTS!
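Each transform_parameters entry is matched positionally to the transform at the same index, and the expected tuple arity depends on the transform family (the Either alternatives in the next hunk). A hedged sketch of two common cases, reusing the reg node from the previous example; the SyN parameter meanings follow the ANTs convention:

    reg.inputs.transforms = ["Rigid", "SyN"]
    reg.inputs.transform_parameters = [
        (0.1,),            # linear transforms take a single gradient-step value
        (0.25, 3.0, 0.0),  # SyN: gradient step, update field sigma, total field sigma
    ]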
@@ -445,111 +494,119 @@ class RegistrationInputSpec(ANTSCommandInputSpec): traits.Tuple( traits.Float(), # GaussianDisplacementField, SyN traits.Float(), - traits.Float()), + traits.Float(), + ), traits.Tuple( traits.Float(), # BSplineSyn, traits.Int(), # BSplineDisplacementField, traits.Int(), # TimeVaryingBSplineVelocityField - traits.Int()), + traits.Int(), + ), traits.Tuple( traits.Float(), # TimeVaryingVelocityField traits.Int(), traits.Float(), traits.Float(), traits.Float(), - traits.Float()), + traits.Float(), + ), traits.Tuple( traits.Float(), # Exponential traits.Float(), traits.Float(), - traits.Int()), + traits.Int(), + ), traits.Tuple( traits.Float(), # BSplineExponential traits.Int(), traits.Int(), traits.Int(), - traits.Int()), - )) + traits.Int(), + ), + ) + ) restrict_deformation = traits.List( traits.List(traits.Enum(0, 1)), - desc=("This option allows the user to restrict the optimization of " - "the displacement field, translation, rigid or affine transform " - "on a per-component basis. For example, if one wants to limit " - "the deformation or rotation of 3-D volume to the first two " - "dimensions, this is possible by specifying a weight vector of " - "'1x1x0' for a deformation field or '1x1x0x1x1x0' for a rigid " - "transformation. Low-dimensional restriction only works if " - "there are no preceding transformations.")) + desc=( + "This option allows the user to restrict the optimization of " + "the displacement field, translation, rigid or affine transform " + "on a per-component basis. For example, if one wants to limit " + "the deformation or rotation of 3-D volume to the first two " + "dimensions, this is possible by specifying a weight vector of " + "'1x1x0' for a deformation field or '1x1x0x1x1x0' for a rigid " + "transformation. Low-dimensional restriction only works if " + "there are no preceding transformations." 
+ ), + ) # Convergence flags number_of_iterations = traits.List(traits.List(traits.Int())) smoothing_sigmas = traits.List(traits.List(traits.Float()), mandatory=True) sigma_units = traits.List( - traits.Enum('mm', 'vox'), - requires=['smoothing_sigmas'], - desc="units for smoothing sigmas") + traits.Enum("mm", "vox"), + requires=["smoothing_sigmas"], + desc="units for smoothing sigmas", + ) shrink_factors = traits.List(traits.List(traits.Int()), mandatory=True) convergence_threshold = traits.List( trait=traits.Float(), value=[1e-6], minlen=1, - requires=['number_of_iterations'], - usedefault=True) + requires=["number_of_iterations"], + usedefault=True, + ) convergence_window_size = traits.List( trait=traits.Int(), value=[10], minlen=1, - requires=['convergence_threshold'], - usedefault=True) + requires=["convergence_threshold"], + usedefault=True, + ) # Output flags - output_transform_prefix = Str( - "transform", usedefault=True, argstr="%s", desc="") - output_warped_image = traits.Either( - traits.Bool, File(), hash_files=False, desc="") + output_transform_prefix = Str("transform", usedefault=True, argstr="%s", desc="") + output_warped_image = traits.Either(traits.Bool, File(), hash_files=False, desc="") output_inverse_warped_image = traits.Either( - traits.Bool, - File(), - hash_files=False, - requires=['output_warped_image'], - desc="") + traits.Bool, File(), hash_files=False, requires=["output_warped_image"], desc="" + ) winsorize_upper_quantile = traits.Range( low=0.0, high=1.0, value=1.0, - argstr='%s', + argstr="%s", usedefault=True, - desc="The Upper quantile to clip image ranges") + desc="The Upper quantile to clip image ranges", + ) winsorize_lower_quantile = traits.Range( low=0.0, high=1.0, value=0.0, - argstr='%s', + argstr="%s", usedefault=True, - desc="The Lower quantile to clip image ranges") + desc="The Lower quantile to clip image ranges", + ) - verbose = traits.Bool(argstr='-v', default_value=False, usedefault=True) + verbose = traits.Bool(argstr="-v", default_value=False, usedefault=True) class RegistrationOutputSpec(TraitedSpec): forward_transforms = traits.List( - File(exists=True), - desc='List of output transforms for forward registration') + File(exists=True), desc="List of output transforms for forward registration" + ) reverse_transforms = traits.List( - File(exists=True), - desc='List of output transforms for reverse registration') + File(exists=True), desc="List of output transforms for reverse registration" + ) forward_invert_flags = traits.List( - traits.Bool(), - desc='List of flags corresponding to the forward transforms') + traits.Bool(), desc="List of flags corresponding to the forward transforms" + ) reverse_invert_flags = traits.List( - traits.Bool(), - desc='List of flags corresponding to the reverse transforms') - composite_transform = File(exists=True, desc='Composite transform file') - inverse_composite_transform = File(desc='Inverse composite transform file') + traits.Bool(), desc="List of flags corresponding to the reverse transforms" + ) + composite_transform = File(exists=True, desc="Composite transform file") + inverse_composite_transform = File(desc="Inverse composite transform file") warped_image = File(desc="Outputs warped image") inverse_warped_image = File(desc="Outputs the inverse of the warped image") save_state = File(desc="The saved registration state to be restored") - metric_value = traits.Float(desc='the final value of metric') - elapsed_time = traits.Float( - desc='the total elapsed time as reported by ANTs') + metric_value = 
traits.Float(desc="the final value of metric") + elapsed_time = traits.Float(desc="the total elapsed time as reported by ANTs") class Registration(ANTSCommand): @@ -917,15 +974,20 @@ class Registration(ANTSCommand): --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' """ - DEF_SAMPLING_STRATEGY = 'None' + + DEF_SAMPLING_STRATEGY = "None" """The default sampling strategy argument.""" - _cmd = 'antsRegistration' + _cmd = "antsRegistration" input_spec = RegistrationInputSpec output_spec = RegistrationOutputSpec _quantilesDone = False _linear_transform_names = [ - 'Rigid', 'Affine', 'Translation', 'CompositeAffine', 'Similarity' + "Rigid", + "Affine", + "Translation", + "CompositeAffine", + "Similarity", ] def __init__(self, **inputs): @@ -933,20 +995,21 @@ def __init__(self, **inputs): self._elapsed_time = None self._metric_value = None - def _run_interface(self, runtime, correct_return_codes=(0, )): + def _run_interface(self, runtime, correct_return_codes=(0,)): runtime = super(Registration, self)._run_interface(runtime) # Parse some profiling info output = runtime.stdout or runtime.merged if output: - lines = output.split('\n') + lines = output.split("\n") for l in lines[::-1]: # This should be the last line - if l.strip().startswith('Total elapsed time:'): - self._elapsed_time = float(l.strip().replace( - 'Total elapsed time: ', '')) - elif 'DIAGNOSTIC' in l: - self._metric_value = float(l.split(',')[2]) + if l.strip().startswith("Total elapsed time:"): + self._elapsed_time = float( + l.strip().replace("Total elapsed time: ", "") + ) + elif "DIAGNOSTIC" in l: + self._metric_value = float(l.split(",")[2]) break return runtime @@ -968,18 +1031,20 @@ def _format_metric(self, index): metric=name_input, weight=self.inputs.metric_weight[index], radius_or_bins=self.inputs.radius_or_number_of_bins[index], - optional=self.inputs.radius_or_number_of_bins[index]) + optional=self.inputs.radius_or_number_of_bins[index], + ) # The optional sampling strategy and percentage. - if isdefined(self.inputs.sampling_strategy - ) and self.inputs.sampling_strategy: + if isdefined(self.inputs.sampling_strategy) and self.inputs.sampling_strategy: sampling_strategy = self.inputs.sampling_strategy[index] if sampling_strategy: - stage_inputs['sampling_strategy'] = sampling_strategy - if isdefined(self.inputs.sampling_percentage - ) and self.inputs.sampling_percentage: + stage_inputs["sampling_strategy"] = sampling_strategy + if ( + isdefined(self.inputs.sampling_percentage) + and self.inputs.sampling_percentage + ): sampling_percentage = self.inputs.sampling_percentage[index] if sampling_percentage: - stage_inputs['sampling_percentage'] = sampling_percentage + stage_inputs["sampling_percentage"] = sampling_percentage # Make a list of metric specifications, one per -m command line # argument for the current stage. @@ -1013,16 +1078,18 @@ def _format_metric(self, index): @staticmethod def _format_metric_argument(**kwargs): - retval = '%s[ %s, %s, %g, %d' % (kwargs['metric'], - kwargs['fixed_image'], - kwargs['moving_image'], - kwargs['weight'], - kwargs['radius_or_bins']) + retval = "%s[ %s, %s, %g, %d" % ( + kwargs["metric"], + kwargs["fixed_image"], + kwargs["moving_image"], + kwargs["weight"], + kwargs["radius_or_bins"], + ) # The optional sampling strategy. 
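Together with the optional sampling suffix handled just below, this helper renders one --metric specification per stage. An illustrative call of the private static helper, using the placeholder names from the sketch above; the expected string follows directly from the format codes ("%g" prints 1.0 as 1):

    from nipype.interfaces.ants import Registration

    arg = Registration._format_metric_argument(
        metric="Mattes",
        fixed_image="fixed1.nii",    # placeholder file
        moving_image="moving1.nii",  # placeholder file
        weight=1.0,
        radius_or_bins=32,
        sampling_strategy="Random",
        sampling_percentage=0.05,
    )
    assert arg == "Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ]"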
- if 'sampling_strategy' in kwargs: - sampling_strategy = kwargs['sampling_strategy'] - elif 'sampling_percentage' in kwargs: + if "sampling_strategy" in kwargs: + sampling_strategy = kwargs["sampling_strategy"] + elif "sampling_percentage" in kwargs: # The sampling percentage is specified but not the # sampling strategy. Use the default strategy. sampling_strategy = Registration.DEF_SAMPLING_STRATEGY @@ -1030,44 +1097,53 @@ def _format_metric_argument(**kwargs): sampling_strategy = None # Format the optional sampling arguments. if sampling_strategy: - retval += ', %s' % sampling_strategy - if 'sampling_percentage' in kwargs: - retval += ', %g' % kwargs['sampling_percentage'] + retval += ", %s" % sampling_strategy + if "sampling_percentage" in kwargs: + retval += ", %g" % kwargs["sampling_percentage"] - retval += ' ]' + retval += " ]" return retval def _format_transform(self, index): retval = [] - retval.append('%s[ ' % self.inputs.transforms[index]) - parameters = ', '.join([ - str(element) for element in self.inputs.transform_parameters[index] - ]) - retval.append('%s' % parameters) - retval.append(' ]') + retval.append("%s[ " % self.inputs.transforms[index]) + parameters = ", ".join( + [str(element) for element in self.inputs.transform_parameters[index]] + ) + retval.append("%s" % parameters) + retval.append(" ]") return "".join(retval) def _format_registration(self): retval = [] for ii in range(len(self.inputs.transforms)): - retval.append('--transform %s' % (self._format_transform(ii))) + retval.append("--transform %s" % (self._format_transform(ii))) for metric in self._format_metric(ii): - retval.append('--metric %s' % metric) - retval.append('--convergence %s' % self._format_convergence(ii)) + retval.append("--metric %s" % metric) + retval.append("--convergence %s" % self._format_convergence(ii)) if isdefined(self.inputs.sigma_units): retval.append( - '--smoothing-sigmas %s%s' % - (self._format_xarray(self.inputs.smoothing_sigmas[ii]), - self.inputs.sigma_units[ii])) + "--smoothing-sigmas %s%s" + % ( + self._format_xarray(self.inputs.smoothing_sigmas[ii]), + self.inputs.sigma_units[ii], + ) + ) else: - retval.append('--smoothing-sigmas %s' % self._format_xarray( - self.inputs.smoothing_sigmas[ii])) - retval.append('--shrink-factors %s' % self._format_xarray( - self.inputs.shrink_factors[ii])) + retval.append( + "--smoothing-sigmas %s" + % self._format_xarray(self.inputs.smoothing_sigmas[ii]) + ) + retval.append( + "--shrink-factors %s" + % self._format_xarray(self.inputs.shrink_factors[ii]) + ) if isdefined(self.inputs.use_estimate_learning_rate_once): - retval.append('--use-estimate-learning-rate-once %d' % - self.inputs.use_estimate_learning_rate_once[ii]) + retval.append( + "--use-estimate-learning-rate-once %d" + % self.inputs.use_estimate_learning_rate_once[ii] + ) if isdefined(self.inputs.use_histogram_matching): # use_histogram_matching is either a common flag for all transforms # or a list of transform-specific flags @@ -1075,50 +1151,59 @@ def _format_registration(self): histval = self.inputs.use_histogram_matching else: histval = self.inputs.use_histogram_matching[ii] - retval.append('--use-histogram-matching %d' % histval) + retval.append("--use-histogram-matching %d" % histval) if isdefined(self.inputs.restrict_deformation): retval.append( - '--restrict-deformation %s' % self._format_xarray( - self.inputs.restrict_deformation[ii])) - if any((isdefined(self.inputs.fixed_image_masks), - isdefined(self.inputs.moving_image_masks))): + "--restrict-deformation %s" + % 
self._format_xarray(self.inputs.restrict_deformation[ii]) + ) + if any( + ( + isdefined(self.inputs.fixed_image_masks), + isdefined(self.inputs.moving_image_masks), + ) + ): if isdefined(self.inputs.fixed_image_masks): - fixed_masks = ensure_list( - self.inputs.fixed_image_masks) + fixed_masks = ensure_list(self.inputs.fixed_image_masks) fixed_mask = fixed_masks[ii if len(fixed_masks) > 1 else 0] else: - fixed_mask = 'NULL' + fixed_mask = "NULL" if isdefined(self.inputs.moving_image_masks): - moving_masks = ensure_list( - self.inputs.moving_image_masks) - moving_mask = moving_masks[ii - if len(moving_masks) > 1 else 0] + moving_masks = ensure_list(self.inputs.moving_image_masks) + moving_mask = moving_masks[ii if len(moving_masks) > 1 else 0] else: - moving_mask = 'NULL' - retval.append('--masks [ %s, %s ]' % (fixed_mask, moving_mask)) + moving_mask = "NULL" + retval.append("--masks [ %s, %s ]" % (fixed_mask, moving_mask)) return " ".join(retval) def _get_outputfilenames(self, inverse=False): output_filename = None if not inverse: - if isdefined(self.inputs.output_warped_image) and \ - self.inputs.output_warped_image: + if ( + isdefined(self.inputs.output_warped_image) + and self.inputs.output_warped_image + ): output_filename = self.inputs.output_warped_image if isinstance(output_filename, bool): - output_filename = '%s_Warped.nii.gz' % self.inputs.output_transform_prefix + output_filename = ( + "%s_Warped.nii.gz" % self.inputs.output_transform_prefix + ) return output_filename inv_output_filename = None - if isdefined(self.inputs.output_inverse_warped_image) and \ - self.inputs.output_inverse_warped_image: + if ( + isdefined(self.inputs.output_inverse_warped_image) + and self.inputs.output_inverse_warped_image + ): inv_output_filename = self.inputs.output_inverse_warped_image if isinstance(inv_output_filename, bool): - inv_output_filename = '%s_InverseWarped.nii.gz' % self.inputs.output_transform_prefix + inv_output_filename = ( + "%s_InverseWarped.nii.gz" % self.inputs.output_transform_prefix + ) return inv_output_filename def _format_convergence(self, ii): - convergence_iter = self._format_xarray( - self.inputs.number_of_iterations[ii]) + convergence_iter = self._format_xarray(self.inputs.number_of_iterations[ii]) if len(self.inputs.convergence_threshold) > ii: convergence_value = self.inputs.convergence_threshold[ii] else: @@ -1127,19 +1212,25 @@ def _format_convergence(self, ii): convergence_ws = self.inputs.convergence_window_size[ii] else: convergence_ws = self.inputs.convergence_window_size[0] - return '[ %s, %g, %d ]' % (convergence_iter, convergence_value, - convergence_ws) + return "[ %s, %g, %d ]" % (convergence_iter, convergence_value, convergence_ws) def _format_winsorize_image_intensities(self): - if not self.inputs.winsorize_upper_quantile > self.inputs.winsorize_lower_quantile: + if ( + not self.inputs.winsorize_upper_quantile + > self.inputs.winsorize_lower_quantile + ): raise RuntimeError( - "Upper bound MUST be more than lower bound: %g > %g" % - (self.inputs.winsorize_upper_quantile, - self.inputs.winsorize_lower_quantile)) + "Upper bound MUST be more than lower bound: %g > %g" + % ( + self.inputs.winsorize_upper_quantile, + self.inputs.winsorize_lower_quantile, + ) + ) self._quantilesDone = True - return '--winsorize-image-intensities [ %s, %s ]' % ( + return "--winsorize-image-intensities [ %s, %s ]" % ( self.inputs.winsorize_lower_quantile, - self.inputs.winsorize_upper_quantile) + self.inputs.winsorize_upper_quantile, + ) def 
_get_initial_transform_filenames(self): n_transforms = len(self.inputs.initial_moving_transform) @@ -1150,60 +1241,76 @@ def _get_initial_transform_filenames(self): if len(self.inputs.invert_initial_moving_transform) != n_transforms: raise Exception( 'Inputs "initial_moving_transform" and "invert_initial_moving_transform"' - 'should have the same length.') + " should have the same length." + ) invert_flags = self.inputs.invert_initial_moving_transform retval = [ - "[ %s, %d ]" % (xfm, int(flag)) for xfm, flag in zip( - self.inputs.initial_moving_transform, invert_flags) + "[ %s, %d ]" % (xfm, int(flag)) + for xfm, flag in zip(self.inputs.initial_moving_transform, invert_flags) ] - return " ".join(['--initial-moving-transform'] + retval) + return " ".join(["--initial-moving-transform"] + retval) def _format_arg(self, opt, spec, val): - if opt == 'fixed_image_mask': + if opt == "fixed_image_mask": if isdefined(self.inputs.moving_image_mask): - return '--masks [ %s, %s ]' % (self.inputs.fixed_image_mask, - self.inputs.moving_image_mask) + return "--masks [ %s, %s ]" % ( + self.inputs.fixed_image_mask, + self.inputs.moving_image_mask, + ) else: - return '--masks %s' % self.inputs.fixed_image_mask - elif opt == 'transforms': + return "--masks %s" % self.inputs.fixed_image_mask + elif opt == "transforms": return self._format_registration() - elif opt == 'initial_moving_transform': + elif opt == "initial_moving_transform": return self._get_initial_transform_filenames() - elif opt == 'initial_moving_transform_com': - do_center_of_mass_init = self.inputs.initial_moving_transform_com \ - if isdefined(self.inputs.initial_moving_transform_com) else 0 # Just do the default behavior - return '--initial-moving-transform [ %s, %s, %d ]' % ( - self.inputs.fixed_image[0], self.inputs.moving_image[0], - do_center_of_mass_init) - elif opt == 'interpolation': - if self.inputs.interpolation in ['BSpline', 'MultiLabel', 'Gaussian'] and \ - isdefined(self.inputs.interpolation_parameters): - return '--interpolation %s[ %s ]' % ( - self.inputs.interpolation, ', '.join([ - str(param) - for param in self.inputs.interpolation_parameters - ])) + elif opt == "initial_moving_transform_com": + do_center_of_mass_init = ( + self.inputs.initial_moving_transform_com + if isdefined(self.inputs.initial_moving_transform_com) + else 0 + ) # Just do the default behavior + return "--initial-moving-transform [ %s, %s, %d ]" % ( + self.inputs.fixed_image[0], + self.inputs.moving_image[0], + do_center_of_mass_init, + ) + elif opt == "interpolation": + if self.inputs.interpolation in [ + "BSpline", + "MultiLabel", + "Gaussian", + ] and isdefined(self.inputs.interpolation_parameters): + return "--interpolation %s[ %s ]" % ( + self.inputs.interpolation, + ", ".join( + [str(param) for param in self.inputs.interpolation_parameters] + ), + ) else: - return '--interpolation %s' % self.inputs.interpolation - elif opt == 'output_transform_prefix': + return "--interpolation %s" % self.inputs.interpolation + elif opt == "output_transform_prefix": out_filename = self._get_outputfilenames(inverse=False) inv_out_filename = self._get_outputfilenames(inverse=True) if out_filename and inv_out_filename: - return '--output [ %s, %s, %s ]' % ( - self.inputs.output_transform_prefix, out_filename, - inv_out_filename) + return "--output [ %s, %s, %s ]" % ( + self.inputs.output_transform_prefix, + out_filename, + inv_out_filename, + ) elif out_filename: - return '--output [ %s, %s ]' % ( - self.inputs.output_transform_prefix, out_filename) + return "--output 
[ %s, %s ]" % ( + self.inputs.output_transform_prefix, + out_filename, + ) else: - return '--output %s' % self.inputs.output_transform_prefix - elif opt == 'winsorize_upper_quantile' or opt == 'winsorize_lower_quantile': + return "--output %s" % self.inputs.output_transform_prefix + elif opt == "winsorize_upper_quantile" or opt == "winsorize_lower_quantile": if not self._quantilesDone: return self._format_winsorize_image_intensities() else: self._quantilesDone = False - return '' # Must return something for argstr! + return "" # Must return something for argstr! # This feature was removed from recent versions of antsRegistration due to corrupt outputs. # elif opt == 'collapse_linear_transforms_to_fixed_image_header': # return self._formatCollapseLinearTransformsToFixedImageHeader() @@ -1211,14 +1318,14 @@ def _format_arg(self, opt, spec, val): def _output_filenames(self, prefix, count, transform, inverse=False): self.low_dimensional_transform_map = { - 'Rigid': 'Rigid.mat', - 'Affine': 'Affine.mat', - 'GenericAffine': 'GenericAffine.mat', - 'CompositeAffine': 'Affine.mat', - 'Similarity': 'Similarity.mat', - 'Translation': 'Translation.mat', - 'BSpline': 'BSpline.txt', - 'Initial': 'DerivedInitialMovingTranslation.mat' + "Rigid": "Rigid.mat", + "Affine": "Affine.mat", + "GenericAffine": "GenericAffine.mat", + "CompositeAffine": "Affine.mat", + "Similarity": "Similarity.mat", + "Translation": "Translation.mat", + "BSpline": "BSpline.txt", + "Initial": "DerivedInitialMovingTranslation.mat", } if transform in list(self.low_dimensional_transform_map.keys()): suffix = self.low_dimensional_transform_map[transform] @@ -1226,126 +1333,145 @@ def _output_filenames(self, prefix, count, transform, inverse=False): else: inverse_mode = False # These are not analytically invertable if inverse: - suffix = 'InverseWarp.nii.gz' + suffix = "InverseWarp.nii.gz" else: - suffix = 'Warp.nii.gz' - return '%s%d%s' % (prefix, count, suffix), inverse_mode + suffix = "Warp.nii.gz" + return "%s%d%s" % (prefix, count, suffix), inverse_mode def _list_outputs(self): outputs = self._outputs().get() - outputs['forward_transforms'] = [] - outputs['forward_invert_flags'] = [] - outputs['reverse_transforms'] = [] - outputs['reverse_invert_flags'] = [] + outputs["forward_transforms"] = [] + outputs["forward_invert_flags"] = [] + outputs["reverse_transforms"] = [] + outputs["reverse_invert_flags"] = [] # invert_initial_moving_transform should be always defined, even if # there's no initial transform invert_initial_moving_transform = [False] * len( - self.inputs.initial_moving_transform) + self.inputs.initial_moving_transform + ) if isdefined(self.inputs.invert_initial_moving_transform): - invert_initial_moving_transform = self.inputs.invert_initial_moving_transform + invert_initial_moving_transform = ( + self.inputs.invert_initial_moving_transform + ) if self.inputs.write_composite_transform: - filename = self.inputs.output_transform_prefix + 'Composite.h5' - outputs['composite_transform'] = os.path.abspath(filename) - filename = self.inputs.output_transform_prefix + \ - 'InverseComposite.h5' - outputs['inverse_composite_transform'] = os.path.abspath(filename) + filename = self.inputs.output_transform_prefix + "Composite.h5" + outputs["composite_transform"] = os.path.abspath(filename) + filename = self.inputs.output_transform_prefix + "InverseComposite.h5" + outputs["inverse_composite_transform"] = os.path.abspath(filename) # If composite transforms are written, then individuals are not written (as of 2014-10-26 else: if not 
self.inputs.collapse_output_transforms: transform_count = 0 if isdefined(self.inputs.initial_moving_transform): outputs[ - 'forward_transforms'] += self.inputs.initial_moving_transform - outputs[ - 'forward_invert_flags'] += invert_initial_moving_transform - outputs['reverse_transforms'] = self.inputs.initial_moving_transform + \ - outputs['reverse_transforms'] - outputs['reverse_invert_flags'] = [ - not e for e in invert_initial_moving_transform - ] + outputs['reverse_invert_flags'] # Prepend - transform_count += len( - self.inputs.initial_moving_transform) + "forward_transforms" + ] += self.inputs.initial_moving_transform + outputs["forward_invert_flags"] += invert_initial_moving_transform + outputs["reverse_transforms"] = ( + self.inputs.initial_moving_transform + + outputs["reverse_transforms"] + ) + outputs["reverse_invert_flags"] = ( + [not e for e in invert_initial_moving_transform] + + outputs["reverse_invert_flags"] + ) # Prepend + transform_count += len(self.inputs.initial_moving_transform) elif isdefined(self.inputs.initial_moving_transform_com): forward_filename, forward_inversemode = self._output_filenames( - self.inputs.output_transform_prefix, transform_count, - 'Initial') + self.inputs.output_transform_prefix, transform_count, "Initial" + ) reverse_filename, reverse_inversemode = self._output_filenames( - self.inputs.output_transform_prefix, transform_count, - 'Initial', True) - outputs['forward_transforms'].append( - os.path.abspath(forward_filename)) - outputs['forward_invert_flags'].append(False) - outputs['reverse_transforms'].insert( - 0, os.path.abspath(reverse_filename)) - outputs['reverse_invert_flags'].insert(0, True) + self.inputs.output_transform_prefix, + transform_count, + "Initial", + True, + ) + outputs["forward_transforms"].append( + os.path.abspath(forward_filename) + ) + outputs["forward_invert_flags"].append(False) + outputs["reverse_transforms"].insert( + 0, os.path.abspath(reverse_filename) + ) + outputs["reverse_invert_flags"].insert(0, True) transform_count += 1 for count in range(len(self.inputs.transforms)): forward_filename, forward_inversemode = self._output_filenames( - self.inputs.output_transform_prefix, transform_count, - self.inputs.transforms[count]) + self.inputs.output_transform_prefix, + transform_count, + self.inputs.transforms[count], + ) reverse_filename, reverse_inversemode = self._output_filenames( - self.inputs.output_transform_prefix, transform_count, - self.inputs.transforms[count], True) - outputs['forward_transforms'].append( - os.path.abspath(forward_filename)) - outputs['forward_invert_flags'].append(forward_inversemode) - outputs['reverse_transforms'].insert( - 0, os.path.abspath(reverse_filename)) - outputs['reverse_invert_flags'].insert( - 0, reverse_inversemode) + self.inputs.output_transform_prefix, + transform_count, + self.inputs.transforms[count], + True, + ) + outputs["forward_transforms"].append( + os.path.abspath(forward_filename) + ) + outputs["forward_invert_flags"].append(forward_inversemode) + outputs["reverse_transforms"].insert( + 0, os.path.abspath(reverse_filename) + ) + outputs["reverse_invert_flags"].insert(0, reverse_inversemode) transform_count += 1 else: transform_count = 0 is_linear = [ - t in self._linear_transform_names - for t in self.inputs.transforms + t in self._linear_transform_names for t in self.inputs.transforms ] collapse_list = [] - if isdefined(self.inputs.initial_moving_transform) or \ - isdefined(self.inputs.initial_moving_transform_com): + if 
isdefined(self.inputs.initial_moving_transform) or isdefined( + self.inputs.initial_moving_transform_com + ): is_linear.insert(0, True) # Only files returned by collapse_output_transforms if any(is_linear): - collapse_list.append('GenericAffine') + collapse_list.append("GenericAffine") if not all(is_linear): - collapse_list.append('SyN') + collapse_list.append("SyN") for transform in collapse_list: forward_filename, forward_inversemode = self._output_filenames( self.inputs.output_transform_prefix, transform_count, transform, - inverse=False) + inverse=False, + ) reverse_filename, reverse_inversemode = self._output_filenames( self.inputs.output_transform_prefix, transform_count, transform, - inverse=True) - outputs['forward_transforms'].append( - os.path.abspath(forward_filename)) - outputs['forward_invert_flags'].append(forward_inversemode) - outputs['reverse_transforms'].append( - os.path.abspath(reverse_filename)) - outputs['reverse_invert_flags'].append(reverse_inversemode) + inverse=True, + ) + outputs["forward_transforms"].append( + os.path.abspath(forward_filename) + ) + outputs["forward_invert_flags"].append(forward_inversemode) + outputs["reverse_transforms"].append( + os.path.abspath(reverse_filename) + ) + outputs["reverse_invert_flags"].append(reverse_inversemode) transform_count += 1 out_filename = self._get_outputfilenames(inverse=False) inv_out_filename = self._get_outputfilenames(inverse=True) if out_filename: - outputs['warped_image'] = os.path.abspath(out_filename) + outputs["warped_image"] = os.path.abspath(out_filename) if inv_out_filename: - outputs['inverse_warped_image'] = os.path.abspath(inv_out_filename) + outputs["inverse_warped_image"] = os.path.abspath(inv_out_filename) if len(self.inputs.save_state): - outputs['save_state'] = os.path.abspath(self.inputs.save_state) + outputs["save_state"] = os.path.abspath(self.inputs.save_state) if self._metric_value: - outputs['metric_value'] = self._metric_value + outputs["metric_value"] = self._metric_value if self._elapsed_time: - outputs['elapsed_time'] = self._elapsed_time + outputs["elapsed_time"] = self._elapsed_time return outputs @@ -1354,20 +1480,17 @@ class MeasureImageSimilarityInputSpec(ANTSCommandInputSpec): 2, 3, 4, - argstr='--dimensionality %d', + argstr="--dimensionality %d", position=1, - desc='Dimensionality of the fixed/moving image pair', + desc="Dimensionality of the fixed/moving image pair", ) fixed_image = File( - exists=True, - mandatory=True, - desc='Image to which the moving image is warped', + exists=True, mandatory=True, desc="Image to which the moving image is warped", ) moving_image = File( exists=True, mandatory=True, - desc= - 'Image to apply transformation to (generally a coregistered functional)', + desc="Image to apply transformation to (generally a coregistered functional)", ) metric = traits.Enum( "CC", @@ -1380,41 +1503,42 @@ class MeasureImageSimilarityInputSpec(ANTSCommandInputSpec): mandatory=True, ) metric_weight = traits.Float( - requires=['metric'], + requires=["metric"], default_value=1.0, usedefault=True, desc='The "metricWeight" variable is not used.', ) radius_or_number_of_bins = traits.Int( - requires=['metric'], + requires=["metric"], mandatory=True, - desc='The number of bins in each stage for the MI and Mattes metric, ' - 'or the radius for other metrics', + desc="The number of bins in each stage for the MI and Mattes metric, " + "or the radius for other metrics", ) sampling_strategy = traits.Enum( "None", "Regular", "Random", - requires=['metric'], + requires=["metric"], 
usedefault=True, - desc='Manner of choosing point set over which to optimize the metric. ' - 'Defaults to "None" (i.e. a dense sampling of one sample per voxel).') + desc="Manner of choosing point set over which to optimize the metric. " + 'Defaults to "None" (i.e. a dense sampling of one sample per voxel).', + ) sampling_percentage = traits.Either( traits.Range(low=0.0, high=1.0), - requires=['metric'], + requires=["metric"], mandatory=True, - desc= - 'Percentage of points accessible to the sampling strategy over which ' - 'to optimize the metric.') + desc="Percentage of points accessible to the sampling strategy over which " + "to optimize the metric.", + ) fixed_image_mask = File( exists=True, - argstr='%s', - desc='mask used to limit metric sampling region of the fixed image', + argstr="%s", + desc="mask used to limit metric sampling region of the fixed image", ) moving_image_mask = File( exists=True, - requires=['fixed_image_mask'], - desc='mask used to limit metric sampling region of the moving image', + requires=["fixed_image_mask"], + desc="mask used to limit metric sampling region of the moving image", ) @@ -1445,14 +1569,15 @@ class MeasureImageSimilarity(ANTSCommand): 'MeasureImageSimilarity --dimensionality 3 --masks ["mask.nii","mask.nii.gz"] \ --metric MI["T1.nii","resting.nii",1.0,5,Regular,1.0]' """ - _cmd = 'MeasureImageSimilarity' + + _cmd = "MeasureImageSimilarity" input_spec = MeasureImageSimilarityInputSpec output_spec = MeasureImageSimilarityOutputSpec def _metric_constructor(self): - retval = '--metric {metric}["{fixed_image}","{moving_image}",{metric_weight},'\ - '{radius_or_number_of_bins},{sampling_strategy},{sampling_percentage}]'\ - .format( + retval = ( + '--metric {metric}["{fixed_image}","{moving_image}",{metric_weight},' + "{radius_or_number_of_bins},{sampling_strategy},{sampling_percentage}]".format( metric=self.inputs.metric, fixed_image=self.inputs.fixed_image, moving_image=self.inputs.moving_image, @@ -1461,50 +1586,74 @@ def _metric_constructor(self): sampling_strategy=self.inputs.sampling_strategy, sampling_percentage=self.inputs.sampling_percentage, ) + ) return retval def _mask_constructor(self): if self.inputs.moving_image_mask: - retval = '--masks ["{fixed_image_mask}","{moving_image_mask}"]'\ - .format( - fixed_image_mask=self.inputs.fixed_image_mask, - moving_image_mask=self.inputs.moving_image_mask, - ) + retval = '--masks ["{fixed_image_mask}","{moving_image_mask}"]'.format( + fixed_image_mask=self.inputs.fixed_image_mask, + moving_image_mask=self.inputs.moving_image_mask, + ) else: - retval = '--masks "{fixed_image_mask}"'\ - .format( - fixed_image_mask=self.inputs.fixed_image_mask, - ) + retval = '--masks "{fixed_image_mask}"'.format( + fixed_image_mask=self.inputs.fixed_image_mask, + ) return retval def _format_arg(self, opt, spec, val): - if opt == 'metric': + if opt == "metric": return self._metric_constructor() - elif opt == 'fixed_image_mask': + elif opt == "fixed_image_mask": return self._mask_constructor() return super(MeasureImageSimilarity, self)._format_arg(opt, spec, val) def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() - stdout = runtime.stdout.split('\n') + stdout = runtime.stdout.split("\n") outputs.similarity = float(stdout[0]) return outputs class RegistrationSynQuickInputSpec(ANTSCommandInputSpec): - dimension = traits.Enum(3, 2, argstr='-d %d', - usedefault=True, desc='image dimension (2 or 3)') - fixed_image = InputMultiPath(File(exists=True), mandatory=True, argstr='-f %s...', - 
desc='Fixed image or source image or reference image') - moving_image = InputMultiPath(File(exists=True), mandatory=True, argstr='-m %s...', - desc='Moving image or target image') - output_prefix = Str("transform", usedefault=True, argstr='-o %s', - desc="A prefix that is prepended to all output files") - num_threads = traits.Int(default_value=LOCAL_DEFAULT_NUMBER_OF_THREADS, usedefault=True, - desc='Number of threads (default = 1)', argstr='-n %d') - - transform_type = traits.Enum('s', 't', 'r', 'a', 'sr', 'b', 'br', argstr='-t %s', - desc=""" + dimension = traits.Enum( + 3, 2, argstr="-d %d", usedefault=True, desc="image dimension (2 or 3)" + ) + fixed_image = InputMultiPath( + File(exists=True), + mandatory=True, + argstr="-f %s...", + desc="Fixed image or source image or reference image", + ) + moving_image = InputMultiPath( + File(exists=True), + mandatory=True, + argstr="-m %s...", + desc="Moving image or target image", + ) + output_prefix = Str( + "transform", + usedefault=True, + argstr="-o %s", + desc="A prefix that is prepended to all output files", + ) + num_threads = traits.Int( + default_value=LOCAL_DEFAULT_NUMBER_OF_THREADS, + usedefault=True, + desc="Number of threads (default = 1)", + argstr="-n %d", + ) + + transform_type = traits.Enum( + "s", + "t", + "r", + "a", + "sr", + "b", + "br", + argstr="-t %s", + desc=""" transform type t: translation r: rigid @@ -1513,26 +1662,41 @@ class RegistrationSynQuickInputSpec(ANTSCommandInputSpec): sr: rigid + deformable syn b: rigid + affine + deformable b-spline syn br: rigid + deformable b-spline syn""", - usedefault=True) + usedefault=True, + ) - use_histogram_matching = traits.Bool(False, argstr='-j %d', - desc='use histogram matching') - histogram_bins = traits.Int(default_value=32, usedefault=True, argstr='-r %d', - desc='histogram bins for mutual information in SyN stage \ - (default = 32)') - spline_distance = traits.Int(default_value=26, usedefault=True, argstr='-s %d', - desc='spline distance for deformable B-spline SyN transform \ - (default = 26)') - precision_type = traits.Enum('double', 'float', argstr='-p %s', - desc='precision type (default = double)', usedefault=True) + use_histogram_matching = traits.Bool( + False, argstr="-j %d", desc="use histogram matching" + ) + histogram_bins = traits.Int( + default_value=32, + usedefault=True, + argstr="-r %d", + desc="histogram bins for mutual information in SyN stage \ + (default = 32)", + ) + spline_distance = traits.Int( + default_value=26, + usedefault=True, + argstr="-s %d", + desc="spline distance for deformable B-spline SyN transform \ + (default = 26)", + ) + precision_type = traits.Enum( + "double", + "float", + argstr="-p %s", + desc="precision type (default = double)", + usedefault=True, + ) class RegistrationSynQuickOutputSpec(TraitedSpec): warped_image = File(exists=True, desc="Warped image") inverse_warped_image = File(exists=True, desc="Inverse warped image") - out_matrix = File(exists=True, desc='Affine matrix') - forward_warp_field = File(exists=True, desc='Forward warp field') - inverse_warp_field = File(exists=True, desc='Inverse warp field') + out_matrix = File(exists=True, desc="Affine matrix") + forward_warp_field = File(exists=True, desc="Forward warp field") + inverse_warp_field = File(exists=True, desc="Inverse warp field") class RegistrationSynQuick(ANTSCommand): @@ -1566,7 +1730,7 @@ class RegistrationSynQuick(ANTSCommand): >>> reg.run() # doctest: +SKIP """ - _cmd = 'antsRegistrationSyNQuick.sh' + _cmd = "antsRegistrationSyNQuick.sh" input_spec = 
RegistrationSynQuickInputSpec output_spec = RegistrationSynQuickOutputSpec @@ -1578,39 +1742,60 @@ def _num_threads_update(self): pass def _format_arg(self, name, spec, value): - if name == 'precision_type': + if name == "precision_type": return spec.argstr % value[0] return super(RegistrationSynQuick, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() out_base = os.path.abspath(self.inputs.output_prefix) - outputs['warped_image'] = out_base + 'Warped.nii.gz' - outputs['inverse_warped_image'] = out_base + 'InverseWarped.nii.gz' - outputs['out_matrix'] = out_base + '0GenericAffine.mat' + outputs["warped_image"] = out_base + "Warped.nii.gz" + outputs["inverse_warped_image"] = out_base + "InverseWarped.nii.gz" + outputs["out_matrix"] = out_base + "0GenericAffine.mat" - if self.inputs.transform_type not in ('t', 'r', 'a'): - outputs['forward_warp_field'] = out_base + '1Warp.nii.gz' - outputs['inverse_warp_field'] = out_base + '1InverseWarp.nii.gz' + if self.inputs.transform_type not in ("t", "r", "a"): + outputs["forward_warp_field"] = out_base + "1Warp.nii.gz" + outputs["inverse_warp_field"] = out_base + "1InverseWarp.nii.gz" return outputs + class CompositeTransformUtilInputSpec(ANTSCommandInputSpec): - process = traits.Enum('assemble', 'disassemble', argstr='--%s', - position=1, usedefault=True, - desc='What to do with the transform inputs (assemble or disassemble)', - ) - out_file = File(exists=False, argstr='%s', position=2, - desc='Output file path (only used for disassembly).') - in_file = InputMultiPath(File(exists=True), mandatory=True, argstr='%s...', - position=3, desc='Input transform file(s)') - output_prefix = Str("transform", usedefault=True, argstr='%s', position=4, - desc="A prefix that is prepended to all output files (only used for assembly).") + process = traits.Enum( + "assemble", + "disassemble", + argstr="--%s", + position=1, + usedefault=True, + desc="What to do with the transform inputs (assemble or disassemble)", + ) + out_file = File( + exists=False, + argstr="%s", + position=2, + desc="Output file path (only used for disassembly).", + ) + in_file = InputMultiPath( + File(exists=True), + mandatory=True, + argstr="%s...", + position=3, + desc="Input transform file(s)", + ) + output_prefix = Str( + "transform", + usedefault=True, + argstr="%s", + position=4, + desc="A prefix that is prepended to all output files (only used for assembly).", + ) + class CompositeTransformUtilOutputSpec(TraitedSpec): affine_transform = File(desc="Affine transform component") displacement_field = File(desc="Displacement field component") out_file = File(desc="Compound transformation file") + class CompositeTransformUtil(ANTSCommand): """ ANTs utility which can combine or break apart transform files into their individual @@ -1639,7 +1824,7 @@ class CompositeTransformUtil(ANTSCommand): >>> tran.run() # doctest: +SKIP """ - _cmd = 'CompositeTransformUtil' + _cmd = "CompositeTransformUtil" input_spec = CompositeTransformUtilInputSpec output_spec = CompositeTransformUtilOutputSpec @@ -1651,19 +1836,23 @@ def _num_threads_update(self): pass def _format_arg(self, name, spec, value): - if name == 'output_prefix' and self.inputs.process == 'assemble': - return '' - if name == 'out_file' and self.inputs.process == 'disassemble': - return '' + if name == "output_prefix" and self.inputs.process == "assemble": + return "" + if name == "out_file" and self.inputs.process == "disassemble": + return "" return super(CompositeTransformUtil, 
self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() - if self.inputs.process == 'disassemble': - outputs['affine_transform'] = os.path.abspath( - '00_{}_AffineTransform.mat'.format(self.inputs.output_prefix)) - outputs['displacement_field'] = os.path.abspath( - '01_{}_DisplacementFieldTransform.nii.gz'.format(self.inputs.output_prefix)) - if self.inputs.process == 'assemble': - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + if self.inputs.process == "disassemble": + outputs["affine_transform"] = os.path.abspath( + "00_{}_AffineTransform.mat".format(self.inputs.output_prefix) + ) + outputs["displacement_field"] = os.path.abspath( + "01_{}_DisplacementFieldTransform.nii.gz".format( + self.inputs.output_prefix + ) + ) + if self.inputs.process == "assemble": + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs diff --git a/nipype/interfaces/ants/resampling.py b/nipype/interfaces/ants/resampling.py index 94bccbdf7d..a5d6a52c04 100644 --- a/nipype/interfaces/ants/resampling.py +++ b/nipype/interfaces/ants/resampling.py @@ -10,62 +10,70 @@ class WarpTimeSeriesImageMultiTransformInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( - 4, - 3, - argstr='%d', - usedefault=True, - desc='image dimension (3 or 4)', - position=1) + 4, 3, argstr="%d", usedefault=True, desc="image dimension (3 or 4)", position=1 + ) input_image = File( - argstr='%s', + argstr="%s", mandatory=True, copyfile=True, - desc=('image to apply transformation to (generally a ' - 'coregistered functional)')) + desc=( + "image to apply transformation to (generally a " "coregistered functional)" + ), + ) out_postfix = traits.Str( - '_wtsimt', - argstr='%s', + "_wtsimt", + argstr="%s", usedefault=True, - desc=('Postfix that is prepended to all output ' - 'files (default = _wtsimt)')) + desc=("Postfix that is appended to all output " "files (default = _wtsimt)"), + ) reference_image = File( - argstr='-R %s', - xor=['tightest_box'], - desc='reference image space that you wish to warp INTO') + argstr="-R %s", + xor=["tightest_box"], + desc="reference image space that you wish to warp INTO", + ) tightest_box = traits.Bool( - argstr='--tightest-bounding-box', - desc=('computes tightest bounding box (overrided by ' - 'reference_image if given)'), - xor=['reference_image']) + argstr="--tightest-bounding-box", + desc=( + "computes tightest bounding box (overridden by " "reference_image if given)" + ), + xor=["reference_image"], + ) reslice_by_header = traits.Bool( - argstr='--reslice-by-header', - desc=('Uses orientation matrix and origin encoded in ' - 'reference image file header. Not typically used ' - 'with additional transforms')) + argstr="--reslice-by-header", + desc=( + "Uses orientation matrix and origin encoded in " + "reference image file header. 
Not typically used " + "with additional transforms" + ), + ) use_nearest = traits.Bool( - argstr='--use-NN', desc='Use nearest neighbor interpolation') + argstr="--use-NN", desc="Use nearest neighbor interpolation" + ) use_bspline = traits.Bool( - argstr='--use-Bspline', desc='Use 3rd order B-Spline interpolation') + argstr="--use-Bspline", desc="Use 3rd order B-Spline interpolation" + ) transformation_series = InputMultiPath( File(exists=True), - argstr='%s', - desc='transformation file(s) to be applied', + argstr="%s", + desc="transformation file(s) to be applied", mandatory=True, - copyfile=False) + copyfile=False, + ) invert_affine = traits.List( traits.Int, desc=( - 'List of Affine transformations to invert.' - 'E.g.: [1,4,5] inverts the 1st, 4th, and 5th Affines ' - 'found in transformation_series. Note that indexing ' - 'starts with 1 and does not include warp fields. Affine ' - 'transformations are distinguished ' + "List of Affine transformations to invert. " + "E.g.: [1,4,5] inverts the 1st, 4th, and 5th Affines " + "found in transformation_series. Note that indexing " + "starts with 1 and does not include warp fields. Affine " + "transformations are distinguished " 'from warp fields by the word "affine" included in their filenames.' - )) + ), + ) class WarpTimeSeriesImageMultiTransformOutputSpec(TraitedSpec): - output_image = File(exists=True, desc='Warped image') + output_image = File(exists=True, desc="Warped image") class WarpTimeSeriesImageMultiTransform(ANTSCommand): @@ -93,25 +101,23 @@ class WarpTimeSeriesImageMultiTransform(ANTSCommand): -i ants_Affine.txt' """ - _cmd = 'WarpTimeSeriesImageMultiTransform' + _cmd = "WarpTimeSeriesImageMultiTransform" input_spec = WarpTimeSeriesImageMultiTransformInputSpec output_spec = WarpTimeSeriesImageMultiTransformOutputSpec def _format_arg(self, opt, spec, val): - if opt == 'out_postfix': - _, name, ext = split_filename( - os.path.abspath(self.inputs.input_image)) + if opt == "out_postfix": + _, name, ext = split_filename(os.path.abspath(self.inputs.input_image)) return name + val + ext - if opt == 'transformation_series': + if opt == "transformation_series": series = [] affine_counter = 0 affine_invert = [] for transformation in val: - if 'Affine' in transformation and \ - isdefined(self.inputs.invert_affine): + if "Affine" in transformation and isdefined(self.inputs.invert_affine): affine_counter += 1 if affine_counter in self.inputs.invert_affine: - series += ['-i'] + series += ["-i"] affine_invert.append(affine_counter) series += [transformation] @@ -120,23 +126,26 @@ def _format_arg(self, opt, spec, val): if diff_inv: - raise Exceptions( + raise Exception( "Review invert_affine, not all indexes from invert_affine were used, " - "check the description for the full definition") + "check the description for the full definition" + ) - return ' '.join(series) + return " ".join(series) return super(WarpTimeSeriesImageMultiTransform, self)._format_arg( - opt, spec, val) + opt, spec, val + ) def _list_outputs(self): outputs = self._outputs().get() _, name, ext = split_filename(os.path.abspath(self.inputs.input_image)) - outputs['output_image'] = os.path.join(os.getcwd(), ''.join( - (name, self.inputs.out_postfix, ext))) + outputs["output_image"] = os.path.join( + os.getcwd(), "".join((name, self.inputs.out_postfix, ext)) + ) return outputs def _run_interface(self, runtime, correct_return_codes=[0]): - runtime = super(WarpTimeSeriesImageMultiTransform, - self)._run_interface( - runtime, correct_return_codes=[0, 1]) + runtime = 
super(WarpTimeSeriesImageMultiTransform, self)._run_interface( + runtime, correct_return_codes=[0, 1] + ) if "100 % complete" not in runtime.stdout: self.raise_exception(runtime) return runtime @@ -144,70 +153,79 @@ class WarpImageMultiTransformInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( - 3, - 2, - argstr='%d', - usedefault=True, - desc='image dimension (2 or 3)', - position=1) + 3, 2, argstr="%d", usedefault=True, desc="image dimension (2 or 3)", position=1 + ) input_image = File( - argstr='%s', + argstr="%s", mandatory=True, - desc=('image to apply transformation to (generally a ' - 'coregistered functional)'), - position=2) + desc=( + "image to apply transformation to (generally a " "coregistered functional)" + ), + position=2, + ) output_image = File( genfile=True, hash_files=False, - argstr='%s', - desc='name of the output warped image', + argstr="%s", + desc="name of the output warped image", position=3, - xor=['out_postfix']) + xor=["out_postfix"], + ) out_postfix = File( "_wimt", usedefault=True, hash_files=False, - desc=('Postfix that is prepended to all output ' - 'files (default = _wimt)'), - xor=['output_image']) + desc=("Postfix that is appended to all output " "files (default = _wimt)"), + xor=["output_image"], + ) reference_image = File( - argstr='-R %s', - xor=['tightest_box'], - desc='reference image space that you wish to warp INTO') + argstr="-R %s", + xor=["tightest_box"], + desc="reference image space that you wish to warp INTO", + ) tightest_box = traits.Bool( - argstr='--tightest-bounding-box', - desc=('computes tightest bounding box (overrided by ' - 'reference_image if given)'), - xor=['reference_image']) + argstr="--tightest-bounding-box", + desc=( + "computes tightest bounding box (overridden by " "reference_image if given)" + ), + xor=["reference_image"], + ) reslice_by_header = traits.Bool( - argstr='--reslice-by-header', - desc=('Uses orientation matrix and origin encoded in ' - 'reference image file header. Not typically used ' - 'with additional transforms')) + argstr="--reslice-by-header", + desc=( + "Uses orientation matrix and origin encoded in " + "reference image file header. Not typically used " + "with additional transforms" + ), + ) use_nearest = traits.Bool( - argstr='--use-NN', desc='Use nearest neighbor interpolation') + argstr="--use-NN", desc="Use nearest neighbor interpolation" + ) use_bspline = traits.Bool( - argstr='--use-BSpline', desc='Use 3rd order B-Spline interpolation') + argstr="--use-BSpline", desc="Use 3rd order B-Spline interpolation" + ) transformation_series = InputMultiPath( File(exists=True), - argstr='%s', - desc='transformation file(s) to be applied', + argstr="%s", + desc="transformation file(s) to be applied", mandatory=True, - position=-1) + position=-1, + ) invert_affine = traits.List( traits.Int, desc=( - 'List of Affine transformations to invert.' - 'E.g.: [1,4,5] inverts the 1st, 4th, and 5th Affines ' - 'found in transformation_series. Note that indexing ' - 'starts with 1 and does not include warp fields. Affine ' - 'transformations are distinguished ' + "List of Affine transformations to invert. " + "E.g.: [1,4,5] inverts the 1st, 4th, and 5th Affines " + "found in transformation_series. Note that indexing " + "starts with 1 and does not include warp fields. Affine " + "transformations are distinguished " 'from warp fields by the word "affine" included in their filenames.' 
- )) + ), + ) class WarpImageMultiTransformOutputSpec(TraitedSpec): - output_image = File(exists=True, desc='Warped image') + output_image = File(exists=True, desc="Warped image") class WarpImageMultiTransform(ANTSCommand): @@ -237,28 +255,28 @@ class WarpImageMultiTransform(ANTSCommand): """ - _cmd = 'WarpImageMultiTransform' + _cmd = "WarpImageMultiTransform" input_spec = WarpImageMultiTransformInputSpec output_spec = WarpImageMultiTransformOutputSpec def _gen_filename(self, name): - if name == 'output_image': - _, name, ext = split_filename( - os.path.abspath(self.inputs.input_image)) - return ''.join((name, self.inputs.out_postfix, ext)) + if name == "output_image": + _, name, ext = split_filename(os.path.abspath(self.inputs.input_image)) + return "".join((name, self.inputs.out_postfix, ext)) return None def _format_arg(self, opt, spec, val): - if opt == 'transformation_series': + if opt == "transformation_series": series = [] affine_counter = 0 affine_invert = [] for transformation in val: - if "affine" in transformation.lower() and \ - isdefined(self.inputs.invert_affine): + if "affine" in transformation.lower() and isdefined( + self.inputs.invert_affine + ): affine_counter += 1 if affine_counter in self.inputs.invert_affine: - series += ['-i'] + series += ["-i"] affine_invert.append(affine_counter) series += [transformation] @@ -267,19 +285,21 @@ def _format_arg(self, opt, spec, val): if diff_inv: - raise Exceptions( + raise Exception( "Review invert_affine, not all indexes from invert_affine were used, " - "check the description for the full definition") + "check the description for the full definition" + ) - return ' '.join(series) + return " ".join(series) return super(WarpImageMultiTransform, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.output_image): - outputs['output_image'] = os.path.abspath(self.inputs.output_image) + outputs["output_image"] = os.path.abspath(self.inputs.output_image) else: - outputs['output_image'] = os.path.abspath( - self._gen_filename('output_image')) + outputs["output_image"] = os.path.abspath( + self._gen_filename("output_image") + ) return outputs @@ -288,81 +308,92 @@ class ApplyTransformsInputSpec(ANTSCommandInputSpec): 2, 3, 4, - argstr='--dimensionality %d', - desc=('This option forces the image to be treated ' - 'as a specified-dimensional image. If not ' - 'specified, antsWarp tries to infer the ' - 'dimensionality from the input image.')) + argstr="--dimensionality %d", + desc=( + "This option forces the image to be treated " + "as a specified-dimensional image. If not " + "specified, antsWarp tries to infer the " + "dimensionality from the input image." + ), + ) input_image_type = traits.Enum( 0, 1, 2, 3, - argstr='--input-image-type %d', - desc=('Option specifying the input image ' - 'type of scalar (default), vector, ' - 'tensor, or time series.')) + argstr="--input-image-type %d", + desc=( + "Option specifying the input image " + "type of scalar (default), vector, " + "tensor, or time series." 
+ ), + ) input_image = File( - argstr='--input %s', + argstr="--input %s", mandatory=True, - desc=('image to apply transformation to (generally a ' - 'coregistered functional)'), - exists=True) + desc=( + "image to apply transformation to (generally a " "coregistered functional)" + ), + exists=True, + ) output_image = traits.Str( - argstr='--output %s', - desc='output file name', - genfile=True, - hash_files=False) + argstr="--output %s", desc="output file name", genfile=True, hash_files=False + ) out_postfix = traits.Str( "_trans", usedefault=True, - desc=('Postfix that is appended to all output ' - 'files (default = _trans)')) + desc=("Postfix that is appended to all output " "files (default = _trans)"), + ) reference_image = File( - argstr='--reference-image %s', + argstr="--reference-image %s", mandatory=True, - desc='reference image space that you wish to warp INTO', - exists=True) + desc="reference image space that you wish to warp INTO", + exists=True, + ) interpolation = traits.Enum( - 'Linear', - 'NearestNeighbor', - 'CosineWindowedSinc', - 'WelchWindowedSinc', - 'HammingWindowedSinc', - 'LanczosWindowedSinc', - 'MultiLabel', - 'Gaussian', - 'BSpline', - argstr='%s', - usedefault=True) + "Linear", + "NearestNeighbor", + "CosineWindowedSinc", + "WelchWindowedSinc", + "HammingWindowedSinc", + "LanczosWindowedSinc", + "MultiLabel", + "Gaussian", + "BSpline", + argstr="%s", + usedefault=True, + ) interpolation_parameters = traits.Either( traits.Tuple(traits.Int()), # BSpline (order) traits.Tuple( - traits.Float(), # Gaussian/MultiLabel (sigma, alpha) - traits.Float())) + traits.Float(), traits.Float() # Gaussian/MultiLabel (sigma, alpha) + ), + ) transforms = traits.Either( InputMultiPath(File(exists=True)), - 'identity', - argstr='%s', + "identity", + argstr="%s", mandatory=True, - desc='transform files: will be applied in reverse order. For ' - 'example, the last specified transform will be applied first.') + desc="transform files: will be applied in reverse order. 
For " + "example, the last specified transform will be applied first.", + ) invert_transform_flags = InputMultiPath(traits.Bool()) - default_value = traits.Float( - 0.0, argstr='--default-value %g', usedefault=True) + default_value = traits.Float(0.0, argstr="--default-value %g", usedefault=True) print_out_composite_warp_file = traits.Bool( False, requires=["output_image"], - desc='output a composite warp file instead of a transformed image') + desc="output a composite warp file instead of a transformed image", + ) float = traits.Bool( - argstr='--float %d', + argstr="--float %d", default_value=False, usedefault=True, - desc='Use float instead of double for computations.') + desc="Use float instead of double for computations.", + ) class ApplyTransformsOutputSpec(TraitedSpec): - output_image = File(exists=True, desc='Warped image') + output_image = File(exists=True, desc="Warped image") class ApplyTransforms(ANTSCommand): @@ -411,12 +442,13 @@ class ApplyTransforms(ANTSCommand): --interpolation BSpline[ 5 ] --output deformed_moving1.nii --reference-image fixed1.nii \ --transform [ ants_Warp.nii.gz, 0 ] --transform [ trans.mat, 0 ]' """ - _cmd = 'antsApplyTransforms' + + _cmd = "antsApplyTransforms" input_spec = ApplyTransformsInputSpec output_spec = ApplyTransformsOutputSpec def _gen_filename(self, name): - if name == 'output_image': + if name == "output_image": output = self.inputs.output_image if not isdefined(output): _, name, ext = split_filename(self.inputs.input_image) @@ -429,15 +461,20 @@ def _get_transform_filenames(self): for ii in range(len(self.inputs.transforms)): if isdefined(self.inputs.invert_transform_flags): if len(self.inputs.transforms) == len( - self.inputs.invert_transform_flags): - invert_code = 1 if self.inputs.invert_transform_flags[ - ii] else 0 - retval.append("--transform [ %s, %d ]" % - (self.inputs.transforms[ii], invert_code)) + self.inputs.invert_transform_flags + ): + invert_code = 1 if self.inputs.invert_transform_flags[ii] else 0 + retval.append( + "--transform [ %s, %d ]" + % (self.inputs.transforms[ii], invert_code) + ) else: - raise Exception(( - "ERROR: The useInverse list must have the same number " - "of entries as the transformsFileName list.")) + raise Exception( + ( + "ERROR: The useInverse list must have the same number " + "of entries as the transformsFileName list." 
+ ) + ) else: retval.append("--transform %s" % self.inputs.transforms[ii]) return " ".join(retval) @@ -446,7 +483,8 @@ def _get_output_warped_filename(self): if isdefined(self.inputs.print_out_composite_warp_file): return "--output [ %s, %d ]" % ( self._gen_filename("output_image"), - int(self.inputs.print_out_composite_warp_file)) + int(self.inputs.print_out_composite_warp_file), + ) else: return "--output %s" % (self._gen_filename("output_image")) @@ -454,25 +492,28 @@ def _format_arg(self, opt, spec, val): if opt == "output_image": return self._get_output_warped_filename() elif opt == "transforms": - if val == 'identity': - return '-t identity' + if val == "identity": + return "-t identity" return self._get_transform_filenames() - elif opt == 'interpolation': - if self.inputs.interpolation in ['BSpline', 'MultiLabel', 'Gaussian'] and \ - isdefined(self.inputs.interpolation_parameters): - return '--interpolation %s[ %s ]' % ( - self.inputs.interpolation, ', '.join([ - str(param) - for param in self.inputs.interpolation_parameters - ])) + elif opt == "interpolation": + if self.inputs.interpolation in [ + "BSpline", + "MultiLabel", + "Gaussian", + ] and isdefined(self.inputs.interpolation_parameters): + return "--interpolation %s[ %s ]" % ( + self.inputs.interpolation, + ", ".join( + [str(param) for param in self.inputs.interpolation_parameters] + ), + ) else: - return '--interpolation %s' % self.inputs.interpolation + return "--interpolation %s" % self.inputs.interpolation return super(ApplyTransforms, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() - outputs['output_image'] = os.path.abspath( - self._gen_filename('output_image')) + outputs["output_image"] = os.path.abspath(self._gen_filename("output_image")) return outputs @@ -481,44 +522,50 @@ class ApplyTransformsToPointsInputSpec(ANTSCommandInputSpec): 2, 3, 4, - argstr='--dimensionality %d', - desc=('This option forces the image to be treated ' - 'as a specified-dimensional image. If not ' - 'specified, antsWarp tries to infer the ' - 'dimensionality from the input image.')) + argstr="--dimensionality %d", + desc=( + "This option forces the image to be treated " + "as a specified-dimensional image. If not " + "specified, antsWarp tries to infer the " + "dimensionality from the input image." + ), + ) input_file = File( - argstr='--input %s', + argstr="--input %s", mandatory=True, - desc= - ("Currently, the only input supported is a csv file with" - " columns including x,y (2D), x,y,z (3D) or x,y,z,t,label (4D) column headers." - " The points should be defined in physical space." - " If in doubt how to convert coordinates from your files to the space" - " required by antsApplyTransformsToPoints try creating/drawing a simple" - " label volume with only one voxel set to 1 and all others set to 0." - " Write down the voxel coordinates. Then use ImageMaths LabelStats to find" - " out what coordinates for this voxel antsApplyTransformsToPoints is" - " expecting."), - exists=True) + desc=( + "Currently, the only input supported is a csv file with" + " columns including x,y (2D), x,y,z (3D) or x,y,z,t,label (4D) column headers." + " The points should be defined in physical space." + " If in doubt how to convert coordinates from your files to the space" + " required by antsApplyTransformsToPoints try creating/drawing a simple" + " label volume with only one voxel set to 1 and all others set to 0." + " Write down the voxel coordinates. 
Then use ImageMaths LabelStats to find" + " out what coordinates for this voxel antsApplyTransformsToPoints is" + " expecting." + ), + exists=True, + ) output_file = traits.Str( - argstr='--output %s', - desc='Name of the output CSV file', - name_source=['input_file'], + argstr="--output %s", + desc="Name of the output CSV file", + name_source=["input_file"], hash_files=False, - name_template='%s_transformed.csv') + name_template="%s_transformed.csv", + ) transforms = traits.List( File(exists=True), - argstr='%s', + argstr="%s", mandatory=True, - desc='transforms that will be applied to the points') + desc="transforms that will be applied to the points", + ) invert_transform_flags = traits.List( - traits.Bool(), - desc='list indicating if a transform should be reversed') + traits.Bool(), desc="list indicating if a transform should be reversed" + ) class ApplyTransformsToPointsOutputSpec(TraitedSpec): - output_file = File( - exists=True, desc='csv file with transformed coordinates') + output_file = File(exists=True, desc="csv file with transformed coordinates") class ApplyTransformsToPoints(ANTSCommand): @@ -540,7 +587,8 @@ class ApplyTransformsToPoints(ANTSCommand): """ - _cmd = 'antsApplyTransformsToPoints' + + _cmd = "antsApplyTransformsToPoints" input_spec = ApplyTransformsToPointsInputSpec output_spec = ApplyTransformsToPointsOutputSpec @@ -549,15 +597,20 @@ def _get_transform_filenames(self): for ii in range(len(self.inputs.transforms)): if isdefined(self.inputs.invert_transform_flags): if len(self.inputs.transforms) == len( - self.inputs.invert_transform_flags): - invert_code = 1 if self.inputs.invert_transform_flags[ - ii] else 0 - retval.append("--transform [ %s, %d ]" % - (self.inputs.transforms[ii], invert_code)) + self.inputs.invert_transform_flags + ): + invert_code = 1 if self.inputs.invert_transform_flags[ii] else 0 + retval.append( + "--transform [ %s, %d ]" + % (self.inputs.transforms[ii], invert_code) + ) else: - raise Exception(( - "ERROR: The useInverse list must have the same number " - "of entries as the transformsFileName list.")) + raise Exception( + ( + "ERROR: The useInverse list must have the same number " + "of entries as the transformsFileName list." 
+ ) + ) else: retval.append("--transform %s" % self.inputs.transforms[ii]) return " ".join(retval) diff --git a/nipype/interfaces/ants/segmentation.py b/nipype/interfaces/ants/segmentation.py index e9966bf612..e3fe579844 100644 --- a/nipype/interfaces/ants/segmentation.py +++ b/nipype/interfaces/ants/segmentation.py @@ -13,46 +13,50 @@ class AtroposInputSpec(ANTSCommandInputSpec): 3, 2, 4, - argstr='--image-dimensionality %d', + argstr="--image-dimensionality %d", usedefault=True, - desc='image dimension (2, 3, or 4)') + desc="image dimension (2, 3, or 4)", + ) intensity_images = InputMultiPath( - File(exists=True), argstr="--intensity-image %s...", mandatory=True) - mask_image = File(exists=True, argstr='--mask-image %s', mandatory=True) + File(exists=True), argstr="--intensity-image %s...", mandatory=True + ) + mask_image = File(exists=True, argstr="--mask-image %s", mandatory=True) initialization = traits.Enum( - 'Random', - 'Otsu', - 'KMeans', - 'PriorProbabilityImages', - 'PriorLabelImage', + "Random", + "Otsu", + "KMeans", + "PriorProbabilityImages", + "PriorLabelImage", argstr="%s", - requires=['number_of_tissue_classes'], - mandatory=True) + requires=["number_of_tissue_classes"], + mandatory=True, + ) prior_probability_images = InputMultiPath(File(exists=True)) number_of_tissue_classes = traits.Int(mandatory=True) prior_weighting = traits.Float() - prior_probability_threshold = traits.Float(requires=['prior_weighting']) + prior_probability_threshold = traits.Float(requires=["prior_weighting"]) likelihood_model = traits.Str(argstr="--likelihood-model %s") mrf_smoothing_factor = traits.Float(argstr="%s") - mrf_radius = traits.List(traits.Int(), requires=['mrf_smoothing_factor']) + mrf_radius = traits.List(traits.Int(), requires=["mrf_smoothing_factor"]) icm_use_synchronous_update = traits.Bool(argstr="%s") maximum_number_of_icm_terations = traits.Int( - requires=['icm_use_synchronous_update']) + requires=["icm_use_synchronous_update"] + ) n_iterations = traits.Int(argstr="%s") - convergence_threshold = traits.Float(requires=['n_iterations']) + convergence_threshold = traits.Float(requires=["n_iterations"]) posterior_formulation = traits.Str(argstr="%s") use_random_seed = traits.Bool( True, - argstr='--use-random-seed %d', - desc='use random seed value over constant', - usedefault=True) - use_mixture_model_proportions = traits.Bool( - requires=['posterior_formulation']) - out_classified_image_name = File( - argstr="%s", genfile=True, hash_files=False) + argstr="--use-random-seed %d", + desc="use random seed value over constant", + usedefault=True, + ) + use_mixture_model_proportions = traits.Bool(requires=["posterior_formulation"]) + out_classified_image_name = File(argstr="%s", genfile=True, hash_files=False) save_posteriors = traits.Bool() output_posteriors_name_template = traits.Str( - 'POSTERIOR_%02d.nii.gz', usedefault=True) + "POSTERIOR_%02d.nii.gz", usedefault=True + ) class AtroposOutputSpec(TraitedSpec): @@ -97,27 +101,33 @@ class Atropos(ANTSCommand): --output [structural_labeled.nii,POSTERIOR_%02d.nii.gz] --posterior-formulation Socrates[1] --use-random-seed 1' """ + input_spec = AtroposInputSpec output_spec = AtroposOutputSpec - _cmd = 'Atropos' + _cmd = "Atropos" def _format_arg(self, opt, spec, val): - if opt == 'initialization': + if opt == "initialization": retval = "--initialization %s[%d" % ( - val, self.inputs.number_of_tissue_classes) + val, + self.inputs.number_of_tissue_classes, + ) if val == "PriorProbabilityImages": - _, _, ext = split_filename( - 
self.inputs.prior_probability_images[0]) - retval += ",priors/priorProbImages%02d" + \ - ext + ",%g" % self.inputs.prior_weighting + _, _, ext = split_filename(self.inputs.prior_probability_images[0]) + retval += ( + ",priors/priorProbImages%02d" + + ext + + ",%g" % self.inputs.prior_weighting + ) if isdefined(self.inputs.prior_probability_threshold): retval += ",%g" % self.inputs.prior_probability_threshold return retval + "]" - if opt == 'mrf_smoothing_factor': + if opt == "mrf_smoothing_factor": retval = "--mrf [%g" % val if isdefined(self.inputs.mrf_radius): retval += ",%s" % self._format_xarray( - [str(s) for s in self.inputs.mrf_radius]) + [str(s) for s in self.inputs.mrf_radius] + ) return retval + "]" if opt == "icm_use_synchronous_update": retval = "--icm [%d" % val @@ -148,90 +158,105 @@ def _run_interface(self, runtime, correct_return_codes=[0]): os.makedirs(priors_directory) _, _, ext = split_filename(self.inputs.prior_probability_images[0]) for i, f in enumerate(self.inputs.prior_probability_images): - target = os.path.join(priors_directory, - 'priorProbImages%02d' % (i + 1) + ext) - if not (os.path.exists(target) - and os.path.realpath(target) == os.path.abspath(f)): + target = os.path.join( + priors_directory, "priorProbImages%02d" % (i + 1) + ext + ) + if not ( + os.path.exists(target) + and os.path.realpath(target) == os.path.abspath(f) + ): copyfile( os.path.abspath(f), - os.path.join(priors_directory, - 'priorProbImages%02d' % (i + 1) + ext)) + os.path.join( + priors_directory, "priorProbImages%02d" % (i + 1) + ext + ), + ) runtime = super(Atropos, self)._run_interface(runtime) return runtime def _gen_filename(self, name): - if name == 'out_classified_image_name': + if name == "out_classified_image_name": output = self.inputs.out_classified_image_name if not isdefined(output): _, name, ext = split_filename(self.inputs.intensity_images[0]) - output = name + '_labeled' + ext + output = name + "_labeled" + ext return output return None def _list_outputs(self): outputs = self._outputs().get() - outputs['classified_image'] = os.path.abspath( - self._gen_filename('out_classified_image_name')) - if isdefined( - self.inputs.save_posteriors) and self.inputs.save_posteriors: - outputs['posteriors'] = [] + outputs["classified_image"] = os.path.abspath( + self._gen_filename("out_classified_image_name") + ) + if isdefined(self.inputs.save_posteriors) and self.inputs.save_posteriors: + outputs["posteriors"] = [] for i in range(self.inputs.number_of_tissue_classes): - outputs['posteriors'].append( + outputs["posteriors"].append( os.path.abspath( - self.inputs.output_posteriors_name_template % (i + 1))) + self.inputs.output_posteriors_name_template % (i + 1) + ) + ) return outputs class LaplacianThicknessInputSpec(ANTSCommandInputSpec): input_wm = File( - argstr='%s', + argstr="%s", mandatory=True, copyfile=True, - desc='white matter segmentation image', - position=1) + desc="white matter segmentation image", + position=1, + ) input_gm = File( - argstr='%s', + argstr="%s", mandatory=True, copyfile=True, - desc='gray matter segmentation image', - position=2) + desc="gray matter segmentation image", + position=2, + ) output_image = File( - desc='name of output file', - argstr='%s', + desc="name of output file", + argstr="%s", position=3, - name_source=['input_wm'], - name_template='%s_thickness', + name_source=["input_wm"], + name_template="%s_thickness", keep_extension=True, - hash_files=False) + hash_files=False, + ) smooth_param = traits.Float( - argstr='%s', - desc='Sigma of the 
Laplacian Recursive Image Filter (defaults to 1)', - position=4) + argstr="%s", + desc="Sigma of the Laplacian Recursive Image Filter (defaults to 1)", + position=4, + ) prior_thickness = traits.Float( - argstr='%s', - desc='Prior thickness (defaults to 500)', - requires=['smooth_param'], - position=5) + argstr="%s", + desc="Prior thickness (defaults to 500)", + requires=["smooth_param"], + position=5, + ) dT = traits.Float( - argstr='%s', - desc='Time delta used during integration (defaults to 0.01)', - requires=['prior_thickness'], - position=6) + argstr="%s", + desc="Time delta used during integration (defaults to 0.01)", + requires=["prior_thickness"], + position=6, + ) sulcus_prior = traits.Float( - argstr='%s', - desc='Positive floating point number for sulcus prior. ' - 'Authors said that 0.15 might be a reasonable value', - requires=['dT'], - position=7) + argstr="%s", + desc="Positive floating point number for sulcus prior. " + "Authors said that 0.15 might be a reasonable value", + requires=["dT"], + position=7, + ) tolerance = traits.Float( - argstr='%s', - desc='Tolerance to reach during optimization (defaults to 0.001)', - requires=['sulcus_prior'], - position=8) + argstr="%s", + desc="Tolerance to reach during optimization (defaults to 0.001)", + requires=["sulcus_prior"], + position=8, + ) class LaplacianThicknessOutputSpec(TraitedSpec): - output_image = File(exists=True, desc='Cortical thickness') + output_image = File(exists=True, desc="Cortical thickness") class LaplacianThickness(ANTSCommand): @@ -253,81 +278,92 @@ class LaplacianThickness(ANTSCommand): """ - _cmd = 'LaplacianThickness' + _cmd = "LaplacianThickness" input_spec = LaplacianThicknessInputSpec output_spec = LaplacianThicknessOutputSpec class N4BiasFieldCorrectionInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( - 3, - 2, - 4, - argstr='-d %d', - usedefault=True, - desc='image dimension (2, 3 or 4)') + 3, 2, 4, argstr="-d %d", usedefault=True, desc="image dimension (2, 3 or 4)" + ) input_image = File( - argstr='--input-image %s', + argstr="--input-image %s", mandatory=True, - desc=('input for bias correction. Negative values or values close to ' - 'zero should be processed prior to correction')) + desc=( + "input for bias correction. Negative values or values close to " + "zero should be processed prior to correction" + ), + ) mask_image = File( - argstr='--mask-image %s', - desc=('image to specify region to perform final bias correction in')) + argstr="--mask-image %s", + desc=("image to specify region to perform final bias correction in"), + ) weight_image = File( - argstr='--weight-image %s', - desc=('image for relative weighting (e.g. probability map of the white ' - 'matter) of voxels during the B-spline fitting. ')) + argstr="--weight-image %s", + desc=( + "image for relative weighting (e.g. probability map of the white " + "matter) of voxels during the B-spline fitting. 
" + ), + ) output_image = traits.Str( - argstr='--output %s', - desc='output file name', - name_source=['input_image'], - name_template='%s_corrected', + argstr="--output %s", + desc="output file name", + name_source=["input_image"], + name_template="%s_corrected", keep_extension=True, - hash_files=False) + hash_files=False, + ) bspline_fitting_distance = traits.Float(argstr="--bspline-fitting %s") - bspline_order = traits.Int(requires=['bspline_fitting_distance']) + bspline_order = traits.Int(requires=["bspline_fitting_distance"]) shrink_factor = traits.Int(argstr="--shrink-factor %d") n_iterations = traits.List(traits.Int(), argstr="--convergence %s") - convergence_threshold = traits.Float(requires=['n_iterations']) + convergence_threshold = traits.Float(requires=["n_iterations"]) save_bias = traits.Bool( False, mandatory=True, usedefault=True, - desc=('True if the estimated bias should be saved to file.'), - xor=['bias_image']) - bias_image = File( - desc='Filename for the estimated bias.', hash_files=False) + desc=("True if the estimated bias should be saved to file."), + xor=["bias_image"], + ) + bias_image = File(desc="Filename for the estimated bias.", hash_files=False) copy_header = traits.Bool( False, mandatory=True, usedefault=True, - desc='copy headers of the original image into the ' - 'output (corrected) file') + desc="copy headers of the original image into the " "output (corrected) file", + ) rescale_intensities = traits.Bool( - False, usedefault=True, argstr='-r', min_ver='2.1.0', + False, + usedefault=True, + argstr="-r", + min_ver="2.1.0", desc="""\ [NOTE: Only ANTs>=2.1.0] At each iteration, a new intensity mapping is calculated and applied but there is nothing which constrains the new intensity range to be within certain values. The result is that the range can "drift" from the original at each iteration. This option rescales to the [min,max] range of the original image intensities -within the user-specified mask.""") +within the user-specified mask.""", + ) histogram_sharpening = traits.Tuple( (0.15, 0.01, 200), - traits.Float, traits.Float, traits.Int, - argstr='--histogram-sharpening [%g,%g,%d]', + traits.Float, + traits.Float, + traits.Int, + argstr="--histogram-sharpening [%g,%g,%d]", desc="""\ Three-values tuple of histogram sharpening parameters \ (FWHM, wienerNose, numberOfHistogramBins). These options describe the histogram sharpening parameters, i.e. the \ deconvolution step parameters described in the original N3 algorithm. 


 class N4BiasFieldCorrectionOutputSpec(TraitedSpec):
-    output_image = File(exists=True, desc='Warped image')
-    bias_image = File(exists=True, desc='Estimated bias')
+    output_image = File(exists=True, desc="Warped image")
+    bias_image = File(exists=True, desc="Estimated bias")


 class N4BiasFieldCorrection(ANTSCommand):
@@ -398,7 +434,7 @@ class N4BiasFieldCorrection(ANTSCommand):

     """

-    _cmd = 'N4BiasFieldCorrection'
+    _cmd = "N4BiasFieldCorrection"
     input_spec = N4BiasFieldCorrectionInputSpec
     output_spec = N4BiasFieldCorrectionOutputSpec

@@ -408,38 +444,38 @@ def __init__(self, *args, **kwargs):
         super(N4BiasFieldCorrection, self).__init__(*args, **kwargs)

     def _format_arg(self, name, trait_spec, value):
-        if name == 'output_image' and self._out_bias_file:
-            newval = '[ %s, %s ]' % (value, self._out_bias_file)
+        if name == "output_image" and self._out_bias_file:
+            newval = "[ %s, %s ]" % (value, self._out_bias_file)
             return trait_spec.argstr % newval

-        if name == 'bspline_fitting_distance':
+        if name == "bspline_fitting_distance":
             if isdefined(self.inputs.bspline_order):
-                newval = '[ %g, %d ]' % (value, self.inputs.bspline_order)
+                newval = "[ %g, %d ]" % (value, self.inputs.bspline_order)
             else:
-                newval = '[ %g ]' % value
+                newval = "[ %g ]" % value
             return trait_spec.argstr % newval

-        if name == 'n_iterations':
+        if name == "n_iterations":
             if isdefined(self.inputs.convergence_threshold):
-                newval = '[ %s, %g ]' % (
+                newval = "[ %s, %g ]" % (
                     self._format_xarray([str(elt) for elt in value]),
-                    self.inputs.convergence_threshold)
+                    self.inputs.convergence_threshold,
+                )
             else:
-                newval = '[ %s ]' % self._format_xarray(
-                    [str(elt) for elt in value])
+                newval = "[ %s ]" % self._format_xarray([str(elt) for elt in value])
             return trait_spec.argstr % newval

-        return super(N4BiasFieldCorrection, self)._format_arg(
-            name, trait_spec, value)
+        return super(N4BiasFieldCorrection, self)._format_arg(name, trait_spec, value)

     def _parse_inputs(self, skip=None):
-        skip = (skip or []) + ['save_bias', 'bias_image']
+        skip = (skip or []) + ["save_bias", "bias_image"]
         self._out_bias_file = None
         if self.inputs.save_bias or isdefined(self.inputs.bias_image):
             bias_image = self.inputs.bias_image
             if not isdefined(bias_image):
-                bias_image = fname_presuffix(os.path.basename(self.inputs.input_image),
-                                             suffix='_bias')
+                bias_image = fname_presuffix(
+                    os.path.basename(self.inputs.input_image), suffix="_bias"
+                )
             self._out_bias_file = bias_image
         return super(N4BiasFieldCorrection, self)._parse_inputs(skip=skip)

@@ -448,166 +484,202 @@ def _list_outputs(self):

         # Fix headers
         if self.inputs.copy_header:
-            self._copy_header(outputs['output_image'])
+            self._copy_header(outputs["output_image"])

         if self._out_bias_file:
-            outputs['bias_image'] = os.path.abspath(self._out_bias_file)
+            outputs["bias_image"] = os.path.abspath(self._out_bias_file)
             if self.inputs.copy_header:
-                self._copy_header(outputs['bias_image'])
+                self._copy_header(outputs["bias_image"])
         return outputs

     def _copy_header(self, fname):
         """Copy header from input image to an output image."""
         import nibabel as nb
+
         in_img = nb.load(self.inputs.input_image)
         out_img = nb.load(fname, mmap=False)
-        new_img = out_img.__class__(out_img.get_fdata(), in_img.affine,
-                                    in_img.header)
+        new_img = out_img.__class__(out_img.get_fdata(), in_img.affine, in_img.header)
         new_img.set_data_dtype(out_img.get_data_dtype())
         new_img.to_filename(fname)


 class 
CorticalThicknessInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( - 3, 2, argstr='-d %d', usedefault=True, desc='image dimension (2 or 3)') + 3, 2, argstr="-d %d", usedefault=True, desc="image dimension (2 or 3)" + ) anatomical_image = File( exists=True, - argstr='-a %s', - desc=('Structural *intensity* image, typically T1.' - ' If more than one anatomical image is specified,' - ' subsequently specified images are used during the' - ' segmentation process. However, only the first' - ' image is used in the registration of priors.' - ' Our suggestion would be to specify the T1' - ' as the first image.'), - mandatory=True) + argstr="-a %s", + desc=( + "Structural *intensity* image, typically T1." + " If more than one anatomical image is specified," + " subsequently specified images are used during the" + " segmentation process. However, only the first" + " image is used in the registration of priors." + " Our suggestion would be to specify the T1" + " as the first image." + ), + mandatory=True, + ) brain_template = File( exists=True, - argstr='-e %s', - desc=('Anatomical *intensity* template (possibly created using a' - ' population data set with buildtemplateparallel.sh in ANTs).' - ' This template is *not* skull-stripped.'), - mandatory=True) + argstr="-e %s", + desc=( + "Anatomical *intensity* template (possibly created using a" + " population data set with buildtemplateparallel.sh in ANTs)." + " This template is *not* skull-stripped." + ), + mandatory=True, + ) brain_probability_mask = File( exists=True, - argstr='-m %s', - desc='brain probability mask in template space', + argstr="-m %s", + desc="brain probability mask in template space", copyfile=False, - mandatory=True) + mandatory=True, + ) segmentation_priors = InputMultiPath( - File(exists=True), argstr='-p %s', mandatory=True) + File(exists=True), argstr="-p %s", mandatory=True + ) out_prefix = traits.Str( - 'antsCT_', - argstr='-o %s', + "antsCT_", + argstr="-o %s", usedefault=True, - desc=('Prefix that is prepended to all output' - ' files (default = antsCT_)')) + desc=("Prefix that is prepended to all output" " files (default = antsCT_)"), + ) image_suffix = traits.Str( - 'nii.gz', - desc=('any of standard ITK formats,' - ' nii.gz is default'), - argstr='-s %s', - usedefault=True) + "nii.gz", + desc=("any of standard ITK formats," " nii.gz is default"), + argstr="-s %s", + usedefault=True, + ) t1_registration_template = File( exists=True, - desc=('Anatomical *intensity* template' - ' (assumed to be skull-stripped). A common' - ' case would be where this would be the same' - ' template as specified in the -e option which' - ' is not skull stripped.'), - argstr='-t %s', - mandatory=True) + desc=( + "Anatomical *intensity* template" + " (assumed to be skull-stripped). A common" + " case would be where this would be the same" + " template as specified in the -e option which" + " is not skull stripped." + ), + argstr="-t %s", + mandatory=True, + ) extraction_registration_mask = File( exists=True, - argstr='-f %s', - desc=('Mask (defined in the template space) used during' - ' registration for brain extraction.')) + argstr="-f %s", + desc=( + "Mask (defined in the template space) used during" + " registration for brain extraction." 
+        ),
+    )
     keep_temporary_files = traits.Int(
-        argstr='-k %d',
-        desc='Keep brain extraction/segmentation warps, etc (default = 0).')
+        argstr="-k %d",
+        desc="Keep brain extraction/segmentation warps, etc (default = 0).",
+    )
     max_iterations = traits.Int(
-        argstr='-i %d',
-        desc=('ANTS registration max iterations (default = 100x100x70x20)'))
+        argstr="-i %d",
+        desc=("ANTS registration max iterations (default = 100x100x70x20)"),
+    )
     prior_segmentation_weight = traits.Float(
-        argstr='-w %f',
-        desc=('Atropos spatial prior *probability* weight for'
-              ' the segmentation'))
+        argstr="-w %f",
+        desc=("Atropos spatial prior *probability* weight for" " the segmentation"),
+    )
     segmentation_iterations = traits.Int(
-        argstr='-n %d',
-        desc=('N4 -> Atropos -> N4 iterations during segmentation'
-              ' (default = 3)'))
+        argstr="-n %d",
+        desc=("N4 -> Atropos -> N4 iterations during segmentation" " (default = 3)"),
+    )
     posterior_formulation = traits.Str(
-        argstr='-b %s',
-        desc=('Atropos posterior formulation and whether or not'
-              ' to use mixture model proportions.'
-              ''' e.g 'Socrates[1]' (default) or 'Aristotle[1]'.'''
-              ' Choose the latter if you'
-              ' want use the distance priors (see also the -l option'
-              ' for label propagation control).'))
+        argstr="-b %s",
+        desc=(
+            "Atropos posterior formulation and whether or not"
+            " to use mixture model proportions."
+            """ e.g. 'Socrates[1]' (default) or 'Aristotle[1]'."""
+            " Choose the latter if you"
+            " want to use the distance priors (see also the -l option"
+            " for label propagation control)."
+        ),
+    )
     use_floatingpoint_precision = traits.Enum(
         0,
         1,
-        argstr='-j %d',
-        desc=('Use floating point precision in registrations (default = 0)'))
+        argstr="-j %d",
+        desc=("Use floating point precision in registrations (default = 0)"),
+    )
     use_random_seeding = traits.Enum(
         0,
         1,
-        argstr='-u %d',
-        desc=('Use random number generated from system clock in Atropos'
-              ' (default = 1)'))
+        argstr="-u %d",
+        desc=(
+            "Use random number generated from system clock in Atropos" " (default = 1)"
+        ),
+    )
     b_spline_smoothing = traits.Bool(
-        argstr='-v',
-        desc=('Use B-spline SyN for registrations and B-spline'
-              ' exponential mapping in DiReCT.'))
+        argstr="-v",
+        desc=(
+            "Use B-spline SyN for registrations and B-spline"
+            " exponential mapping in DiReCT."
+        ),
+    )
     cortical_label_image = File(
-        exists=True, desc='Cortical ROI labels to use as a prior for ATITH.')
+        exists=True, desc="Cortical ROI labels to use as a prior for ATITH."
+    )
     label_propagation = traits.Str(
-        argstr='-l %s',
-        desc=
-        ('Incorporate a distance prior one the posterior formulation. Should be'
-         ''' of the form 'label[lambda,boundaryProbability]' where label'''
-         ' is a value of 1,2,3,... denoting label ID. The label'
-         ' probability for anything outside the current label'
-         ' = boundaryProbability * exp( -lambda * distanceFromBoundary )'
-         ' Intuitively, smaller lambda values will increase the spatial capture'
-         ' range of the distance prior. To apply to all label values, simply omit'
-         ' specifying the label, i.e. -l [lambda,boundaryProbability].'))
+        argstr="-l %s",
+        desc=(
+            "Incorporate a distance prior on the posterior formulation. Should be"
+            """ of the form 'label[lambda,boundaryProbability]' where label"""
+            " is a value of 1,2,3,... denoting label ID. The label"
+            " probability for anything outside the current label"
+            " = boundaryProbability * exp( -lambda * distanceFromBoundary )"
+            " Intuitively, smaller lambda values will increase the spatial capture"
+            " range of the distance prior. To apply to all label values, simply omit"
+            " specifying the label, i.e. -l [lambda,boundaryProbability]."
+        ),
+    )
     quick_registration = traits.Bool(
-        argstr='-q 1',
-        desc=
-        ('If = 1, use antsRegistrationSyNQuick.sh as the basis for registration'
-         ' during brain extraction, brain segmentation, and'
-         ' (optional) normalization to a template.'
-         ' Otherwise use antsRegistrationSyN.sh (default = 0).'))
+        argstr="-q 1",
+        desc=(
+            "If = 1, use antsRegistrationSyNQuick.sh as the basis for registration"
+            " during brain extraction, brain segmentation, and"
+            " (optional) normalization to a template."
+            " Otherwise use antsRegistrationSyN.sh (default = 0)."
+        ),
+    )
     debug = traits.Bool(
-        argstr='-z 1',
+        argstr="-z 1",
         desc=(
-            'If > 0, runs a faster version of the script.'
-            ' Only for testing. Implies -u 0.'
-            ' Requires single thread computation for complete reproducibility.'
-        ))
+            "If > 0, runs a faster version of the script."
+            " Only for testing. Implies -u 0."
+            " Requires single thread computation for complete reproducibility."
+        ),
+    )
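A minimal sketch of driving antsCorticalThickness.sh through this spec, not part of the diff; filenames are illustrative and follow the class docstring:

>>> from nipype.interfaces.ants.segmentation import CorticalThickness
>>> corticalthickness = CorticalThickness()
>>> corticalthickness.inputs.dimension = 3
>>> corticalthickness.inputs.anatomical_image = 'T1.nii.gz'
>>> corticalthickness.inputs.brain_template = 'study_template.nii.gz'
>>> corticalthickness.inputs.brain_probability_mask = 'ProbabilityMaskOfStudyTemplate.nii.gz'
>>> corticalthickness.inputs.segmentation_priors = ['BrainSegmentationPrior01.nii.gz', 'BrainSegmentationPrior02.nii.gz', 'BrainSegmentationPrior03.nii.gz', 'BrainSegmentationPrior04.nii.gz']
>>> corticalthickness.inputs.t1_registration_template = 'brain_study_template.nii.gz'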


 class CorticalThicknessOutputSpec(TraitedSpec):
-    BrainExtractionMask = File(exists=True, desc='brain extraction mask')
-    ExtractedBrainN4 = File(exists=True, desc='extracted brain from N4 image')
-    BrainSegmentation = File(exists=True, desc='brain segmentaion image')
-    BrainSegmentationN4 = File(exists=True, desc='N4 corrected image')
+    BrainExtractionMask = File(exists=True, desc="brain extraction mask")
+    ExtractedBrainN4 = File(exists=True, desc="extracted brain from N4 image")
+    BrainSegmentation = File(exists=True, desc="brain segmentation image")
+    BrainSegmentationN4 = File(exists=True, desc="N4 corrected image")
     BrainSegmentationPosteriors = OutputMultiPath(
-        File(exists=True), desc='Posterior probability images')
-    CorticalThickness = File(exists=True, desc='cortical thickness file')
+        File(exists=True), desc="Posterior probability images"
+    )
+    CorticalThickness = File(exists=True, desc="cortical thickness file")
     TemplateToSubject1GenericAffine = File(
-        exists=True, desc='Template to subject affine')
-    TemplateToSubject0Warp = File(exists=True, desc='Template to subject warp')
-    SubjectToTemplate1Warp = File(
-        exists=True, desc='Template to subject inverse warp')
+        exists=True, desc="Template to subject affine"
+    )
+    TemplateToSubject0Warp = File(exists=True, desc="Template to subject warp")
+    SubjectToTemplate1Warp = File(exists=True, desc="Template to subject inverse warp")
     SubjectToTemplate0GenericAffine = File(
-        exists=True, desc='Template to subject inverse affine')
+        exists=True, desc="Template to subject inverse affine"
+    )
     SubjectToTemplateLogJacobian = File(
-        exists=True, desc='Template to subject log jacobian')
+        exists=True, desc="Template to subject log jacobian"
+    )
     CorticalThicknessNormedToTemplate = File(
-        exists=True, desc='Normalized cortical thickness')
-    BrainVolumes = File(exists=True, desc='Brain volumes as text')
+        exists=True, desc="Normalized cortical thickness"
+    )
+    BrainVolumes = File(exists=True, desc="Brain volumes as text")


 class CorticalThickness(ANTSCommand):
@@ -633,25 +705,25 @@ class CorticalThickness(ANTSCommand):

     input_spec = CorticalThicknessInputSpec
     output_spec = CorticalThicknessOutputSpec
-    _cmd = 'antsCorticalThickness.sh'
+    _cmd = 
"antsCorticalThickness.sh" def _format_arg(self, opt, spec, val): - if opt == 'anatomical_image': - retval = '-a %s' % val + if opt == "anatomical_image": + retval = "-a %s" % val return retval - if opt == 'brain_template': - retval = '-e %s' % val + if opt == "brain_template": + retval = "-e %s" % val return retval - if opt == 'brain_probability_mask': - retval = '-m %s' % val + if opt == "brain_probability_mask": + retval = "-m %s" % val return retval - if opt == 'out_prefix': - retval = '-o %s' % val + if opt == "out_prefix": + retval = "-o %s" % val return retval - if opt == 't1_registration_template': - retval = '-t %s' % val + if opt == "t1_registration_template": + retval = "-t %s" % val return retval - if opt == 'segmentation_priors': + if opt == "segmentation_priors": _, _, ext = split_filename(self.inputs.segmentation_priors[0]) retval = "-p nipype_priors/BrainSegmentationPrior%02d" + ext return retval @@ -663,154 +735,199 @@ def _run_interface(self, runtime, correct_return_codes=[0]): os.makedirs(priors_directory) _, _, ext = split_filename(self.inputs.segmentation_priors[0]) for i, f in enumerate(self.inputs.segmentation_priors): - target = os.path.join(priors_directory, - 'BrainSegmentationPrior%02d' % (i + 1) + ext) - if not (os.path.exists(target) - and os.path.realpath(target) == os.path.abspath(f)): + target = os.path.join( + priors_directory, "BrainSegmentationPrior%02d" % (i + 1) + ext + ) + if not ( + os.path.exists(target) + and os.path.realpath(target) == os.path.abspath(f) + ): copyfile(os.path.abspath(f), target) runtime = super(CorticalThickness, self)._run_interface(runtime) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['BrainExtractionMask'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + 'BrainExtractionMask.' + - self.inputs.image_suffix) - outputs['ExtractedBrainN4'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + 'ExtractedBrain0N4.' + - self.inputs.image_suffix) - outputs['BrainSegmentation'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + 'BrainSegmentation.' + - self.inputs.image_suffix) - outputs['BrainSegmentationN4'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + 'BrainSegmentation0N4.' + - self.inputs.image_suffix) + outputs["BrainExtractionMask"] = os.path.join( + os.getcwd(), + self.inputs.out_prefix + "BrainExtractionMask." + self.inputs.image_suffix, + ) + outputs["ExtractedBrainN4"] = os.path.join( + os.getcwd(), + self.inputs.out_prefix + "ExtractedBrain0N4." + self.inputs.image_suffix, + ) + outputs["BrainSegmentation"] = os.path.join( + os.getcwd(), + self.inputs.out_prefix + "BrainSegmentation." + self.inputs.image_suffix, + ) + outputs["BrainSegmentationN4"] = os.path.join( + os.getcwd(), + self.inputs.out_prefix + "BrainSegmentation0N4." + self.inputs.image_suffix, + ) posteriors = [] for i in range(len(self.inputs.segmentation_priors)): posteriors.append( - os.path.join(os.getcwd(), self.inputs.out_prefix + - 'BrainSegmentationPosteriors%02d.' % - (i + 1) + self.inputs.image_suffix)) - outputs['BrainSegmentationPosteriors'] = posteriors - outputs['CorticalThickness'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + 'CorticalThickness.' + - self.inputs.image_suffix) - outputs['TemplateToSubject1GenericAffine'] = os.path.join( + os.path.join( + os.getcwd(), + self.inputs.out_prefix + + "BrainSegmentationPosteriors%02d." 
% (i + 1) + + self.inputs.image_suffix, + ) + ) + outputs["BrainSegmentationPosteriors"] = posteriors + outputs["CorticalThickness"] = os.path.join( + os.getcwd(), + self.inputs.out_prefix + "CorticalThickness." + self.inputs.image_suffix, + ) + outputs["TemplateToSubject1GenericAffine"] = os.path.join( + os.getcwd(), self.inputs.out_prefix + "TemplateToSubject1GenericAffine.mat" + ) + outputs["TemplateToSubject0Warp"] = os.path.join( + os.getcwd(), + self.inputs.out_prefix + + "TemplateToSubject0Warp." + + self.inputs.image_suffix, + ) + outputs["SubjectToTemplate1Warp"] = os.path.join( os.getcwd(), - self.inputs.out_prefix + 'TemplateToSubject1GenericAffine.mat') - outputs['TemplateToSubject0Warp'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + 'TemplateToSubject0Warp.' + - self.inputs.image_suffix) - outputs['SubjectToTemplate1Warp'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + 'SubjectToTemplate1Warp.' + - self.inputs.image_suffix) - outputs['SubjectToTemplate0GenericAffine'] = os.path.join( + self.inputs.out_prefix + + "SubjectToTemplate1Warp." + + self.inputs.image_suffix, + ) + outputs["SubjectToTemplate0GenericAffine"] = os.path.join( + os.getcwd(), self.inputs.out_prefix + "SubjectToTemplate0GenericAffine.mat" + ) + outputs["SubjectToTemplateLogJacobian"] = os.path.join( os.getcwd(), - self.inputs.out_prefix + 'SubjectToTemplate0GenericAffine.mat') - outputs['SubjectToTemplateLogJacobian'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + - 'SubjectToTemplateLogJacobian.' + self.inputs.image_suffix) - outputs['CorticalThicknessNormedToTemplate'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + 'CorticalThickness.' + - self.inputs.image_suffix) - outputs['BrainVolumes'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + 'brainvols.csv') + self.inputs.out_prefix + + "SubjectToTemplateLogJacobian." + + self.inputs.image_suffix, + ) + outputs["CorticalThicknessNormedToTemplate"] = os.path.join( + os.getcwd(), + self.inputs.out_prefix + "CorticalThickness." + self.inputs.image_suffix, + ) + outputs["BrainVolumes"] = os.path.join( + os.getcwd(), self.inputs.out_prefix + "brainvols.csv" + ) return outputs class BrainExtractionInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( - 3, 2, argstr='-d %d', usedefault=True, desc='image dimension (2 or 3)') + 3, 2, argstr="-d %d", usedefault=True, desc="image dimension (2 or 3)" + ) anatomical_image = File( exists=True, - argstr='-a %s', - desc=('Structural image, typically T1. If more than one' - ' anatomical image is specified, subsequently specified' - ' images are used during the segmentation process. However,' - ' only the first image is used in the registration of priors.' - ' Our suggestion would be to specify the T1 as the first image.' - ' Anatomical template created using e.g. LPBA40 data set with' - ' buildtemplateparallel.sh in ANTs.'), - mandatory=True) + argstr="-a %s", + desc=( + "Structural image, typically T1. If more than one" + " anatomical image is specified, subsequently specified" + " images are used during the segmentation process. However," + " only the first image is used in the registration of priors." + " Our suggestion would be to specify the T1 as the first image." + " Anatomical template created using e.g. LPBA40 data set with" + " buildtemplateparallel.sh in ANTs." + ), + mandatory=True, + ) brain_template = File( exists=True, - argstr='-e %s', - desc=('Anatomical template created using e.g. 
LPBA40 data set with'
-              ' buildtemplateparallel.sh in ANTs.'),
-        mandatory=True)
+        argstr="-e %s",
+        desc=(
+            "Anatomical template created using e.g. LPBA40 data set with"
+            " buildtemplateparallel.sh in ANTs."
+        ),
+        mandatory=True,
+    )
     brain_probability_mask = File(
         exists=True,
-        argstr='-m %s',
-        desc=('Brain probability mask created using e.g. LPBA40 data set which'
-              ' have brain masks defined, and warped to anatomical template and'
-              ' averaged resulting in a probability image.'),
+        argstr="-m %s",
+        desc=(
+            "Brain probability mask created using e.g. LPBA40 data set which"
+            " have brain masks defined, and warped to anatomical template and"
+            " averaged resulting in a probability image."
+        ),
         copyfile=False,
-        mandatory=True)
+        mandatory=True,
+    )
     out_prefix = traits.Str(
-        'highres001_',
-        argstr='-o %s',
+        "highres001_",
+        argstr="-o %s",
         usedefault=True,
-        desc=('Prefix that is prepended to all output'
-              ' files (default = highress001_)'))
+        desc=(
+            "Prefix that is prepended to all output" " files (default = highres001_)"
+        ),
+    )
     extraction_registration_mask = File(
         exists=True,
-        argstr='-f %s',
-        desc=('Mask (defined in the template space) used during'
-              ' registration for brain extraction.'
-              ' To limit the metric computation to a specific region.'))
+        argstr="-f %s",
+        desc=(
+            "Mask (defined in the template space) used during"
+            " registration for brain extraction."
+            " To limit the metric computation to a specific region."
+        ),
+    )
     image_suffix = traits.Str(
-        'nii.gz',
-        desc=('any of standard ITK formats,'
-              ' nii.gz is default'),
-        argstr='-s %s',
-        usedefault=True)
+        "nii.gz",
+        desc=("any of standard ITK formats," " nii.gz is default"),
+        argstr="-s %s",
+        usedefault=True,
+    )
     use_random_seeding = traits.Enum(
         0,
         1,
-        argstr='-u %d',
-        desc=('Use random number generated from system clock in Atropos'
-              ' (default = 1)'))
+        argstr="-u %d",
+        desc=(
+            "Use random number generated from system clock in Atropos" " (default = 1)"
+        ),
+    )
     keep_temporary_files = traits.Int(
-        argstr='-k %d',
-        desc='Keep brain extraction/segmentation warps, etc (default = 0).')
+        argstr="-k %d",
+        desc="Keep brain extraction/segmentation warps, etc (default = 0).",
+    )
     use_floatingpoint_precision = traits.Enum(
         0,
         1,
-        argstr='-q %d',
-        desc=('Use floating point precision in registrations (default = 0)'))
+        argstr="-q %d",
+        desc=("Use floating point precision in registrations (default = 0)"),
+    )
     debug = traits.Bool(
-        argstr='-z 1',
+        argstr="-z 1",
         desc=(
-            'If > 0, runs a faster version of the script.'
-            ' Only for testing. Implies -u 0.'
-            ' Requires single thread computation for complete reproducibility.'
-        ))
+            "If > 0, runs a faster version of the script."
+            " Only for testing. Implies -u 0."
+            " Requires single thread computation for complete reproducibility."
+        ),
+    )
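A minimal usage sketch for this spec, not part of the diff; the filenames are illustrative and match the cmdline shown in the class docstring below:

>>> from nipype.interfaces.ants.segmentation import BrainExtraction
>>> brainextraction = BrainExtraction()
>>> brainextraction.inputs.dimension = 3
>>> brainextraction.inputs.anatomical_image = 'T1.nii.gz'
>>> brainextraction.inputs.brain_template = 'study_template.nii.gz'
>>> brainextraction.inputs.brain_probability_mask = 'ProbabilityMaskOfStudyTemplate.nii.gz'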


 class BrainExtractionOutputSpec(TraitedSpec):
-    BrainExtractionMask = File(exists=True, desc='brain extraction mask')
-    BrainExtractionBrain = File(exists=True, desc='brain extraction image')
-    BrainExtractionCSF = File(
-        exists=True, desc='segmentation mask with only CSF')
+    BrainExtractionMask = File(exists=True, desc="brain extraction mask")
+    BrainExtractionBrain = File(exists=True, desc="brain extraction image")
+    BrainExtractionCSF = File(exists=True, desc="segmentation mask with only CSF")
     BrainExtractionGM = File(
-        exists=True, desc='segmentation mask with only grey matter')
-    BrainExtractionInitialAffine = File(exists=True, desc='')
-    BrainExtractionInitialAffineFixed = File(exists=True, desc='')
-    BrainExtractionInitialAffineMoving = File(exists=True, desc='')
-    BrainExtractionLaplacian = File(exists=True, desc='')
-    BrainExtractionPrior0GenericAffine = File(exists=True, desc='')
-    BrainExtractionPrior1InverseWarp = File(exists=True, desc='')
-    BrainExtractionPrior1Warp = File(exists=True, desc='')
-    BrainExtractionPriorWarped = File(exists=True, desc='')
+        exists=True, desc="segmentation mask with only grey matter"
+    )
+    BrainExtractionInitialAffine = File(exists=True, desc="")
+    BrainExtractionInitialAffineFixed = File(exists=True, desc="")
+    BrainExtractionInitialAffineMoving = File(exists=True, desc="")
+    BrainExtractionLaplacian = File(exists=True, desc="")
+    BrainExtractionPrior0GenericAffine = File(exists=True, desc="")
+    BrainExtractionPrior1InverseWarp = File(exists=True, desc="")
+    BrainExtractionPrior1Warp = File(exists=True, desc="")
+    BrainExtractionPriorWarped = File(exists=True, desc="")
     BrainExtractionSegmentation = File(
-        exists=True, desc='segmentation mask with CSF, GM, and WM')
-    BrainExtractionTemplateLaplacian = File(exists=True, desc='')
-    BrainExtractionTmp = File(exists=True, desc='')
+        exists=True, desc="segmentation mask with CSF, GM, and WM"
+    )
+    BrainExtractionTemplateLaplacian = File(exists=True, desc="")
+    BrainExtractionTmp = File(exists=True, desc="")
     BrainExtractionWM = File(
-        exists=True, desc='segmenration mask with only white matter')
-    N4Corrected0 = File(exists=True, desc='N4 bias field corrected image')
-    N4Truncated0 = File(exists=True, desc='')
+        exists=True, desc="segmentation mask with only white matter"
+    )
+    N4Corrected0 = File(exists=True, desc="N4 bias field corrected image")
+    N4Truncated0 = File(exists=True, desc="")


 class BrainExtraction(ANTSCommand):
@@ -827,45 +944,46 @@ class BrainExtraction(ANTSCommand):
     'antsBrainExtraction.sh -a T1.nii.gz -m ProbabilityMaskOfStudyTemplate.nii.gz -e study_template.nii.gz -d 3 \
 -s nii.gz -o highres001_'
     """
+
     input_spec = BrainExtractionInputSpec
     output_spec = BrainExtractionOutputSpec
-    _cmd = 'antsBrainExtraction.sh'
+    _cmd = "antsBrainExtraction.sh"

-    def _run_interface(self, runtime, correct_return_codes=(0, )):
+    def _run_interface(self, runtime, correct_return_codes=(0,)):
         # antsBrainExtraction.sh requires ANTSPATH to be defined
         out_environ = self._get_environ()
-        ants_path = out_environ.get('ANTSPATH', None) or os.getenv(
-            'ANTSPATH', None)
+        ants_path = out_environ.get("ANTSPATH", None) or os.getenv("ANTSPATH", None)
         if ants_path is None:
             # Check for antsRegistration, which is under bin/ (the $ANTSPATH) instead of
             # checking for antsBrainExtraction.sh which is under script/
-            cmd_path = which('antsRegistration', env=runtime.environ)
+            cmd_path = which("antsRegistration", env=runtime.environ)
             if not cmd_path:
                 raise RuntimeError(
                     'The environment variable $ANTSPATH is not defined in host "%s", '
-                    'and Nipype could not determine it automatically.' %
-                    runtime.hostname)
+                    "and Nipype could not determine it automatically."
+                    % runtime.hostname
+                )
             ants_path = os.path.dirname(cmd_path)
-            self.inputs.environ.update({'ANTSPATH': ants_path})
-        runtime.environ.update({'ANTSPATH': ants_path})
+            self.inputs.environ.update({"ANTSPATH": ants_path})
+        runtime.environ.update({"ANTSPATH": ants_path})
         runtime = super(BrainExtraction, self)._run_interface(runtime)

-        # Still, double-check if it didn't found N4
-        if 'we cant find' in runtime.stdout:
-            for line in runtime.stdout.split('\n'):
-                if line.strip().startswith('we cant find'):
-                    tool = line.strip().replace('we cant find the',
-                                                '').split(' ')[0]
+        # Still, double-check if it didn't find N4
+        if "we cant find" in runtime.stdout:
+            for line in runtime.stdout.split("\n"):
+                if line.strip().startswith("we cant find"):
+                    tool = line.strip().replace("we cant find the", "").split(" ")[0]
                     break
             errmsg = (
                 'antsBrainExtraction.sh requires "%s" to be found in $ANTSPATH '
-                '($ANTSPATH="%s").') % (tool, ants_path)
+                '($ANTSPATH="%s").'
+            ) % (tool, ants_path)
             if runtime.stderr is None:
                 runtime.stderr = errmsg
             else:
-                runtime.stderr += '\n' + errmsg
+                runtime.stderr += "\n" + errmsg
             runtime.returncode = 1
             self.raise_exception(runtime)
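As the check above implies, antsBrainExtraction.sh only runs once ANTSPATH resolves; when it is not inherited from the host environment, it can be set explicitly before running the sketch above (the install path here is hypothetical):

>>> import os
>>> os.environ['ANTSPATH'] = '/opt/ants/bin'  # hypothetical ANTs install location
>>> brainextraction.run()  # doctest: +SKIP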
@@ -873,64 +991,105 @@ def _run_interface(self, runtime, correct_return_codes=(0, )):

     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['BrainExtractionMask'] = os.path.join(
-            os.getcwd(), self.inputs.out_prefix + 'BrainExtractionMask.' +
-            self.inputs.image_suffix)
-        outputs['BrainExtractionBrain'] = os.path.join(
-            os.getcwd(), self.inputs.out_prefix + 'BrainExtractionBrain.' +
-            self.inputs.image_suffix)
-        if isdefined(self.inputs.keep_temporary_files
-                     ) and self.inputs.keep_temporary_files != 0:
-            outputs['BrainExtractionCSF'] = os.path.join(
-                os.getcwd(), self.inputs.out_prefix + 'BrainExtractionCSF.' +
-                self.inputs.image_suffix)
-            outputs['BrainExtractionGM'] = os.path.join(
-                os.getcwd(), self.inputs.out_prefix + 'BrainExtractionGM.' +
-                self.inputs.image_suffix)
-            outputs['BrainExtractionInitialAffine'] = os.path.join(
+        outputs["BrainExtractionMask"] = os.path.join(
+            os.getcwd(),
+            self.inputs.out_prefix + "BrainExtractionMask." + self.inputs.image_suffix,
+        )
+        outputs["BrainExtractionBrain"] = os.path.join(
+            os.getcwd(),
+            self.inputs.out_prefix + "BrainExtractionBrain." + self.inputs.image_suffix,
+        )
+        if (
+            isdefined(self.inputs.keep_temporary_files)
+            and self.inputs.keep_temporary_files != 0
+        ):
+            outputs["BrainExtractionCSF"] = os.path.join(
+                os.getcwd(),
+                self.inputs.out_prefix
+                + "BrainExtractionCSF."
+                + self.inputs.image_suffix,
+            )
+            outputs["BrainExtractionGM"] = os.path.join(
                 os.getcwd(),
-                self.inputs.out_prefix + 'BrainExtractionInitialAffine.mat')
-            outputs['BrainExtractionInitialAffineFixed'] = os.path.join(
+                self.inputs.out_prefix
+                + "BrainExtractionGM."
+                + self.inputs.image_suffix,
+            )
+            outputs["BrainExtractionInitialAffine"] = os.path.join(
+                os.getcwd(), self.inputs.out_prefix + "BrainExtractionInitialAffine.mat"
+            )
+            outputs["BrainExtractionInitialAffineFixed"] = os.path.join(
                 os.getcwd(),
-                self.inputs.out_prefix + 'BrainExtractionInitialAffineFixed.' +
-                self.inputs.image_suffix)
-            outputs['BrainExtractionInitialAffineMoving'] = os.path.join(
+                self.inputs.out_prefix
+                + "BrainExtractionInitialAffineFixed." 
+ + self.inputs.image_suffix, + ) + outputs["BrainExtractionInitialAffineMoving"] = os.path.join( os.getcwd(), - self.inputs.out_prefix + 'BrainExtractionInitialAffineMoving.' - + self.inputs.image_suffix) - outputs['BrainExtractionLaplacian'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + - 'BrainExtractionLaplacian.' + self.inputs.image_suffix) - outputs['BrainExtractionPrior0GenericAffine'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + - 'BrainExtractionPrior0GenericAffine.mat') - outputs['BrainExtractionPrior1InverseWarp'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + - 'BrainExtractionPrior1InverseWarp.' + self.inputs.image_suffix) - outputs['BrainExtractionPrior1Warp'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + - 'BrainExtractionPrior1Warp.' + self.inputs.image_suffix) - outputs['BrainExtractionPriorWarped'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + - 'BrainExtractionPriorWarped.' + self.inputs.image_suffix) - outputs['BrainExtractionSegmentation'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + - 'BrainExtractionSegmentation.' + self.inputs.image_suffix) - outputs['BrainExtractionTemplateLaplacian'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + - 'BrainExtractionTemplateLaplacian.' + self.inputs.image_suffix) - outputs['BrainExtractionTmp'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + 'BrainExtractionTmp.' + - self.inputs.image_suffix) - outputs['BrainExtractionWM'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + 'BrainExtractionWM.' + - self.inputs.image_suffix) - outputs['N4Corrected0'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + 'N4Corrected0.' + - self.inputs.image_suffix) - outputs['N4Truncated0'] = os.path.join( - os.getcwd(), self.inputs.out_prefix + 'N4Truncated0.' + - self.inputs.image_suffix) + self.inputs.out_prefix + + "BrainExtractionInitialAffineMoving." + + self.inputs.image_suffix, + ) + outputs["BrainExtractionLaplacian"] = os.path.join( + os.getcwd(), + self.inputs.out_prefix + + "BrainExtractionLaplacian." + + self.inputs.image_suffix, + ) + outputs["BrainExtractionPrior0GenericAffine"] = os.path.join( + os.getcwd(), + self.inputs.out_prefix + "BrainExtractionPrior0GenericAffine.mat", + ) + outputs["BrainExtractionPrior1InverseWarp"] = os.path.join( + os.getcwd(), + self.inputs.out_prefix + + "BrainExtractionPrior1InverseWarp." + + self.inputs.image_suffix, + ) + outputs["BrainExtractionPrior1Warp"] = os.path.join( + os.getcwd(), + self.inputs.out_prefix + + "BrainExtractionPrior1Warp." + + self.inputs.image_suffix, + ) + outputs["BrainExtractionPriorWarped"] = os.path.join( + os.getcwd(), + self.inputs.out_prefix + + "BrainExtractionPriorWarped." + + self.inputs.image_suffix, + ) + outputs["BrainExtractionSegmentation"] = os.path.join( + os.getcwd(), + self.inputs.out_prefix + + "BrainExtractionSegmentation." + + self.inputs.image_suffix, + ) + outputs["BrainExtractionTemplateLaplacian"] = os.path.join( + os.getcwd(), + self.inputs.out_prefix + + "BrainExtractionTemplateLaplacian." + + self.inputs.image_suffix, + ) + outputs["BrainExtractionTmp"] = os.path.join( + os.getcwd(), + self.inputs.out_prefix + + "BrainExtractionTmp." + + self.inputs.image_suffix, + ) + outputs["BrainExtractionWM"] = os.path.join( + os.getcwd(), + self.inputs.out_prefix + + "BrainExtractionWM." + + self.inputs.image_suffix, + ) + outputs["N4Corrected0"] = os.path.join( + os.getcwd(), + self.inputs.out_prefix + "N4Corrected0." 
+ self.inputs.image_suffix, + ) + outputs["N4Truncated0"] = os.path.join( + os.getcwd(), + self.inputs.out_prefix + "N4Truncated0." + self.inputs.image_suffix, + ) return outputs @@ -940,75 +1099,79 @@ class JointFusionInputSpec(ANTSCommandInputSpec): 3, 2, 4, - argstr='%d', + argstr="%d", position=0, usedefault=True, mandatory=True, - desc='image dimension (2, 3, or 4)') + desc="image dimension (2, 3, or 4)", + ) modalities = traits.Int( - argstr='%d', - position=1, - mandatory=True, - desc='Number of modalities or features') + argstr="%d", position=1, mandatory=True, desc="Number of modalities or features" + ) warped_intensity_images = InputMultiPath( - File(exists=True), - argstr="-g %s...", - mandatory=True, - desc='Warped atlas images') + File(exists=True), argstr="-g %s...", mandatory=True, desc="Warped atlas images" + ) target_image = InputMultiPath( - File(exists=True), - argstr='-tg %s...', - mandatory=True, - desc='Target image(s)') + File(exists=True), argstr="-tg %s...", mandatory=True, desc="Target image(s)" + ) warped_label_images = InputMultiPath( File(exists=True), argstr="-l %s...", mandatory=True, - desc='Warped atlas segmentations') + desc="Warped atlas segmentations", + ) method = traits.Str( - default='Joint', - argstr='-m %s', + default="Joint", + argstr="-m %s", usedefault=True, - desc=('Select voting method. Options: Joint (Joint' - ' Label Fusion). May be followed by optional' - ' parameters in brackets, e.g., -m Joint[0.1,2]')) + desc=( + "Select voting method. Options: Joint (Joint" + " Label Fusion). May be followed by optional" + " parameters in brackets, e.g., -m Joint[0.1,2]" + ), + ) alpha = traits.Float( default=0.1, usedefault=True, - requires=['method'], - desc=('Regularization term added to matrix Mx for inverse')) + requires=["method"], + desc=("Regularization term added to matrix Mx for inverse"), + ) beta = traits.Int( default=2, usedefault=True, - requires=['method'], - desc=('Exponent for mapping intensity difference to joint error')) + requires=["method"], + desc=("Exponent for mapping intensity difference to joint error"), + ) output_label_image = File( - argstr='%s', + argstr="%s", mandatory=True, position=-1, - name_template='%s', - output_name='output_label_image', - desc='Output fusion label map image') + name_template="%s", + output_name="output_label_image", + desc="Output fusion label map image", + ) patch_radius = traits.ListInt( minlen=3, maxlen=3, - argstr='-rp %s', - desc=('Patch radius for similarity measures, ' - 'scalar or vector. Default: 2x2x2')) + argstr="-rp %s", + desc=( + "Patch radius for similarity measures, " "scalar or vector. Default: 2x2x2" + ), + ) search_radius = traits.ListInt( - minlen=3, - maxlen=3, - argstr='-rs %s', - desc='Local search radius. Default: 3x3x3') + minlen=3, maxlen=3, argstr="-rs %s", desc="Local search radius. 
Default: 3x3x3" + ) exclusion_region = File( exists=True, - argstr='-x %s', - desc=('Specify an exclusion region for the given label.')) + argstr="-x %s", + desc=("Specify an exclusion region for the given label."), + ) atlas_group_id = traits.ListInt( - argstr='-gp %d...', desc='Assign a group ID for each atlas') + argstr="-gp %d...", desc="Assign a group ID for each atlas" + ) atlas_group_weights = traits.ListInt( - argstr='-gpw %d...', - desc=('Assign the voting weights to each atlas group')) + argstr="-gpw %d...", desc=("Assign the voting weights to each atlas group") + ) class JointFusionOutputSpec(TraitedSpec): @@ -1047,33 +1210,36 @@ class JointFusion(ANTSCommand): 'jointfusion 3 1 -m Joint[0.5,1] -rp 3x2x1 -rs 1x2x3 -tg T1.nii -g im1.nii -g im2.nii -g im3.nii \ -l segmentation0.nii.gz -l segmentation1.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii' """ + input_spec = JointFusionInputSpec output_spec = JointFusionOutputSpec - _cmd = 'jointfusion' + _cmd = "jointfusion" def _format_arg(self, opt, spec, val): - if opt == 'method': - if '[' in val: - retval = '-m {0}'.format(val) + if opt == "method": + if "[" in val: + retval = "-m {0}".format(val) else: - retval = '-m {0}[{1},{2}]'.format( - self.inputs.method, self.inputs.alpha, self.inputs.beta) - elif opt == 'patch_radius': - retval = '-rp {0}'.format(self._format_xarray(val)) - elif opt == 'search_radius': - retval = '-rs {0}'.format(self._format_xarray(val)) + retval = "-m {0}[{1},{2}]".format( + self.inputs.method, self.inputs.alpha, self.inputs.beta + ) + elif opt == "patch_radius": + retval = "-rp {0}".format(self._format_xarray(val)) + elif opt == "search_radius": + retval = "-rs {0}".format(self._format_xarray(val)) else: - if opt == 'warped_intensity_images': - assert len(val) == self.inputs.modalities * len(self.inputs.warped_label_images), \ - "Number of intensity images and label maps must be the same {0}!={1}".format( - len(val), len(self.inputs.warped_label_images)) + if opt == "warped_intensity_images": + assert len(val) == self.inputs.modalities * len( + self.inputs.warped_label_images + ), "Number of intensity images and label maps must be the same {0}!={1}".format( + len(val), len(self.inputs.warped_label_images) + ) return super(JointFusion, self)._format_arg(opt, spec, val) return retval def _list_outputs(self): outputs = self._outputs().get() - outputs['output_label_image'] = os.path.abspath( - self.inputs.output_label_image) + outputs["output_label_image"] = os.path.abspath(self.inputs.output_label_image) return outputs @@ -1082,52 +1248,61 @@ class DenoiseImageInputSpec(ANTSCommandInputSpec): 2, 3, 4, - argstr='-d %d', - desc='This option forces the image to be treated ' - 'as a specified-dimensional image. If not ' - 'specified, the program tries to infer the ' - 'dimensionality from the input image.') + argstr="-d %d", + desc="This option forces the image to be treated " + "as a specified-dimensional image. 
If not " + "specified, the program tries to infer the " + "dimensionality from the input image.", + ) input_image = File( exists=True, argstr="-i %s", mandatory=True, - desc='A scalar image is expected as input for noise correction.') + desc="A scalar image is expected as input for noise correction.", + ) noise_model = traits.Enum( - 'Gaussian', - 'Rician', - argstr='-n %s', + "Gaussian", + "Rician", + argstr="-n %s", usedefault=True, - desc=('Employ a Rician or Gaussian noise model.')) + desc=("Employ a Rician or Gaussian noise model."), + ) shrink_factor = traits.Int( default_value=1, usedefault=True, - argstr='-s %s', - desc=('Running noise correction on large images can' - ' be time consuming. To lessen computation time,' - ' the input image can be resampled. The shrink' - ' factor, specified as a single integer, describes' - ' this resampling. Shrink factor = 1 is the default.')) + argstr="-s %s", + desc=( + "Running noise correction on large images can" + " be time consuming. To lessen computation time," + " the input image can be resampled. The shrink" + " factor, specified as a single integer, describes" + " this resampling. Shrink factor = 1 is the default." + ), + ) output_image = File( argstr="-o %s", - name_source=['input_image'], + name_source=["input_image"], hash_files=False, keep_extension=True, - name_template='%s_noise_corrected', - desc='The output consists of the noise corrected' - ' version of the input image.') + name_template="%s_noise_corrected", + desc="The output consists of the noise corrected" + " version of the input image.", + ) save_noise = traits.Bool( False, mandatory=True, usedefault=True, - desc=('True if the estimated noise should be saved to file.'), - xor=['noise_image']) + desc=("True if the estimated noise should be saved to file."), + xor=["noise_image"], + ) noise_image = File( - name_source=['input_image'], + name_source=["input_image"], hash_files=False, keep_extension=True, - name_template='%s_noise', - desc='Filename for the estimated noise.') - verbose = traits.Bool(False, argstr="-v", desc=('Verbose output.')) + name_template="%s_noise", + desc="Filename for the estimated noise.", + ) + verbose = traits.Bool(False, argstr="-v", desc=("Verbose output.")) class DenoiseImageOutputSpec(TraitedSpec): @@ -1160,16 +1335,19 @@ class DenoiseImage(ANTSCommand): >>> denoise_3.cmdline 'DenoiseImage -i im1.nii -n Gaussian -o [ im1_noise_corrected.nii, im1_noise.nii ] -s 1' """ + input_spec = DenoiseImageInputSpec output_spec = DenoiseImageOutputSpec - _cmd = 'DenoiseImage' + _cmd = "DenoiseImage" def _format_arg(self, name, trait_spec, value): - if ((name == 'output_image') and - (self.inputs.save_noise or isdefined(self.inputs.noise_image))): - newval = '[ %s, %s ]' % ( - self._filename_from_source('output_image'), - self._filename_from_source('noise_image')) + if (name == "output_image") and ( + self.inputs.save_noise or isdefined(self.inputs.noise_image) + ): + newval = "[ %s, %s ]" % ( + self._filename_from_source("output_image"), + self._filename_from_source("noise_image"), + ) return trait_spec.argstr % newval return super(DenoiseImage, self)._format_arg(name, trait_spec, value) @@ -1180,121 +1358,145 @@ class AntsJointFusionInputSpec(ANTSCommandInputSpec): 3, 2, 4, - argstr='-d %d', - desc='This option forces the image to be treated ' - 'as a specified-dimensional image. 
If not ' - 'specified, the program tries to infer the ' - 'dimensionality from the input image.') + argstr="-d %d", + desc="This option forces the image to be treated " + "as a specified-dimensional image. If not " + "specified, the program tries to infer the " + "dimensionality from the input image.", + ) target_image = traits.List( InputMultiPath(File(exists=True)), - argstr='-t %s', + argstr="-t %s", mandatory=True, - desc='The target image (or ' - 'multimodal target images) assumed to be ' - 'aligned to a common image domain.') + desc="The target image (or " + "multimodal target images) assumed to be " + "aligned to a common image domain.", + ) atlas_image = traits.List( InputMultiPath(File(exists=True)), argstr="-g %s...", mandatory=True, - desc='The atlas image (or ' - 'multimodal atlas images) assumed to be ' - 'aligned to a common image domain.') + desc="The atlas image (or " + "multimodal atlas images) assumed to be " + "aligned to a common image domain.", + ) atlas_segmentation_image = InputMultiPath( File(exists=True), argstr="-l %s...", mandatory=True, - desc='The atlas segmentation ' - 'images. For performing label fusion the number ' - 'of specified segmentations should be identical ' - 'to the number of atlas image sets.') + desc="The atlas segmentation " + "images. For performing label fusion the number " + "of specified segmentations should be identical " + "to the number of atlas image sets.", + ) alpha = traits.Float( default_value=0.1, usedefault=True, - argstr='-a %s', + argstr="-a %s", desc=( - 'Regularization ' - 'term added to matrix Mx for calculating the inverse. Default = 0.1' - )) + "Regularization " + "term added to matrix Mx for calculating the inverse. Default = 0.1" + ), + ) beta = traits.Float( default_value=2.0, usedefault=True, - argstr='-b %s', - desc=('Exponent for mapping ' - 'intensity difference to the joint error. Default = 2.0')) + argstr="-b %s", + desc=( + "Exponent for mapping " + "intensity difference to the joint error. Default = 2.0" + ), + ) retain_label_posterior_images = traits.Bool( False, - argstr='-r', + argstr="-r", usedefault=True, - requires=['atlas_segmentation_image'], - desc=('Retain label posterior probability images. Requires ' - 'atlas segmentations to be specified. Default = false')) + requires=["atlas_segmentation_image"], + desc=( + "Retain label posterior probability images. Requires " + "atlas segmentations to be specified. Default = false" + ), + ) retain_atlas_voting_images = traits.Bool( False, - argstr='-f', + argstr="-f", usedefault=True, - desc=('Retain atlas voting images. Default = false')) + desc=("Retain atlas voting images. Default = false"), + ) constrain_nonnegative = traits.Bool( False, - argstr='-c', + argstr="-c", usedefault=True, - desc=('Constrain solution to non-negative weights.')) + desc=("Constrain solution to non-negative weights."), + ) patch_radius = traits.ListInt( minlen=3, maxlen=3, - argstr='-p %s', - desc=('Patch radius for similarity measures.' - 'Default: 2x2x2')) + argstr="-p %s", + desc=("Patch radius for similarity measures." "Default: 2x2x2"), + ) patch_metric = traits.Enum( - 'PC', - 'MSQ', - argstr='-m %s', - desc=('Metric to be used in determining the most similar ' - 'neighborhood patch. Options include Pearson\'s ' - 'correlation (PC) and mean squares (MSQ). Default = ' - 'PC (Pearson correlation).')) + "PC", + "MSQ", + argstr="-m %s", + desc=( + "Metric to be used in determining the most similar " + "neighborhood patch. 
Options include Pearson's " + "correlation (PC) and mean squares (MSQ). Default = " + "PC (Pearson correlation)." + ), + ) search_radius = traits.List( [3, 3, 3], minlen=1, maxlen=3, - argstr='-s %s', + argstr="-s %s", usedefault=True, - desc=('Search radius for similarity measures. Default = 3x3x3. ' - 'One can also specify an image where the value at the ' - 'voxel specifies the isotropic search radius at that voxel.')) + desc=( + "Search radius for similarity measures. Default = 3x3x3. " + "One can also specify an image where the value at the " + "voxel specifies the isotropic search radius at that voxel." + ), + ) exclusion_image_label = traits.List( traits.Str(), - argstr='-e %s', - requires=['exclusion_image'], - desc=('Specify a label for the exclusion region.')) + argstr="-e %s", + requires=["exclusion_image"], + desc=("Specify a label for the exclusion region."), + ) exclusion_image = traits.List( - File(exists=True), - desc=('Specify an exclusion region for the given label.')) + File(exists=True), desc=("Specify an exclusion region for the given label.") + ) mask_image = File( - argstr='-x %s', + argstr="-x %s", exists=True, - desc='If a mask image ' - 'is specified, fusion is only performed in the mask region.') + desc="If a mask image " + "is specified, fusion is only performed in the mask region.", + ) out_label_fusion = File( - argstr="%s", hash_files=False, desc='The output label fusion image.') + argstr="%s", hash_files=False, desc="The output label fusion image." + ) out_intensity_fusion_name_format = traits.Str( argstr="", - desc='Optional intensity fusion ' - 'image file name format. ' - '(e.g. "antsJointFusionIntensity_%d.nii.gz")') + desc="Optional intensity fusion " + "image file name format. " + '(e.g. "antsJointFusionIntensity_%d.nii.gz")', + ) out_label_post_prob_name_format = traits.Str( - 'antsJointFusionPosterior_%d.nii.gz', - requires=['out_label_fusion', 'out_intensity_fusion_name_format'], - desc='Optional label posterior probability ' - 'image file name format.') + "antsJointFusionPosterior_%d.nii.gz", + requires=["out_label_fusion", "out_intensity_fusion_name_format"], + desc="Optional label posterior probability " "image file name format.", + ) out_atlas_voting_weight_name_format = traits.Str( - 'antsJointFusionVotingWeight_%d.nii.gz', + "antsJointFusionVotingWeight_%d.nii.gz", requires=[ - 'out_label_fusion', 'out_intensity_fusion_name_format', - 'out_label_post_prob_name_format' + "out_label_fusion", + "out_intensity_fusion_name_format", + "out_label_post_prob_name_format", ], - desc='Optional atlas voting weight image ' - 'file name format.') - verbose = traits.Bool(False, argstr="-v", desc=('Verbose output.')) + desc="Optional atlas voting weight image " "file name format.", + ) + verbose = traits.Bool(False, argstr="-v", desc=("Verbose output.")) class AntsJointFusionOutputSpec(TraitedSpec): @@ -1364,69 +1566,79 @@ class AntsJointFusion(ANTSCommand): -p 3x2x1 -s mask.nii -t ['im1.nii', 'im2.nii'] -v" """ + input_spec = AntsJointFusionInputSpec output_spec = AntsJointFusionOutputSpec - _cmd = 'antsJointFusion' + _cmd = "antsJointFusion" def _format_arg(self, opt, spec, val): - if opt == 'exclusion_image_label': + if opt == "exclusion_image_label": retval = [] for ii in range(len(self.inputs.exclusion_image_label)): retval.append( - '-e {0}[{1}]'.format(self.inputs.exclusion_image_label[ii], - self.inputs.exclusion_image[ii])) - retval = ' '.join(retval) - elif opt == 'patch_radius': - retval = '-p {0}'.format(self._format_xarray(val)) - elif opt == 
'search_radius': - retval = '-s {0}'.format(self._format_xarray(val)) - elif opt == 'out_label_fusion': + "-e {0}[{1}]".format( + self.inputs.exclusion_image_label[ii], + self.inputs.exclusion_image[ii], + ) + ) + retval = " ".join(retval) + elif opt == "patch_radius": + retval = "-p {0}".format(self._format_xarray(val)) + elif opt == "search_radius": + retval = "-s {0}".format(self._format_xarray(val)) + elif opt == "out_label_fusion": if isdefined(self.inputs.out_intensity_fusion_name_format): if isdefined(self.inputs.out_label_post_prob_name_format): - if isdefined( - self.inputs.out_atlas_voting_weight_name_format): - retval = '-o [{0}, {1}, {2}, {3}]'.format( + if isdefined(self.inputs.out_atlas_voting_weight_name_format): + retval = "-o [{0}, {1}, {2}, {3}]".format( self.inputs.out_label_fusion, self.inputs.out_intensity_fusion_name_format, self.inputs.out_label_post_prob_name_format, - self.inputs.out_atlas_voting_weight_name_format) + self.inputs.out_atlas_voting_weight_name_format, + ) else: - retval = '-o [{0}, {1}, {2}]'.format( + retval = "-o [{0}, {1}, {2}]".format( self.inputs.out_label_fusion, self.inputs.out_intensity_fusion_name_format, - self.inputs.out_label_post_prob_name_format) + self.inputs.out_label_post_prob_name_format, + ) else: - retval = '-o [{0}, {1}]'.format( + retval = "-o [{0}, {1}]".format( self.inputs.out_label_fusion, - self.inputs.out_intensity_fusion_name_format) + self.inputs.out_intensity_fusion_name_format, + ) else: - retval = '-o {0}'.format(self.inputs.out_label_fusion) - elif opt == 'out_intensity_fusion_name_format': - retval = '' + retval = "-o {0}".format(self.inputs.out_label_fusion) + elif opt == "out_intensity_fusion_name_format": + retval = "" if not isdefined(self.inputs.out_label_fusion): - retval = '-o {0}'.format( - self.inputs.out_intensity_fusion_name_format) - elif opt == 'atlas_image': - atlas_image_cmd = " ".join([ - '-g [{0}]'.format(", ".join("'%s'" % fn for fn in ai)) - for ai in self.inputs.atlas_image - ]) + retval = "-o {0}".format(self.inputs.out_intensity_fusion_name_format) + elif opt == "atlas_image": + atlas_image_cmd = " ".join( + [ + "-g [{0}]".format(", ".join("'%s'" % fn for fn in ai)) + for ai in self.inputs.atlas_image + ] + ) retval = atlas_image_cmd - elif opt == 'target_image': - target_image_cmd = " ".join([ - '-t [{0}]'.format(", ".join("'%s'" % fn for fn in ai)) - for ai in self.inputs.target_image - ]) + elif opt == "target_image": + target_image_cmd = " ".join( + [ + "-t [{0}]".format(", ".join("'%s'" % fn for fn in ai)) + for ai in self.inputs.target_image + ] + ) retval = target_image_cmd - elif opt == 'atlas_segmentation_image': - assert len(val) == len(self.inputs.atlas_image), "Number of specified " \ - "segmentations should be identical to the number of atlas image " \ + elif opt == "atlas_segmentation_image": + assert len(val) == len(self.inputs.atlas_image), ( + "Number of specified " + "segmentations should be identical to the number of atlas image " "sets {0}!={1}".format(len(val), len(self.inputs.atlas_image)) + ) - atlas_segmentation_image_cmd = " ".join([ - '-l {0}'.format(fn) - for fn in self.inputs.atlas_segmentation_image - ]) + atlas_segmentation_image_cmd = " ".join( + ["-l {0}".format(fn) for fn in self.inputs.atlas_segmentation_image] + ) retval = atlas_segmentation_image_cmd else: @@ -1436,17 +1648,19 @@ def _format_arg(self, opt, spec, val): def _list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.out_label_fusion): - outputs['out_label_fusion'] = 
os.path.abspath( - self.inputs.out_label_fusion) + outputs["out_label_fusion"] = os.path.abspath(self.inputs.out_label_fusion) if isdefined(self.inputs.out_intensity_fusion_name_format): - outputs['out_intensity_fusion_name_format'] = os.path.abspath( - self.inputs.out_intensity_fusion_name_format) + outputs["out_intensity_fusion_name_format"] = os.path.abspath( + self.inputs.out_intensity_fusion_name_format + ) if isdefined(self.inputs.out_label_post_prob_name_format): - outputs['out_label_post_prob_name_format'] = os.path.abspath( - self.inputs.out_label_post_prob_name_format) + outputs["out_label_post_prob_name_format"] = os.path.abspath( + self.inputs.out_label_post_prob_name_format + ) if isdefined(self.inputs.out_atlas_voting_weight_name_format): - outputs['out_atlas_voting_weight_name_format'] = os.path.abspath( - self.inputs.out_atlas_voting_weight_name_format) + outputs["out_atlas_voting_weight_name_format"] = os.path.abspath( + self.inputs.out_atlas_voting_weight_name_format + ) return outputs @@ -1455,54 +1669,52 @@ class KellyKapowskiInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, - argstr='--image-dimensionality %d', + argstr="--image-dimensionality %d", usedefault=True, - desc='image dimension (2 or 3)') + desc="image dimension (2 or 3)", + ) segmentation_image = File( exists=True, argstr='--segmentation-image "%s"', mandatory=True, - desc= - "A segmentation image must be supplied labeling the gray and white matters." + desc="A segmentation image must be supplied labeling the gray and white matters." " Default values = 2 and 3, respectively.", ) gray_matter_label = traits.Int( 2, usedefault=True, - desc= - "The label value for the gray matter label in the segmentation_image.") + desc="The label value for the gray matter label in the segmentation_image.", + ) white_matter_label = traits.Int( 3, usedefault=True, - desc= - "The label value for the white matter label in the segmentation_image." + desc="The label value for the white matter label in the segmentation_image.", ) gray_matter_prob_image = File( exists=True, argstr='--gray-matter-probability-image "%s"', - desc= - "In addition to the segmentation image, a gray matter probability image can be" + desc="In addition to the segmentation image, a gray matter probability image can be" " used. If no such image is supplied, one is created using the segmentation image" - " and a variance of 1.0 mm.") + " and a variance of 1.0 mm.", + ) white_matter_prob_image = File( exists=True, argstr='--white-matter-probability-image "%s"', - desc= - "In addition to the segmentation image, a white matter probability image can be" + desc="In addition to the segmentation image, a white matter probability image can be" " used. If no such image is supplied, one is created using the segmentation image" - " and a variance of 1.0 mm.") + " and a variance of 1.0 mm.", + ) convergence = traits.Str( default="[50,0.001,10]", argstr='--convergence "%s"', usedefault=True, - desc= - "Convergence is determined by fitting a line to the normalized energy profile of" + desc="Convergence is determined by fitting a line to the normalized energy profile of" " the last N iterations (where N is specified by the window size) and determining" " the slope which is then compared with the convergence threshold.", ) @@ -1511,68 +1723,80 @@ class KellyKapowskiInputSpec(ANTSCommandInputSpec): 10, usedefault=True, argstr="--thickness-prior-estimate %f", - desc= - "Provides a prior constraint on the final thickness measurement in mm." 
+ desc="Provides a prior constraint on the final thickness measurement in mm.", ) thickness_prior_image = File( exists=True, argstr='--thickness-prior-image "%s"', - desc="An image containing spatially varying prior thickness values.") + desc="An image containing spatially varying prior thickness values.", + ) gradient_step = traits.Float( 0.025, usedefault=True, argstr="--gradient-step %f", - desc="Gradient step size for the optimization.") + desc="Gradient step size for the optimization.", + ) smoothing_variance = traits.Float( - 1.0, usedefault=True, + 1.0, + usedefault=True, argstr="--smoothing-variance %f", - desc="Defines the Gaussian smoothing of the hit and total images.") + desc="Defines the Gaussian smoothing of the hit and total images.", + ) smoothing_velocity_field = traits.Float( - 1.5, usedefault=True, + 1.5, + usedefault=True, argstr="--smoothing-velocity-field-parameter %f", - desc= - "Defines the Gaussian smoothing of the velocity field (default = 1.5)." + desc="Defines the Gaussian smoothing of the velocity field (default = 1.5)." " If the b-spline smoothing option is chosen, then this defines the" - " isotropic mesh spacing for the smoothing spline (default = 15).") + " isotropic mesh spacing for the smoothing spline (default = 15).", + ) use_bspline_smoothing = traits.Bool( argstr="--use-bspline-smoothing 1", - desc="Sets the option for B-spline smoothing of the velocity field.") + desc="Sets the option for B-spline smoothing of the velocity field.", + ) number_integration_points = traits.Int( - 10, usedefault=True, + 10, + usedefault=True, argstr="--number-of-integration-points %d", - desc="Number of compositions of the diffeomorphism per iteration.") + desc="Number of compositions of the diffeomorphism per iteration.", + ) max_invert_displacement_field_iters = traits.Int( - 20, usedefault=True, + 20, + usedefault=True, argstr="--maximum-number-of-invert-displacement-field-iterations %d", desc="Maximum number of iterations for estimating the invert" - "displacement field.") + "displacement field.", + ) cortical_thickness = File( argstr='--output "%s"', keep_extension=True, name_source=["segmentation_image"], - name_template='%s_cortical_thickness', - desc='Filename for the cortical thickness.', - hash_files=False) + name_template="%s_cortical_thickness", + desc="Filename for the cortical thickness.", + hash_files=False, + ) warped_white_matter = File( name_source=["segmentation_image"], keep_extension=True, - name_template='%s_warped_white_matter', - desc='Filename for the warped white matter file.', - hash_files=False) + name_template="%s_warped_white_matter", + desc="Filename for the warped white matter file.", + hash_files=False, + ) class KellyKapowskiOutputSpec(TraitedSpec): cortical_thickness = File( - desc="A thickness map defined in the segmented gray matter.") + desc="A thickness map defined in the segmented gray matter." + ) warped_white_matter = File(desc="A warped white matter image.") @@ -1600,66 +1824,68 @@ class KellyKapowski(ANTSCommand): --smoothing-velocity-field-parameter 1.500000 --thickness-prior-estimate 10.000000' """ + _cmd = "KellyKapowski" input_spec = KellyKapowskiInputSpec output_spec = KellyKapowskiOutputSpec - references_ = [{ - 'entry': - BibTeX( - "@book{Das2009867," - "author={Sandhitsu R. Das and Brian B. Avants and Murray Grossman and James C. 
Gee}," - "title={Registration based cortical thickness measurement.}," - "journal={NeuroImage}," - "volume={45}," - "number={37}," - "pages={867--879}," - "year={2009}," - "issn={1053-8119}," - "url={http://www.sciencedirect.com/science/article/pii/S1053811908012780}," - "doi={https://doi.org/10.1016/j.neuroimage.2008.12.016}" - "}"), - 'description': - 'The details on the implementation of DiReCT.', - 'tags': ['implementation'], - }] + references_ = [ + { + "entry": BibTeX( + "@book{Das2009867," + "author={Sandhitsu R. Das and Brian B. Avants and Murray Grossman and James C. Gee}," + "title={Registration based cortical thickness measurement.}," + "journal={NeuroImage}," + "volume={45}," + "number={37}," + "pages={867--879}," + "year={2009}," + "issn={1053-8119}," + "url={http://www.sciencedirect.com/science/article/pii/S1053811908012780}," + "doi={https://doi.org/10.1016/j.neuroimage.2008.12.016}" + "}" + ), + "description": "The details on the implementation of DiReCT.", + "tags": ["implementation"], + } + ] def _parse_inputs(self, skip=None): if skip is None: skip = [] - skip += [ - 'warped_white_matter', 'gray_matter_label', 'white_matter_label' - ] + skip += ["warped_white_matter", "gray_matter_label", "white_matter_label"] return super(KellyKapowski, self)._parse_inputs(skip=skip) def _gen_filename(self, name): - if name == 'cortical_thickness': + if name == "cortical_thickness": output = self.inputs.cortical_thickness if not isdefined(output): _, name, ext = split_filename(self.inputs.segmentation_image) - output = name + '_cortical_thickness' + ext + output = name + "_cortical_thickness" + ext return output - if name == 'warped_white_matter': + if name == "warped_white_matter": output = self.inputs.warped_white_matter if not isdefined(output): _, name, ext = split_filename(self.inputs.segmentation_image) - output = name + '_warped_white_matter' + ext + output = name + "_warped_white_matter" + ext return output return None def _format_arg(self, opt, spec, val): if opt == "segmentation_image": - newval = '[{0},{1},{2}]'.format(self.inputs.segmentation_image, - self.inputs.gray_matter_label, - self.inputs.white_matter_label) + newval = "[{0},{1},{2}]".format( + self.inputs.segmentation_image, + self.inputs.gray_matter_label, + self.inputs.white_matter_label, + ) return spec.argstr % newval if opt == "cortical_thickness": ct = self._gen_filename("cortical_thickness") wm = self._gen_filename("warped_white_matter") - newval = '[{},{}]'.format(ct, wm) + newval = "[{},{}]".format(ct, wm) return spec.argstr % newval return super(KellyKapowski, self)._format_arg(opt, spec, val) diff --git a/nipype/interfaces/ants/tests/test_auto_ANTS.py b/nipype/interfaces/ants/tests/test_auto_ANTS.py index 3e762beaa9..f3e329f957 100644 --- a/nipype/interfaces/ants/tests/test_auto_ANTS.py +++ b/nipype/interfaces/ants/tests/test_auto_ANTS.py @@ -4,90 +4,50 @@ def test_ANTS_inputs(): input_map = dict( - affine_gradient_descent_option=dict(argstr='%s', ), - args=dict(argstr='%s', ), - delta_time=dict(requires=['number_of_time_steps'], ), - dimension=dict( - argstr='%d', - position=1, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixed_image=dict(mandatory=True, ), - gradient_step_length=dict(requires=['transformation_model'], ), - metric=dict(mandatory=True, ), - metric_weight=dict( - mandatory=True, - requires=['metric'], - usedefault=True, - ), - mi_option=dict( - argstr='--MI-option %s', - sep='x', - ), - moving_image=dict( - argstr='%s', - mandatory=True, - ), - num_threads=dict( - 
nohash=True, - usedefault=True, - ), + affine_gradient_descent_option=dict(argstr="%s",), + args=dict(argstr="%s",), + delta_time=dict(requires=["number_of_time_steps"],), + dimension=dict(argstr="%d", position=1,), + environ=dict(nohash=True, usedefault=True,), + fixed_image=dict(mandatory=True,), + gradient_step_length=dict(requires=["transformation_model"],), + metric=dict(mandatory=True,), + metric_weight=dict(mandatory=True, requires=["metric"], usedefault=True,), + mi_option=dict(argstr="--MI-option %s", sep="x",), + moving_image=dict(argstr="%s", mandatory=True,), + num_threads=dict(nohash=True, usedefault=True,), number_of_affine_iterations=dict( - argstr='--number-of-affine-iterations %s', - sep='x', + argstr="--number-of-affine-iterations %s", sep="x", ), - number_of_iterations=dict( - argstr='--number-of-iterations %s', - sep='x', - ), - number_of_time_steps=dict(requires=['gradient_step_length'], ), + number_of_iterations=dict(argstr="--number-of-iterations %s", sep="x",), + number_of_time_steps=dict(requires=["gradient_step_length"],), output_transform_prefix=dict( - argstr='--output-naming %s', - mandatory=True, - usedefault=True, - ), - radius=dict( - mandatory=True, - requires=['metric'], - ), - regularization=dict(argstr='%s', ), - regularization_deformation_field_sigma=dict( - requires=['regularization'], ), - regularization_gradient_field_sigma=dict(requires=['regularization' - ], ), - smoothing_sigmas=dict( - argstr='--gaussian-smoothing-sigmas %s', - sep='x', - ), - subsampling_factors=dict( - argstr='--subsampling-factors %s', - sep='x', - ), - symmetry_type=dict(requires=['delta_time'], ), - transformation_model=dict( - argstr='%s', - mandatory=True, - ), - use_histogram_matching=dict( - argstr='%s', - usedefault=True, - ), + argstr="--output-naming %s", mandatory=True, usedefault=True, + ), + radius=dict(mandatory=True, requires=["metric"],), + regularization=dict(argstr="%s",), + regularization_deformation_field_sigma=dict(requires=["regularization"],), + regularization_gradient_field_sigma=dict(requires=["regularization"],), + smoothing_sigmas=dict(argstr="--gaussian-smoothing-sigmas %s", sep="x",), + subsampling_factors=dict(argstr="--subsampling-factors %s", sep="x",), + symmetry_type=dict(requires=["delta_time"],), + transformation_model=dict(argstr="%s", mandatory=True,), + use_histogram_matching=dict(argstr="%s", usedefault=True,), ) inputs = ANTS.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ANTS_outputs(): output_map = dict( - affine_transform=dict(extensions=None, ), - inverse_warp_transform=dict(extensions=None, ), - metaheader=dict(extensions=None, ), - metaheader_raw=dict(extensions=None, ), - warp_transform=dict(extensions=None, ), + affine_transform=dict(extensions=None,), + inverse_warp_transform=dict(extensions=None,), + metaheader=dict(extensions=None,), + metaheader_raw=dict(extensions=None,), + warp_transform=dict(extensions=None,), ) outputs = ANTS.output_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_ANTSCommand.py b/nipype/interfaces/ants/tests/test_auto_ANTSCommand.py index 54d1effe3a..7a5ff5dec5 100644 --- a/nipype/interfaces/ants/tests/test_auto_ANTSCommand.py +++ b/nipype/interfaces/ants/tests/test_auto_ANTSCommand.py @@ -4,15 +4,9 @@ def test_ANTSCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - num_threads=dict( - nohash=True, - 
usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + num_threads=dict(nohash=True, usedefault=True,), ) inputs = ANTSCommand.input_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_AffineInitializer.py b/nipype/interfaces/ants/tests/test_auto_AffineInitializer.py index 0ae65a48a2..f97fbe3352 100644 --- a/nipype/interfaces/ants/tests/test_auto_AffineInitializer.py +++ b/nipype/interfaces/ants/tests/test_auto_AffineInitializer.py @@ -4,66 +4,27 @@ def test_AffineInitializer_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict( - argstr='%s', - position=0, - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixed_image=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=1, - ), - local_search=dict( - argstr='%d', - position=7, - usedefault=True, - ), - moving_image=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - out_file=dict( - argstr='%s', - extensions=None, - position=3, - usedefault=True, - ), - principal_axes=dict( - argstr='%d', - position=6, - usedefault=True, - ), - radian_fraction=dict( - argstr='%f', - position=5, - usedefault=True, - ), - search_factor=dict( - argstr='%f', - position=4, - usedefault=True, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="%s", position=0, usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + fixed_image=dict(argstr="%s", extensions=None, mandatory=True, position=1,), + local_search=dict(argstr="%d", position=7, usedefault=True,), + moving_image=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + num_threads=dict(nohash=True, usedefault=True,), + out_file=dict(argstr="%s", extensions=None, position=3, usedefault=True,), + principal_axes=dict(argstr="%d", position=6, usedefault=True,), + radian_fraction=dict(argstr="%f", position=5, usedefault=True,), + search_factor=dict(argstr="%f", position=4, usedefault=True,), ) inputs = AffineInitializer.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AffineInitializer_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = AffineInitializer.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_AntsJointFusion.py b/nipype/interfaces/ants/tests/test_auto_AntsJointFusion.py index e4e6721d82..d86f7f84cb 100644 --- a/nipype/interfaces/ants/tests/test_auto_AntsJointFusion.py +++ b/nipype/interfaces/ants/tests/test_auto_AntsJointFusion.py @@ -4,93 +4,52 @@ def test_AntsJointFusion_inputs(): input_map = dict( - alpha=dict( - argstr='-a %s', - usedefault=True, - ), - args=dict(argstr='%s', ), - atlas_image=dict( - argstr='-g %s...', - mandatory=True, - ), - atlas_segmentation_image=dict( - argstr='-l %s...', - mandatory=True, - ), - beta=dict( - argstr='-b %s', - usedefault=True, - ), - constrain_nonnegative=dict( - argstr='-c', - usedefault=True, - ), - dimension=dict(argstr='-d %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), + alpha=dict(argstr="-a %s", usedefault=True,), + args=dict(argstr="%s",), + atlas_image=dict(argstr="-g %s...", mandatory=True,), + atlas_segmentation_image=dict(argstr="-l %s...", mandatory=True,), + beta=dict(argstr="-b %s", usedefault=True,), + constrain_nonnegative=dict(argstr="-c", 
usedefault=True,), + dimension=dict(argstr="-d %d",), + environ=dict(nohash=True, usedefault=True,), exclusion_image=dict(), - exclusion_image_label=dict( - argstr='-e %s', - requires=['exclusion_image'], - ), - mask_image=dict( - argstr='-x %s', - extensions=None, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - out_atlas_voting_weight_name_format=dict(requires=[ - 'out_label_fusion', 'out_intensity_fusion_name_format', - 'out_label_post_prob_name_format' - ], ), - out_intensity_fusion_name_format=dict(argstr='', ), - out_label_fusion=dict( - argstr='%s', - extensions=None, - hash_files=False, - ), + exclusion_image_label=dict(argstr="-e %s", requires=["exclusion_image"],), + mask_image=dict(argstr="-x %s", extensions=None,), + num_threads=dict(nohash=True, usedefault=True,), + out_atlas_voting_weight_name_format=dict( + requires=[ + "out_label_fusion", + "out_intensity_fusion_name_format", + "out_label_post_prob_name_format", + ], + ), + out_intensity_fusion_name_format=dict(argstr="",), + out_label_fusion=dict(argstr="%s", extensions=None, hash_files=False,), out_label_post_prob_name_format=dict( - requires=['out_label_fusion', - 'out_intensity_fusion_name_format'], ), - patch_metric=dict(argstr='-m %s', ), - patch_radius=dict( - argstr='-p %s', - maxlen=3, - minlen=3, - ), - retain_atlas_voting_images=dict( - argstr='-f', - usedefault=True, + requires=["out_label_fusion", "out_intensity_fusion_name_format"], ), + patch_metric=dict(argstr="-m %s",), + patch_radius=dict(argstr="-p %s", maxlen=3, minlen=3,), + retain_atlas_voting_images=dict(argstr="-f", usedefault=True,), retain_label_posterior_images=dict( - argstr='-r', - requires=['atlas_segmentation_image'], - usedefault=True, - ), - search_radius=dict( - argstr='-s %s', - usedefault=True, + argstr="-r", requires=["atlas_segmentation_image"], usedefault=True, ), - target_image=dict( - argstr='-t %s', - mandatory=True, - ), - verbose=dict(argstr='-v', ), + search_radius=dict(argstr="-s %s", usedefault=True,), + target_image=dict(argstr="-t %s", mandatory=True,), + verbose=dict(argstr="-v",), ) inputs = AntsJointFusion.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AntsJointFusion_outputs(): output_map = dict( out_atlas_voting_weight_name_format=dict(), out_intensity_fusion_name_format=dict(), - out_label_fusion=dict(extensions=None, ), + out_label_fusion=dict(extensions=None,), out_label_post_prob_name_format=dict(), ) outputs = AntsJointFusion.output_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_ApplyTransforms.py b/nipype/interfaces/ants/tests/test_auto_ApplyTransforms.py index 00a21dce13..18add39b6c 100644 --- a/nipype/interfaces/ants/tests/test_auto_ApplyTransforms.py +++ b/nipype/interfaces/ants/tests/test_auto_ApplyTransforms.py @@ -4,60 +4,34 @@ def test_ApplyTransforms_inputs(): input_map = dict( - args=dict(argstr='%s', ), - default_value=dict( - argstr='--default-value %g', - usedefault=True, - ), - dimension=dict(argstr='--dimensionality %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - float=dict( - argstr='--float %d', - usedefault=True, - ), - input_image=dict( - argstr='--input %s', - extensions=None, - mandatory=True, - ), - input_image_type=dict(argstr='--input-image-type %d', ), - interpolation=dict( - argstr='%s', - usedefault=True, - ), + args=dict(argstr="%s",), + default_value=dict(argstr="--default-value %g", usedefault=True,), + 
dimension=dict(argstr="--dimensionality %d",), + environ=dict(nohash=True, usedefault=True,), + float=dict(argstr="--float %d", usedefault=True,), + input_image=dict(argstr="--input %s", extensions=None, mandatory=True,), + input_image_type=dict(argstr="--input-image-type %d",), + interpolation=dict(argstr="%s", usedefault=True,), interpolation_parameters=dict(), invert_transform_flags=dict(), - num_threads=dict( - nohash=True, - usedefault=True, - ), - out_postfix=dict(usedefault=True, ), - output_image=dict( - argstr='--output %s', - genfile=True, - hash_files=False, - ), - print_out_composite_warp_file=dict(requires=['output_image'], ), + num_threads=dict(nohash=True, usedefault=True,), + out_postfix=dict(usedefault=True,), + output_image=dict(argstr="--output %s", genfile=True, hash_files=False,), + print_out_composite_warp_file=dict(requires=["output_image"],), reference_image=dict( - argstr='--reference-image %s', - extensions=None, - mandatory=True, - ), - transforms=dict( - argstr='%s', - mandatory=True, + argstr="--reference-image %s", extensions=None, mandatory=True, ), + transforms=dict(argstr="%s", mandatory=True,), ) inputs = ApplyTransforms.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyTransforms_outputs(): - output_map = dict(output_image=dict(extensions=None, ), ) + output_map = dict(output_image=dict(extensions=None,),) outputs = ApplyTransforms.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_ApplyTransformsToPoints.py b/nipype/interfaces/ants/tests/test_auto_ApplyTransformsToPoints.py index 7ef77a6204..a0258471b8 100644 --- a/nipype/interfaces/ants/tests/test_auto_ApplyTransformsToPoints.py +++ b/nipype/interfaces/ants/tests/test_auto_ApplyTransformsToPoints.py @@ -4,40 +4,29 @@ def test_ApplyTransformsToPoints_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict(argstr='--dimensionality %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - input_file=dict( - argstr='--input %s', - extensions=None, - mandatory=True, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="--dimensionality %d",), + environ=dict(nohash=True, usedefault=True,), + input_file=dict(argstr="--input %s", extensions=None, mandatory=True,), invert_transform_flags=dict(), - num_threads=dict( - nohash=True, - usedefault=True, - ), + num_threads=dict(nohash=True, usedefault=True,), output_file=dict( - argstr='--output %s', + argstr="--output %s", hash_files=False, - name_source=['input_file'], - name_template='%s_transformed.csv', - ), - transforms=dict( - argstr='%s', - mandatory=True, + name_source=["input_file"], + name_template="%s_transformed.csv", ), + transforms=dict(argstr="%s", mandatory=True,), ) inputs = ApplyTransformsToPoints.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyTransformsToPoints_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) + output_map = dict(output_file=dict(extensions=None,),) outputs = ApplyTransformsToPoints.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_Atropos.py b/nipype/interfaces/ants/tests/test_auto_Atropos.py index bb7a2afe78..021348bba0 100644 --- a/nipype/interfaces/ants/tests/test_auto_Atropos.py +++ 
b/nipype/interfaces/ants/tests/test_auto_Atropos.py @@ -4,71 +4,44 @@ def test_Atropos_inputs(): input_map = dict( - args=dict(argstr='%s', ), - convergence_threshold=dict(requires=['n_iterations'], ), - dimension=dict( - argstr='--image-dimensionality %d', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - icm_use_synchronous_update=dict(argstr='%s', ), + args=dict(argstr="%s",), + convergence_threshold=dict(requires=["n_iterations"],), + dimension=dict(argstr="--image-dimensionality %d", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + icm_use_synchronous_update=dict(argstr="%s",), initialization=dict( - argstr='%s', - mandatory=True, - requires=['number_of_tissue_classes'], - ), - intensity_images=dict( - argstr='--intensity-image %s...', - mandatory=True, - ), - likelihood_model=dict(argstr='--likelihood-model %s', ), - mask_image=dict( - argstr='--mask-image %s', - extensions=None, - mandatory=True, - ), - maximum_number_of_icm_terations=dict( - requires=['icm_use_synchronous_update'], ), - mrf_radius=dict(requires=['mrf_smoothing_factor'], ), - mrf_smoothing_factor=dict(argstr='%s', ), - n_iterations=dict(argstr='%s', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - number_of_tissue_classes=dict(mandatory=True, ), + argstr="%s", mandatory=True, requires=["number_of_tissue_classes"], + ), + intensity_images=dict(argstr="--intensity-image %s...", mandatory=True,), + likelihood_model=dict(argstr="--likelihood-model %s",), + mask_image=dict(argstr="--mask-image %s", extensions=None, mandatory=True,), + maximum_number_of_icm_terations=dict(requires=["icm_use_synchronous_update"],), + mrf_radius=dict(requires=["mrf_smoothing_factor"],), + mrf_smoothing_factor=dict(argstr="%s",), + n_iterations=dict(argstr="%s",), + num_threads=dict(nohash=True, usedefault=True,), + number_of_tissue_classes=dict(mandatory=True,), out_classified_image_name=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, + argstr="%s", extensions=None, genfile=True, hash_files=False, ), - output_posteriors_name_template=dict(usedefault=True, ), - posterior_formulation=dict(argstr='%s', ), + output_posteriors_name_template=dict(usedefault=True,), + posterior_formulation=dict(argstr="%s",), prior_probability_images=dict(), - prior_probability_threshold=dict(requires=['prior_weighting'], ), + prior_probability_threshold=dict(requires=["prior_weighting"],), prior_weighting=dict(), save_posteriors=dict(), - use_mixture_model_proportions=dict(requires=['posterior_formulation' - ], ), - use_random_seed=dict( - argstr='--use-random-seed %d', - usedefault=True, - ), + use_mixture_model_proportions=dict(requires=["posterior_formulation"],), + use_random_seed=dict(argstr="--use-random-seed %d", usedefault=True,), ) inputs = Atropos.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Atropos_outputs(): - output_map = dict( - classified_image=dict(extensions=None, ), - posteriors=dict(), - ) + output_map = dict(classified_image=dict(extensions=None,), posteriors=dict(),) outputs = Atropos.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_AverageAffineTransform.py b/nipype/interfaces/ants/tests/test_auto_AverageAffineTransform.py index 9247abda86..5d8b191931 100644 --- a/nipype/interfaces/ants/tests/test_auto_AverageAffineTransform.py +++ 
b/nipype/interfaces/ants/tests/test_auto_AverageAffineTransform.py @@ -4,39 +4,24 @@ def test_AverageAffineTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict( - argstr='%d', - mandatory=True, - position=0, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="%d", mandatory=True, position=0,), + environ=dict(nohash=True, usedefault=True,), + num_threads=dict(nohash=True, usedefault=True,), output_affine_transform=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=1, - ), - transforms=dict( - argstr='%s', - mandatory=True, - position=3, + argstr="%s", extensions=None, mandatory=True, position=1, ), + transforms=dict(argstr="%s", mandatory=True, position=3,), ) inputs = AverageAffineTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AverageAffineTransform_outputs(): - output_map = dict(affine_transform=dict(extensions=None, ), ) + output_map = dict(affine_transform=dict(extensions=None,),) outputs = AverageAffineTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_AverageImages.py b/nipype/interfaces/ants/tests/test_auto_AverageImages.py index 206d27d4bf..572407efbc 100644 --- a/nipype/interfaces/ants/tests/test_auto_AverageImages.py +++ b/nipype/interfaces/ants/tests/test_auto_AverageImages.py @@ -4,36 +4,14 @@ def test_AverageImages_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict( - argstr='%d', - mandatory=True, - position=0, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - images=dict( - argstr='%s', - mandatory=True, - position=3, - ), - normalize=dict( - argstr='%d', - mandatory=True, - position=2, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="%d", mandatory=True, position=0,), + environ=dict(nohash=True, usedefault=True,), + images=dict(argstr="%s", mandatory=True, position=3,), + normalize=dict(argstr="%d", mandatory=True, position=2,), + num_threads=dict(nohash=True, usedefault=True,), output_average_image=dict( - argstr='%s', - extensions=None, - hash_files=False, - position=1, - usedefault=True, + argstr="%s", extensions=None, hash_files=False, position=1, usedefault=True, ), ) inputs = AverageImages.input_spec() @@ -41,8 +19,10 @@ def test_AverageImages_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AverageImages_outputs(): - output_map = dict(output_average_image=dict(extensions=None, ), ) + output_map = dict(output_average_image=dict(extensions=None,),) outputs = AverageImages.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_BrainExtraction.py b/nipype/interfaces/ants/tests/test_auto_BrainExtraction.py index ba220afb9c..8eb0293313 100644 --- a/nipype/interfaces/ants/tests/test_auto_BrainExtraction.py +++ b/nipype/interfaces/ants/tests/test_auto_BrainExtraction.py @@ -4,77 +4,50 @@ def test_BrainExtraction_inputs(): input_map = dict( - anatomical_image=dict( - argstr='-a %s', - extensions=None, - mandatory=True, - ), - args=dict(argstr='%s', ), + anatomical_image=dict(argstr="-a %s", extensions=None, mandatory=True,), + 
args=dict(argstr="%s",), brain_probability_mask=dict( - argstr='-m %s', - copyfile=False, - extensions=None, - mandatory=True, - ), - brain_template=dict( - argstr='-e %s', - extensions=None, - mandatory=True, - ), - debug=dict(argstr='-z 1', ), - dimension=dict( - argstr='-d %d', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - extraction_registration_mask=dict( - argstr='-f %s', - extensions=None, - ), - image_suffix=dict( - argstr='-s %s', - usedefault=True, - ), - keep_temporary_files=dict(argstr='-k %d', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - out_prefix=dict( - argstr='-o %s', - usedefault=True, - ), - use_floatingpoint_precision=dict(argstr='-q %d', ), - use_random_seeding=dict(argstr='-u %d', ), + argstr="-m %s", copyfile=False, extensions=None, mandatory=True, + ), + brain_template=dict(argstr="-e %s", extensions=None, mandatory=True,), + debug=dict(argstr="-z 1",), + dimension=dict(argstr="-d %d", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + extraction_registration_mask=dict(argstr="-f %s", extensions=None,), + image_suffix=dict(argstr="-s %s", usedefault=True,), + keep_temporary_files=dict(argstr="-k %d",), + num_threads=dict(nohash=True, usedefault=True,), + out_prefix=dict(argstr="-o %s", usedefault=True,), + use_floatingpoint_precision=dict(argstr="-q %d",), + use_random_seeding=dict(argstr="-u %d",), ) inputs = BrainExtraction.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BrainExtraction_outputs(): output_map = dict( - BrainExtractionBrain=dict(extensions=None, ), - BrainExtractionCSF=dict(extensions=None, ), - BrainExtractionGM=dict(extensions=None, ), - BrainExtractionInitialAffine=dict(extensions=None, ), - BrainExtractionInitialAffineFixed=dict(extensions=None, ), - BrainExtractionInitialAffineMoving=dict(extensions=None, ), - BrainExtractionLaplacian=dict(extensions=None, ), - BrainExtractionMask=dict(extensions=None, ), - BrainExtractionPrior0GenericAffine=dict(extensions=None, ), - BrainExtractionPrior1InverseWarp=dict(extensions=None, ), - BrainExtractionPrior1Warp=dict(extensions=None, ), - BrainExtractionPriorWarped=dict(extensions=None, ), - BrainExtractionSegmentation=dict(extensions=None, ), - BrainExtractionTemplateLaplacian=dict(extensions=None, ), - BrainExtractionTmp=dict(extensions=None, ), - BrainExtractionWM=dict(extensions=None, ), - N4Corrected0=dict(extensions=None, ), - N4Truncated0=dict(extensions=None, ), + BrainExtractionBrain=dict(extensions=None,), + BrainExtractionCSF=dict(extensions=None,), + BrainExtractionGM=dict(extensions=None,), + BrainExtractionInitialAffine=dict(extensions=None,), + BrainExtractionInitialAffineFixed=dict(extensions=None,), + BrainExtractionInitialAffineMoving=dict(extensions=None,), + BrainExtractionLaplacian=dict(extensions=None,), + BrainExtractionMask=dict(extensions=None,), + BrainExtractionPrior0GenericAffine=dict(extensions=None,), + BrainExtractionPrior1InverseWarp=dict(extensions=None,), + BrainExtractionPrior1Warp=dict(extensions=None,), + BrainExtractionPriorWarped=dict(extensions=None,), + BrainExtractionSegmentation=dict(extensions=None,), + BrainExtractionTemplateLaplacian=dict(extensions=None,), + BrainExtractionTmp=dict(extensions=None,), + BrainExtractionWM=dict(extensions=None,), + N4Corrected0=dict(extensions=None,), + N4Truncated0=dict(extensions=None,), ) outputs = BrainExtraction.output_spec() diff 
--git a/nipype/interfaces/ants/tests/test_auto_ComposeMultiTransform.py b/nipype/interfaces/ants/tests/test_auto_ComposeMultiTransform.py index 476f316a9f..ccc54c6eb8 100644 --- a/nipype/interfaces/ants/tests/test_auto_ComposeMultiTransform.py +++ b/nipype/interfaces/ants/tests/test_auto_ComposeMultiTransform.py @@ -4,46 +4,30 @@ def test_ComposeMultiTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict( - argstr='%d', - position=0, - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="%d", position=0, usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + num_threads=dict(nohash=True, usedefault=True,), output_transform=dict( - argstr='%s', + argstr="%s", extensions=None, keep_extension=True, - name_source=['transforms'], - name_template='%s_composed', + name_source=["transforms"], + name_template="%s_composed", position=1, ), - reference_image=dict( - argstr='%s', - extensions=None, - position=2, - ), - transforms=dict( - argstr='%s', - mandatory=True, - position=3, - ), + reference_image=dict(argstr="%s", extensions=None, position=2,), + transforms=dict(argstr="%s", mandatory=True, position=3,), ) inputs = ComposeMultiTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ComposeMultiTransform_outputs(): - output_map = dict(output_transform=dict(extensions=None, ), ) + output_map = dict(output_transform=dict(extensions=None,),) outputs = ComposeMultiTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_CompositeTransformUtil.py b/nipype/interfaces/ants/tests/test_auto_CompositeTransformUtil.py index 9bedb990ec..125c69f141 100644 --- a/nipype/interfaces/ants/tests/test_auto_CompositeTransformUtil.py +++ b/nipype/interfaces/ants/tests/test_auto_CompositeTransformUtil.py @@ -4,46 +4,26 @@ def test_CompositeTransformUtil_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s...', - mandatory=True, - position=3, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - out_file=dict( - argstr='%s', - extensions=None, - position=2, - ), - output_prefix=dict( - argstr='%s', - position=4, - usedefault=True, - ), - process=dict( - argstr='--%s', - position=1, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s...", mandatory=True, position=3,), + num_threads=dict(nohash=True, usedefault=True,), + out_file=dict(argstr="%s", extensions=None, position=2,), + output_prefix=dict(argstr="%s", position=4, usedefault=True,), + process=dict(argstr="--%s", position=1, usedefault=True,), ) inputs = CompositeTransformUtil.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CompositeTransformUtil_outputs(): output_map = dict( - affine_transform=dict(extensions=None, ), - displacement_field=dict(extensions=None, ), - out_file=dict(extensions=None, ), + affine_transform=dict(extensions=None,), + displacement_field=dict(extensions=None,), + out_file=dict(extensions=None,), ) outputs = CompositeTransformUtil.output_spec() diff --git 
a/nipype/interfaces/ants/tests/test_auto_ConvertScalarImageToRGB.py b/nipype/interfaces/ants/tests/test_auto_ConvertScalarImageToRGB.py index b3944ed735..7e8c5605f7 100644 --- a/nipype/interfaces/ants/tests/test_auto_ConvertScalarImageToRGB.py +++ b/nipype/interfaces/ants/tests/test_auto_ConvertScalarImageToRGB.py @@ -4,77 +4,29 @@ def test_ConvertScalarImageToRGB_inputs(): input_map = dict( - args=dict(argstr='%s', ), - colormap=dict( - argstr='%s', - mandatory=True, - position=4, - usedefault=True, - ), - custom_color_map_file=dict( - argstr='%s', - position=5, - usedefault=True, - ), - dimension=dict( - argstr='%d', - mandatory=True, - position=0, - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - input_image=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=1, - ), - mask_image=dict( - argstr='%s', - extensions=None, - position=3, - usedefault=True, - ), - maximum_RGB_output=dict( - argstr='%d', - position=9, - usedefault=True, - ), - maximum_input=dict( - argstr='%d', - mandatory=True, - position=7, - ), - minimum_RGB_output=dict( - argstr='%d', - position=8, - usedefault=True, - ), - minimum_input=dict( - argstr='%d', - mandatory=True, - position=6, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - output_image=dict( - argstr='%s', - position=2, - usedefault=True, - ), + args=dict(argstr="%s",), + colormap=dict(argstr="%s", mandatory=True, position=4, usedefault=True,), + custom_color_map_file=dict(argstr="%s", position=5, usedefault=True,), + dimension=dict(argstr="%d", mandatory=True, position=0, usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + input_image=dict(argstr="%s", extensions=None, mandatory=True, position=1,), + mask_image=dict(argstr="%s", extensions=None, position=3, usedefault=True,), + maximum_RGB_output=dict(argstr="%d", position=9, usedefault=True,), + maximum_input=dict(argstr="%d", mandatory=True, position=7,), + minimum_RGB_output=dict(argstr="%d", position=8, usedefault=True,), + minimum_input=dict(argstr="%d", mandatory=True, position=6,), + num_threads=dict(nohash=True, usedefault=True,), + output_image=dict(argstr="%s", position=2, usedefault=True,), ) inputs = ConvertScalarImageToRGB.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ConvertScalarImageToRGB_outputs(): - output_map = dict(output_image=dict(extensions=None, ), ) + output_map = dict(output_image=dict(extensions=None,),) outputs = ConvertScalarImageToRGB.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_CorticalThickness.py b/nipype/interfaces/ants/tests/test_auto_CorticalThickness.py index d3cf218934..92a3f04b57 100644 --- a/nipype/interfaces/ants/tests/test_auto_CorticalThickness.py +++ b/nipype/interfaces/ants/tests/test_auto_CorticalThickness.py @@ -4,89 +4,55 @@ def test_CorticalThickness_inputs(): input_map = dict( - anatomical_image=dict( - argstr='-a %s', - extensions=None, - mandatory=True, - ), - args=dict(argstr='%s', ), - b_spline_smoothing=dict(argstr='-v', ), + anatomical_image=dict(argstr="-a %s", extensions=None, mandatory=True,), + args=dict(argstr="%s",), + b_spline_smoothing=dict(argstr="-v",), brain_probability_mask=dict( - argstr='-m %s', - copyfile=False, - extensions=None, - mandatory=True, - ), - brain_template=dict( - argstr='-e %s', - extensions=None, - mandatory=True, - ), - 
cortical_label_image=dict(extensions=None, ), - debug=dict(argstr='-z 1', ), - dimension=dict( - argstr='-d %d', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - extraction_registration_mask=dict( - argstr='-f %s', - extensions=None, - ), - image_suffix=dict( - argstr='-s %s', - usedefault=True, - ), - keep_temporary_files=dict(argstr='-k %d', ), - label_propagation=dict(argstr='-l %s', ), - max_iterations=dict(argstr='-i %d', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - out_prefix=dict( - argstr='-o %s', - usedefault=True, - ), - posterior_formulation=dict(argstr='-b %s', ), - prior_segmentation_weight=dict(argstr='-w %f', ), - quick_registration=dict(argstr='-q 1', ), - segmentation_iterations=dict(argstr='-n %d', ), - segmentation_priors=dict( - argstr='-p %s', - mandatory=True, - ), - t1_registration_template=dict( - argstr='-t %s', - extensions=None, - mandatory=True, - ), - use_floatingpoint_precision=dict(argstr='-j %d', ), - use_random_seeding=dict(argstr='-u %d', ), + argstr="-m %s", copyfile=False, extensions=None, mandatory=True, + ), + brain_template=dict(argstr="-e %s", extensions=None, mandatory=True,), + cortical_label_image=dict(extensions=None,), + debug=dict(argstr="-z 1",), + dimension=dict(argstr="-d %d", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + extraction_registration_mask=dict(argstr="-f %s", extensions=None,), + image_suffix=dict(argstr="-s %s", usedefault=True,), + keep_temporary_files=dict(argstr="-k %d",), + label_propagation=dict(argstr="-l %s",), + max_iterations=dict(argstr="-i %d",), + num_threads=dict(nohash=True, usedefault=True,), + out_prefix=dict(argstr="-o %s", usedefault=True,), + posterior_formulation=dict(argstr="-b %s",), + prior_segmentation_weight=dict(argstr="-w %f",), + quick_registration=dict(argstr="-q 1",), + segmentation_iterations=dict(argstr="-n %d",), + segmentation_priors=dict(argstr="-p %s", mandatory=True,), + t1_registration_template=dict(argstr="-t %s", extensions=None, mandatory=True,), + use_floatingpoint_precision=dict(argstr="-j %d",), + use_random_seeding=dict(argstr="-u %d",), ) inputs = CorticalThickness.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CorticalThickness_outputs(): output_map = dict( - BrainExtractionMask=dict(extensions=None, ), - BrainSegmentation=dict(extensions=None, ), - BrainSegmentationN4=dict(extensions=None, ), + BrainExtractionMask=dict(extensions=None,), + BrainSegmentation=dict(extensions=None,), + BrainSegmentationN4=dict(extensions=None,), BrainSegmentationPosteriors=dict(), - BrainVolumes=dict(extensions=None, ), - CorticalThickness=dict(extensions=None, ), - CorticalThicknessNormedToTemplate=dict(extensions=None, ), - ExtractedBrainN4=dict(extensions=None, ), - SubjectToTemplate0GenericAffine=dict(extensions=None, ), - SubjectToTemplate1Warp=dict(extensions=None, ), - SubjectToTemplateLogJacobian=dict(extensions=None, ), - TemplateToSubject0Warp=dict(extensions=None, ), - TemplateToSubject1GenericAffine=dict(extensions=None, ), + BrainVolumes=dict(extensions=None,), + CorticalThickness=dict(extensions=None,), + CorticalThicknessNormedToTemplate=dict(extensions=None,), + ExtractedBrainN4=dict(extensions=None,), + SubjectToTemplate0GenericAffine=dict(extensions=None,), + SubjectToTemplate1Warp=dict(extensions=None,), + SubjectToTemplateLogJacobian=dict(extensions=None,), + 
TemplateToSubject0Warp=dict(extensions=None,), + TemplateToSubject1GenericAffine=dict(extensions=None,), ) outputs = CorticalThickness.output_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_CreateJacobianDeterminantImage.py b/nipype/interfaces/ants/tests/test_auto_CreateJacobianDeterminantImage.py index d5ac0c1d06..be694c055f 100644 --- a/nipype/interfaces/ants/tests/test_auto_CreateJacobianDeterminantImage.py +++ b/nipype/interfaces/ants/tests/test_auto_CreateJacobianDeterminantImage.py @@ -4,48 +4,26 @@ def test_CreateJacobianDeterminantImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), deformationField=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=1, - ), - doLogJacobian=dict( - argstr='%d', - position=3, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - imageDimension=dict( - argstr='%d', - mandatory=True, - position=0, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - outputImage=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - useGeometric=dict( - argstr='%d', - position=4, + argstr="%s", extensions=None, mandatory=True, position=1, ), + doLogJacobian=dict(argstr="%d", position=3,), + environ=dict(nohash=True, usedefault=True,), + imageDimension=dict(argstr="%d", mandatory=True, position=0,), + num_threads=dict(nohash=True, usedefault=True,), + outputImage=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + useGeometric=dict(argstr="%d", position=4,), ) inputs = CreateJacobianDeterminantImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CreateJacobianDeterminantImage_outputs(): - output_map = dict(jacobian_image=dict(extensions=None, ), ) + output_map = dict(jacobian_image=dict(extensions=None,),) outputs = CreateJacobianDeterminantImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_CreateTiledMosaic.py b/nipype/interfaces/ants/tests/test_auto_CreateTiledMosaic.py index e6a4142c90..3bb36c9d01 100644 --- a/nipype/interfaces/ants/tests/test_auto_CreateTiledMosaic.py +++ b/nipype/interfaces/ants/tests/test_auto_CreateTiledMosaic.py @@ -4,48 +4,30 @@ def test_CreateTiledMosaic_inputs(): input_map = dict( - alpha_value=dict(argstr='-a %.2f', ), - args=dict(argstr='%s', ), - direction=dict(argstr='-d %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - flip_slice=dict(argstr='-f %s', ), - input_image=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - ), - mask_image=dict( - argstr='-x %s', - extensions=None, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - output_image=dict( - argstr='-o %s', - usedefault=True, - ), - pad_or_crop=dict(argstr='-p %s', ), - permute_axes=dict(argstr='-g', ), - rgb_image=dict( - argstr='-r %s', - extensions=None, - mandatory=True, - ), - slices=dict(argstr='-s %s', ), - tile_geometry=dict(argstr='-t %s', ), + alpha_value=dict(argstr="-a %.2f",), + args=dict(argstr="%s",), + direction=dict(argstr="-d %d",), + environ=dict(nohash=True, usedefault=True,), + flip_slice=dict(argstr="-f %s",), + input_image=dict(argstr="-i %s", extensions=None, mandatory=True,), + mask_image=dict(argstr="-x %s", extensions=None,), + num_threads=dict(nohash=True, usedefault=True,), + output_image=dict(argstr="-o %s", usedefault=True,), + pad_or_crop=dict(argstr="-p %s",), + permute_axes=dict(argstr="-g",), + 
rgb_image=dict(argstr="-r %s", extensions=None, mandatory=True,), + slices=dict(argstr="-s %s",), + tile_geometry=dict(argstr="-t %s",), ) inputs = CreateTiledMosaic.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CreateTiledMosaic_outputs(): - output_map = dict(output_image=dict(extensions=None, ), ) + output_map = dict(output_image=dict(extensions=None,),) outputs = CreateTiledMosaic.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_DenoiseImage.py b/nipype/interfaces/ants/tests/test_auto_DenoiseImage.py index 0c65abc907..244b2ca778 100644 --- a/nipype/interfaces/ants/tests/test_auto_DenoiseImage.py +++ b/nipype/interfaces/ants/tests/test_auto_DenoiseImage.py @@ -4,60 +4,41 @@ def test_DenoiseImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict(argstr='-d %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - input_image=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="-d %d",), + environ=dict(nohash=True, usedefault=True,), + input_image=dict(argstr="-i %s", extensions=None, mandatory=True,), noise_image=dict( extensions=None, hash_files=False, keep_extension=True, - name_source=['input_image'], - name_template='%s_noise', - ), - noise_model=dict( - argstr='-n %s', - usedefault=True, - ), - num_threads=dict( - nohash=True, - usedefault=True, + name_source=["input_image"], + name_template="%s_noise", ), + noise_model=dict(argstr="-n %s", usedefault=True,), + num_threads=dict(nohash=True, usedefault=True,), output_image=dict( - argstr='-o %s', + argstr="-o %s", extensions=None, hash_files=False, keep_extension=True, - name_source=['input_image'], - name_template='%s_noise_corrected', + name_source=["input_image"], + name_template="%s_noise_corrected", ), - save_noise=dict( - mandatory=True, - usedefault=True, - xor=['noise_image'], - ), - shrink_factor=dict( - argstr='-s %s', - usedefault=True, - ), - verbose=dict(argstr='-v', ), + save_noise=dict(mandatory=True, usedefault=True, xor=["noise_image"],), + shrink_factor=dict(argstr="-s %s", usedefault=True,), + verbose=dict(argstr="-v",), ) inputs = DenoiseImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DenoiseImage_outputs(): output_map = dict( - noise_image=dict(extensions=None, ), - output_image=dict(extensions=None, ), + noise_image=dict(extensions=None,), output_image=dict(extensions=None,), ) outputs = DenoiseImage.output_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_JointFusion.py b/nipype/interfaces/ants/tests/test_auto_JointFusion.py index b6639938bb..4919b27a2d 100644 --- a/nipype/interfaces/ants/tests/test_auto_JointFusion.py +++ b/nipype/interfaces/ants/tests/test_auto_JointFusion.py @@ -4,82 +4,40 @@ def test_JointFusion_inputs(): input_map = dict( - alpha=dict( - requires=['method'], - usedefault=True, - ), - args=dict(argstr='%s', ), - atlas_group_id=dict(argstr='-gp %d...', ), - atlas_group_weights=dict(argstr='-gpw %d...', ), - beta=dict( - requires=['method'], - usedefault=True, - ), - dimension=dict( - argstr='%d', - mandatory=True, - position=0, - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - exclusion_region=dict( - argstr='-x %s', - extensions=None, - ), - 
method=dict( - argstr='-m %s', - usedefault=True, - ), - modalities=dict( - argstr='%d', - mandatory=True, - position=1, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + alpha=dict(requires=["method"], usedefault=True,), + args=dict(argstr="%s",), + atlas_group_id=dict(argstr="-gp %d...",), + atlas_group_weights=dict(argstr="-gpw %d...",), + beta=dict(requires=["method"], usedefault=True,), + dimension=dict(argstr="%d", mandatory=True, position=0, usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + exclusion_region=dict(argstr="-x %s", extensions=None,), + method=dict(argstr="-m %s", usedefault=True,), + modalities=dict(argstr="%d", mandatory=True, position=1,), + num_threads=dict(nohash=True, usedefault=True,), output_label_image=dict( - argstr='%s', + argstr="%s", extensions=None, mandatory=True, - name_template='%s', - output_name='output_label_image', + name_template="%s", + output_name="output_label_image", position=-1, ), - patch_radius=dict( - argstr='-rp %s', - maxlen=3, - minlen=3, - ), - search_radius=dict( - argstr='-rs %s', - maxlen=3, - minlen=3, - ), - target_image=dict( - argstr='-tg %s...', - mandatory=True, - ), - warped_intensity_images=dict( - argstr='-g %s...', - mandatory=True, - ), - warped_label_images=dict( - argstr='-l %s...', - mandatory=True, - ), + patch_radius=dict(argstr="-rp %s", maxlen=3, minlen=3,), + search_radius=dict(argstr="-rs %s", maxlen=3, minlen=3,), + target_image=dict(argstr="-tg %s...", mandatory=True,), + warped_intensity_images=dict(argstr="-g %s...", mandatory=True,), + warped_label_images=dict(argstr="-l %s...", mandatory=True,), ) inputs = JointFusion.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JointFusion_outputs(): - output_map = dict(output_label_image=dict(extensions=None, ), ) + output_map = dict(output_label_image=dict(extensions=None,),) outputs = JointFusion.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_KellyKapowski.py b/nipype/interfaces/ants/tests/test_auto_KellyKapowski.py index 881071500e..94ce9e9abf 100644 --- a/nipype/interfaces/ants/tests/test_auto_KellyKapowski.py +++ b/nipype/interfaces/ants/tests/test_auto_KellyKapowski.py @@ -4,82 +4,55 @@ def test_KellyKapowski_inputs(): input_map = dict( - args=dict(argstr='%s', ), - convergence=dict( - argstr='--convergence "%s"', - usedefault=True, - ), + args=dict(argstr="%s",), + convergence=dict(argstr='--convergence "%s"', usedefault=True,), cortical_thickness=dict( argstr='--output "%s"', extensions=None, hash_files=False, keep_extension=True, - name_source=['segmentation_image'], - name_template='%s_cortical_thickness', - ), - dimension=dict( - argstr='--image-dimensionality %d', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, + name_source=["segmentation_image"], + name_template="%s_cortical_thickness", ), - gradient_step=dict( - argstr='--gradient-step %f', - usedefault=True, - ), - gray_matter_label=dict(usedefault=True, ), + dimension=dict(argstr="--image-dimensionality %d", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + gradient_step=dict(argstr="--gradient-step %f", usedefault=True,), + gray_matter_label=dict(usedefault=True,), gray_matter_prob_image=dict( - argstr='--gray-matter-probability-image "%s"', - extensions=None, + argstr='--gray-matter-probability-image "%s"', extensions=None, ), 
max_invert_displacement_field_iters=dict( - argstr= - '--maximum-number-of-invert-displacement-field-iterations %d', - usedefault=True, - ), - num_threads=dict( - nohash=True, + argstr="--maximum-number-of-invert-displacement-field-iterations %d", usedefault=True, ), + num_threads=dict(nohash=True, usedefault=True,), number_integration_points=dict( - argstr='--number-of-integration-points %d', - usedefault=True, + argstr="--number-of-integration-points %d", usedefault=True, ), segmentation_image=dict( - argstr='--segmentation-image "%s"', - extensions=None, - mandatory=True, - ), - smoothing_variance=dict( - argstr='--smoothing-variance %f', - usedefault=True, + argstr='--segmentation-image "%s"', extensions=None, mandatory=True, ), + smoothing_variance=dict(argstr="--smoothing-variance %f", usedefault=True,), smoothing_velocity_field=dict( - argstr='--smoothing-velocity-field-parameter %f', - usedefault=True, + argstr="--smoothing-velocity-field-parameter %f", usedefault=True, ), thickness_prior_estimate=dict( - argstr='--thickness-prior-estimate %f', - usedefault=True, + argstr="--thickness-prior-estimate %f", usedefault=True, ), thickness_prior_image=dict( - argstr='--thickness-prior-image "%s"', - extensions=None, + argstr='--thickness-prior-image "%s"', extensions=None, ), - use_bspline_smoothing=dict(argstr='--use-bspline-smoothing 1', ), + use_bspline_smoothing=dict(argstr="--use-bspline-smoothing 1",), warped_white_matter=dict( extensions=None, hash_files=False, keep_extension=True, - name_source=['segmentation_image'], - name_template='%s_warped_white_matter', + name_source=["segmentation_image"], + name_template="%s_warped_white_matter", ), - white_matter_label=dict(usedefault=True, ), + white_matter_label=dict(usedefault=True,), white_matter_prob_image=dict( - argstr='--white-matter-probability-image "%s"', - extensions=None, + argstr='--white-matter-probability-image "%s"', extensions=None, ), ) inputs = KellyKapowski.input_spec() @@ -87,10 +60,12 @@ def test_KellyKapowski_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_KellyKapowski_outputs(): output_map = dict( - cortical_thickness=dict(extensions=None, ), - warped_white_matter=dict(extensions=None, ), + cortical_thickness=dict(extensions=None,), + warped_white_matter=dict(extensions=None,), ) outputs = KellyKapowski.output_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_LabelGeometry.py b/nipype/interfaces/ants/tests/test_auto_LabelGeometry.py index 73e789f885..dfc4e0ff60 100644 --- a/nipype/interfaces/ants/tests/test_auto_LabelGeometry.py +++ b/nipype/interfaces/ants/tests/test_auto_LabelGeometry.py @@ -4,37 +4,18 @@ def test_LabelGeometry_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict( - argstr='%d', - position=0, - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="%d", position=0, usedefault=True,), + environ=dict(nohash=True, usedefault=True,), intensity_image=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - usedefault=True, - ), - label_image=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=1, - ), - num_threads=dict( - nohash=True, - usedefault=True, + argstr="%s", extensions=None, mandatory=True, position=2, usedefault=True, ), + label_image=dict(argstr="%s", extensions=None, mandatory=True, position=1,), + num_threads=dict(nohash=True, 
usedefault=True,), output_file=dict( - argstr='%s', - name_source=['label_image'], - name_template='%s.csv', + argstr="%s", + name_source=["label_image"], + name_template="%s.csv", position=3, ), ) @@ -43,8 +24,10 @@ def test_LabelGeometry_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LabelGeometry_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) + output_map = dict(output_file=dict(extensions=None,),) outputs = LabelGeometry.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_LaplacianThickness.py b/nipype/interfaces/ants/tests/test_auto_LaplacianThickness.py index 89fa00044c..3b18ca5d0f 100644 --- a/nipype/interfaces/ants/tests/test_auto_LaplacianThickness.py +++ b/nipype/interfaces/ants/tests/test_auto_LaplacianThickness.py @@ -4,70 +4,39 @@ def test_LaplacianThickness_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dT=dict( - argstr='%s', - position=6, - requires=['prior_thickness'], - ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + dT=dict(argstr="%s", position=6, requires=["prior_thickness"],), + environ=dict(nohash=True, usedefault=True,), input_gm=dict( - argstr='%s', - copyfile=True, - extensions=None, - mandatory=True, - position=2, + argstr="%s", copyfile=True, extensions=None, mandatory=True, position=2, ), input_wm=dict( - argstr='%s', - copyfile=True, - extensions=None, - mandatory=True, - position=1, - ), - num_threads=dict( - nohash=True, - usedefault=True, + argstr="%s", copyfile=True, extensions=None, mandatory=True, position=1, ), + num_threads=dict(nohash=True, usedefault=True,), output_image=dict( - argstr='%s', + argstr="%s", extensions=None, hash_files=False, keep_extension=True, - name_source=['input_wm'], - name_template='%s_thickness', + name_source=["input_wm"], + name_template="%s_thickness", position=3, ), - prior_thickness=dict( - argstr='%s', - position=5, - requires=['smooth_param'], - ), - smooth_param=dict( - argstr='%s', - position=4, - ), - sulcus_prior=dict( - argstr='%s', - position=7, - requires=['dT'], - ), - tolerance=dict( - argstr='%s', - position=8, - requires=['sulcus_prior'], - ), + prior_thickness=dict(argstr="%s", position=5, requires=["smooth_param"],), + smooth_param=dict(argstr="%s", position=4,), + sulcus_prior=dict(argstr="%s", position=7, requires=["dT"],), + tolerance=dict(argstr="%s", position=8, requires=["sulcus_prior"],), ) inputs = LaplacianThickness.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LaplacianThickness_outputs(): - output_map = dict(output_image=dict(extensions=None, ), ) + output_map = dict(output_image=dict(extensions=None,),) outputs = LaplacianThickness.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_MeasureImageSimilarity.py b/nipype/interfaces/ants/tests/test_auto_MeasureImageSimilarity.py index bc90711b69..2d15c49afa 100644 --- a/nipype/interfaces/ants/tests/test_auto_MeasureImageSimilarity.py +++ b/nipype/interfaces/ants/tests/test_auto_MeasureImageSimilarity.py @@ -4,63 +4,29 @@ def test_MeasureImageSimilarity_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict( - argstr='--dimensionality %d', - position=1, - ), - environ=dict( - nohash=True, - 
usedefault=True, - ), - fixed_image=dict( - extensions=None, - mandatory=True, - ), - fixed_image_mask=dict( - argstr='%s', - extensions=None, - ), - metric=dict( - argstr='%s', - mandatory=True, - ), - metric_weight=dict( - requires=['metric'], - usedefault=True, - ), - moving_image=dict( - extensions=None, - mandatory=True, - ), - moving_image_mask=dict( - extensions=None, - requires=['fixed_image_mask'], - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - radius_or_number_of_bins=dict( - mandatory=True, - requires=['metric'], - ), - sampling_percentage=dict( - mandatory=True, - requires=['metric'], - ), - sampling_strategy=dict( - requires=['metric'], - usedefault=True, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="--dimensionality %d", position=1,), + environ=dict(nohash=True, usedefault=True,), + fixed_image=dict(extensions=None, mandatory=True,), + fixed_image_mask=dict(argstr="%s", extensions=None,), + metric=dict(argstr="%s", mandatory=True,), + metric_weight=dict(requires=["metric"], usedefault=True,), + moving_image=dict(extensions=None, mandatory=True,), + moving_image_mask=dict(extensions=None, requires=["fixed_image_mask"],), + num_threads=dict(nohash=True, usedefault=True,), + radius_or_number_of_bins=dict(mandatory=True, requires=["metric"],), + sampling_percentage=dict(mandatory=True, requires=["metric"],), + sampling_strategy=dict(requires=["metric"], usedefault=True,), ) inputs = MeasureImageSimilarity.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MeasureImageSimilarity_outputs(): - output_map = dict(similarity=dict(), ) + output_map = dict(similarity=dict(),) outputs = MeasureImageSimilarity.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_MultiplyImages.py b/nipype/interfaces/ants/tests/test_auto_MultiplyImages.py index 5aad6c1722..8a8d0958eb 100644 --- a/nipype/interfaces/ants/tests/test_auto_MultiplyImages.py +++ b/nipype/interfaces/ants/tests/test_auto_MultiplyImages.py @@ -4,45 +4,25 @@ def test_MultiplyImages_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict( - argstr='%d', - mandatory=True, - position=0, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - first_input=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=1, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="%d", mandatory=True, position=0,), + environ=dict(nohash=True, usedefault=True,), + first_input=dict(argstr="%s", extensions=None, mandatory=True, position=1,), + num_threads=dict(nohash=True, usedefault=True,), output_product_image=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=3, - ), - second_input=dict( - argstr='%s', - mandatory=True, - position=2, + argstr="%s", extensions=None, mandatory=True, position=3, ), + second_input=dict(argstr="%s", mandatory=True, position=2,), ) inputs = MultiplyImages.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MultiplyImages_outputs(): - output_map = dict(output_product_image=dict(extensions=None, ), ) + output_map = dict(output_product_image=dict(extensions=None,),) outputs = MultiplyImages.output_spec() for key, metadata in list(output_map.items()): diff --git 
a/nipype/interfaces/ants/tests/test_auto_N4BiasFieldCorrection.py b/nipype/interfaces/ants/tests/test_auto_N4BiasFieldCorrection.py index c7391afb71..2426660455 100644 --- a/nipype/interfaces/ants/tests/test_auto_N4BiasFieldCorrection.py +++ b/nipype/interfaces/ants/tests/test_auto_N4BiasFieldCorrection.py @@ -4,74 +4,41 @@ def test_N4BiasFieldCorrection_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bias_image=dict( - extensions=None, - hash_files=False, - ), - bspline_fitting_distance=dict(argstr='--bspline-fitting %s', ), - bspline_order=dict(requires=['bspline_fitting_distance'], ), - convergence_threshold=dict(requires=['n_iterations'], ), - copy_header=dict( - mandatory=True, - usedefault=True, - ), - dimension=dict( - argstr='-d %d', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - histogram_sharpening=dict( - argstr='--histogram-sharpening [%g,%g,%d]', ), - input_image=dict( - argstr='--input-image %s', - extensions=None, - mandatory=True, - ), - mask_image=dict( - argstr='--mask-image %s', - extensions=None, - ), - n_iterations=dict(argstr='--convergence %s', ), - num_threads=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + bias_image=dict(extensions=None, hash_files=False,), + bspline_fitting_distance=dict(argstr="--bspline-fitting %s",), + bspline_order=dict(requires=["bspline_fitting_distance"],), + convergence_threshold=dict(requires=["n_iterations"],), + copy_header=dict(mandatory=True, usedefault=True,), + dimension=dict(argstr="-d %d", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + histogram_sharpening=dict(argstr="--histogram-sharpening [%g,%g,%d]",), + input_image=dict(argstr="--input-image %s", extensions=None, mandatory=True,), + mask_image=dict(argstr="--mask-image %s", extensions=None,), + n_iterations=dict(argstr="--convergence %s",), + num_threads=dict(nohash=True, usedefault=True,), output_image=dict( - argstr='--output %s', + argstr="--output %s", hash_files=False, keep_extension=True, - name_source=['input_image'], - name_template='%s_corrected', - ), - rescale_intensities=dict( - argstr='-r', - min_ver='2.1.0', - usedefault=True, - ), - save_bias=dict( - mandatory=True, - usedefault=True, - xor=['bias_image'], - ), - shrink_factor=dict(argstr='--shrink-factor %d', ), - weight_image=dict( - argstr='--weight-image %s', - extensions=None, + name_source=["input_image"], + name_template="%s_corrected", ), + rescale_intensities=dict(argstr="-r", min_ver="2.1.0", usedefault=True,), + save_bias=dict(mandatory=True, usedefault=True, xor=["bias_image"],), + shrink_factor=dict(argstr="--shrink-factor %d",), + weight_image=dict(argstr="--weight-image %s", extensions=None,), ) inputs = N4BiasFieldCorrection.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_N4BiasFieldCorrection_outputs(): output_map = dict( - bias_image=dict(extensions=None, ), - output_image=dict(extensions=None, ), + bias_image=dict(extensions=None,), output_image=dict(extensions=None,), ) outputs = N4BiasFieldCorrection.output_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_Registration.py b/nipype/interfaces/ants/tests/test_auto_Registration.py index 1231b1d1a0..33921e8638 100644 --- a/nipype/interfaces/ants/tests/test_auto_Registration.py +++ b/nipype/interfaces/ants/tests/test_auto_Registration.py @@ -4,141 +4,81 @@ def test_Registration_inputs(): input_map = dict( - 
args=dict(argstr='%s', ), + args=dict(argstr="%s",), collapse_output_transforms=dict( - argstr='--collapse-output-transforms %d', - usedefault=True, - ), - convergence_threshold=dict( - requires=['number_of_iterations'], - usedefault=True, + argstr="--collapse-output-transforms %d", usedefault=True, ), + convergence_threshold=dict(requires=["number_of_iterations"], usedefault=True,), convergence_window_size=dict( - requires=['convergence_threshold'], - usedefault=True, - ), - dimension=dict( - argstr='--dimensionality %d', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, + requires=["convergence_threshold"], usedefault=True, ), - fixed_image=dict(mandatory=True, ), + dimension=dict(argstr="--dimensionality %d", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + fixed_image=dict(mandatory=True,), fixed_image_mask=dict( - argstr='%s', - extensions=None, - max_ver='2.1.0', - xor=['fixed_image_masks'], - ), - fixed_image_masks=dict( - min_ver='2.2.0', - xor=['fixed_image_mask'], + argstr="%s", extensions=None, max_ver="2.1.0", xor=["fixed_image_masks"], ), - float=dict(argstr='--float %d', ), + fixed_image_masks=dict(min_ver="2.2.0", xor=["fixed_image_mask"],), + float=dict(argstr="--float %d",), initial_moving_transform=dict( - argstr='%s', - xor=['initial_moving_transform_com'], + argstr="%s", xor=["initial_moving_transform_com"], ), initial_moving_transform_com=dict( - argstr='%s', - xor=['initial_moving_transform'], + argstr="%s", xor=["initial_moving_transform"], ), initialize_transforms_per_stage=dict( - argstr='--initialize-transforms-per-stage %d', - usedefault=True, - ), - interpolation=dict( - argstr='%s', - usedefault=True, + argstr="--initialize-transforms-per-stage %d", usedefault=True, ), + interpolation=dict(argstr="%s", usedefault=True,), interpolation_parameters=dict(), invert_initial_moving_transform=dict( - requires=['initial_moving_transform'], - xor=['initial_moving_transform_com'], + requires=["initial_moving_transform"], xor=["initial_moving_transform_com"], ), - metric=dict(mandatory=True, ), + metric=dict(mandatory=True,), metric_item_trait=dict(), metric_stage_trait=dict(), - metric_weight=dict( - mandatory=True, - requires=['metric'], - usedefault=True, - ), - metric_weight_item_trait=dict(usedefault=True, ), + metric_weight=dict(mandatory=True, requires=["metric"], usedefault=True,), + metric_weight_item_trait=dict(usedefault=True,), metric_weight_stage_trait=dict(), - moving_image=dict(mandatory=True, ), + moving_image=dict(mandatory=True,), moving_image_mask=dict( extensions=None, - max_ver='2.1.0', - requires=['fixed_image_mask'], - xor=['moving_image_masks'], - ), - moving_image_masks=dict( - min_ver='2.2.0', - xor=['moving_image_mask'], - ), - num_threads=dict( - nohash=True, - usedefault=True, + max_ver="2.1.0", + requires=["fixed_image_mask"], + xor=["moving_image_masks"], ), + moving_image_masks=dict(min_ver="2.2.0", xor=["moving_image_mask"],), + num_threads=dict(nohash=True, usedefault=True,), number_of_iterations=dict(), output_inverse_warped_image=dict( - hash_files=False, - requires=['output_warped_image'], + hash_files=False, requires=["output_warped_image"], ), - output_transform_prefix=dict( - argstr='%s', - usedefault=True, - ), - output_warped_image=dict(hash_files=False, ), - radius_bins_item_trait=dict(usedefault=True, ), + output_transform_prefix=dict(argstr="%s", usedefault=True,), + output_warped_image=dict(hash_files=False,), + radius_bins_item_trait=dict(usedefault=True,), 
radius_bins_stage_trait=dict(), - radius_or_number_of_bins=dict( - requires=['metric_weight'], - usedefault=True, - ), - restore_state=dict( - argstr='--restore-state %s', - extensions=None, - ), + radius_or_number_of_bins=dict(requires=["metric_weight"], usedefault=True,), + restore_state=dict(argstr="--restore-state %s", extensions=None,), restrict_deformation=dict(), - sampling_percentage=dict(requires=['sampling_strategy'], ), + sampling_percentage=dict(requires=["sampling_strategy"],), sampling_percentage_item_trait=dict(), sampling_percentage_stage_trait=dict(), - sampling_strategy=dict(requires=['metric_weight'], ), + sampling_strategy=dict(requires=["metric_weight"],), sampling_strategy_item_trait=dict(), sampling_strategy_stage_trait=dict(), - save_state=dict( - argstr='--save-state %s', - extensions=None, - ), - shrink_factors=dict(mandatory=True, ), - sigma_units=dict(requires=['smoothing_sigmas'], ), - smoothing_sigmas=dict(mandatory=True, ), + save_state=dict(argstr="--save-state %s", extensions=None,), + shrink_factors=dict(mandatory=True,), + sigma_units=dict(requires=["smoothing_sigmas"],), + smoothing_sigmas=dict(mandatory=True,), transform_parameters=dict(), - transforms=dict( - argstr='%s', - mandatory=True, - ), + transforms=dict(argstr="%s", mandatory=True,), use_estimate_learning_rate_once=dict(), - use_histogram_matching=dict(usedefault=True, ), - verbose=dict( - argstr='-v', - usedefault=True, - ), - winsorize_lower_quantile=dict( - argstr='%s', - usedefault=True, - ), - winsorize_upper_quantile=dict( - argstr='%s', - usedefault=True, - ), + use_histogram_matching=dict(usedefault=True,), + verbose=dict(argstr="-v", usedefault=True,), + winsorize_lower_quantile=dict(argstr="%s", usedefault=True,), + winsorize_upper_quantile=dict(argstr="%s", usedefault=True,), write_composite_transform=dict( - argstr='--write-composite-transform %d', - usedefault=True, + argstr="--write-composite-transform %d", usedefault=True, ), ) inputs = Registration.input_spec() @@ -146,19 +86,21 @@ def test_Registration_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Registration_outputs(): output_map = dict( - composite_transform=dict(extensions=None, ), + composite_transform=dict(extensions=None,), elapsed_time=dict(), forward_invert_flags=dict(), forward_transforms=dict(), - inverse_composite_transform=dict(extensions=None, ), - inverse_warped_image=dict(extensions=None, ), + inverse_composite_transform=dict(extensions=None,), + inverse_warped_image=dict(extensions=None,), metric_value=dict(), reverse_invert_flags=dict(), reverse_transforms=dict(), - save_state=dict(extensions=None, ), - warped_image=dict(extensions=None, ), + save_state=dict(extensions=None,), + warped_image=dict(extensions=None,), ) outputs = Registration.output_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_RegistrationSynQuick.py b/nipype/interfaces/ants/tests/test_auto_RegistrationSynQuick.py index 5448df5ed5..3bc1b8aa06 100644 --- a/nipype/interfaces/ants/tests/test_auto_RegistrationSynQuick.py +++ b/nipype/interfaces/ants/tests/test_auto_RegistrationSynQuick.py @@ -4,61 +4,33 @@ def test_RegistrationSynQuick_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict( - argstr='-d %d', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixed_image=dict( - argstr='-f %s...', - mandatory=True, - ), - histogram_bins=dict( - argstr='-r %d', - 
usedefault=True, - ), - moving_image=dict( - argstr='-m %s...', - mandatory=True, - ), - num_threads=dict( - argstr='-n %d', - usedefault=True, - ), - output_prefix=dict( - argstr='-o %s', - usedefault=True, - ), - precision_type=dict( - argstr='-p %s', - usedefault=True, - ), - spline_distance=dict( - argstr='-s %d', - usedefault=True, - ), - transform_type=dict( - argstr='-t %s', - usedefault=True, - ), - use_histogram_matching=dict(argstr='-j %d', ), + args=dict(argstr="%s",), + dimension=dict(argstr="-d %d", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + fixed_image=dict(argstr="-f %s...", mandatory=True,), + histogram_bins=dict(argstr="-r %d", usedefault=True,), + moving_image=dict(argstr="-m %s...", mandatory=True,), + num_threads=dict(argstr="-n %d", usedefault=True,), + output_prefix=dict(argstr="-o %s", usedefault=True,), + precision_type=dict(argstr="-p %s", usedefault=True,), + spline_distance=dict(argstr="-s %d", usedefault=True,), + transform_type=dict(argstr="-t %s", usedefault=True,), + use_histogram_matching=dict(argstr="-j %d",), ) inputs = RegistrationSynQuick.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RegistrationSynQuick_outputs(): output_map = dict( - forward_warp_field=dict(extensions=None, ), - inverse_warp_field=dict(extensions=None, ), - inverse_warped_image=dict(extensions=None, ), - out_matrix=dict(extensions=None, ), - warped_image=dict(extensions=None, ), + forward_warp_field=dict(extensions=None,), + inverse_warp_field=dict(extensions=None,), + inverse_warped_image=dict(extensions=None,), + out_matrix=dict(extensions=None,), + warped_image=dict(extensions=None,), ) outputs = RegistrationSynQuick.output_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_WarpImageMultiTransform.py b/nipype/interfaces/ants/tests/test_auto_WarpImageMultiTransform.py index e8385c9c6a..b9acca1442 100644 --- a/nipype/interfaces/ants/tests/test_auto_WarpImageMultiTransform.py +++ b/nipype/interfaces/ants/tests/test_auto_WarpImageMultiTransform.py @@ -4,66 +4,39 @@ def test_WarpImageMultiTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict( - argstr='%d', - position=1, - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - input_image=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="%d", position=1, usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + input_image=dict(argstr="%s", extensions=None, mandatory=True, position=2,), invert_affine=dict(), - num_threads=dict( - nohash=True, - usedefault=True, - ), + num_threads=dict(nohash=True, usedefault=True,), out_postfix=dict( - extensions=None, - hash_files=False, - usedefault=True, - xor=['output_image'], + extensions=None, hash_files=False, usedefault=True, xor=["output_image"], ), output_image=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, position=3, - xor=['out_postfix'], - ), - reference_image=dict( - argstr='-R %s', - extensions=None, - xor=['tightest_box'], - ), - reslice_by_header=dict(argstr='--reslice-by-header', ), - tightest_box=dict( - argstr='--tightest-bounding-box', - xor=['reference_image'], - ), - transformation_series=dict( - argstr='%s', - mandatory=True, - position=-1, - ), - use_bspline=dict(argstr='--use-BSpline', ), - use_nearest=dict(argstr='--use-NN', ), + 
xor=["out_postfix"], + ), + reference_image=dict(argstr="-R %s", extensions=None, xor=["tightest_box"],), + reslice_by_header=dict(argstr="--reslice-by-header",), + tightest_box=dict(argstr="--tightest-bounding-box", xor=["reference_image"],), + transformation_series=dict(argstr="%s", mandatory=True, position=-1,), + use_bspline=dict(argstr="--use-BSpline",), + use_nearest=dict(argstr="--use-NN",), ) inputs = WarpImageMultiTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_WarpImageMultiTransform_outputs(): - output_map = dict(output_image=dict(extensions=None, ), ) + output_map = dict(output_image=dict(extensions=None,),) outputs = WarpImageMultiTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_auto_WarpTimeSeriesImageMultiTransform.py b/nipype/interfaces/ants/tests/test_auto_WarpTimeSeriesImageMultiTransform.py index 0ab8a379de..e95d70c9ac 100644 --- a/nipype/interfaces/ants/tests/test_auto_WarpTimeSeriesImageMultiTransform.py +++ b/nipype/interfaces/ants/tests/test_auto_WarpTimeSeriesImageMultiTransform.py @@ -4,56 +4,29 @@ def test_WarpTimeSeriesImageMultiTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict( - argstr='%d', - position=1, - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - input_image=dict( - argstr='%s', - copyfile=True, - extensions=None, - mandatory=True, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="%d", position=1, usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + input_image=dict(argstr="%s", copyfile=True, extensions=None, mandatory=True,), invert_affine=dict(), - num_threads=dict( - nohash=True, - usedefault=True, - ), - out_postfix=dict( - argstr='%s', - usedefault=True, - ), - reference_image=dict( - argstr='-R %s', - extensions=None, - xor=['tightest_box'], - ), - reslice_by_header=dict(argstr='--reslice-by-header', ), - tightest_box=dict( - argstr='--tightest-bounding-box', - xor=['reference_image'], - ), - transformation_series=dict( - argstr='%s', - copyfile=False, - mandatory=True, - ), - use_bspline=dict(argstr='--use-Bspline', ), - use_nearest=dict(argstr='--use-NN', ), + num_threads=dict(nohash=True, usedefault=True,), + out_postfix=dict(argstr="%s", usedefault=True,), + reference_image=dict(argstr="-R %s", extensions=None, xor=["tightest_box"],), + reslice_by_header=dict(argstr="--reslice-by-header",), + tightest_box=dict(argstr="--tightest-bounding-box", xor=["reference_image"],), + transformation_series=dict(argstr="%s", copyfile=False, mandatory=True,), + use_bspline=dict(argstr="--use-Bspline",), + use_nearest=dict(argstr="--use-NN",), ) inputs = WarpTimeSeriesImageMultiTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_WarpTimeSeriesImageMultiTransform_outputs(): - output_map = dict(output_image=dict(extensions=None, ), ) + output_map = dict(output_image=dict(extensions=None,),) outputs = WarpTimeSeriesImageMultiTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/ants/tests/test_extra_Registration.py b/nipype/interfaces/ants/tests/test_extra_Registration.py index 1f38f3c61f..d134324253 100644 --- a/nipype/interfaces/ants/tests/test_extra_Registration.py +++ 
b/nipype/interfaces/ants/tests/test_extra_Registration.py @@ -8,13 +8,13 @@ def test_ants_mand(tmpdir): tmpdir.chdir() filepath = os.path.dirname(os.path.realpath(__file__)) - datadir = os.path.realpath(os.path.join(filepath, '../../../testing/data')) + datadir = os.path.realpath(os.path.join(filepath, "../../../testing/data")) ants = registration.ANTS() ants.inputs.transformation_model = "SyN" - ants.inputs.moving_image = [os.path.join(datadir, 'resting.nii')] - ants.inputs.fixed_image = [os.path.join(datadir, 'T1.nii')] - ants.inputs.metric = ['MI'] + ants.inputs.moving_image = [os.path.join(datadir, "resting.nii")] + ants.inputs.fixed_image = [os.path.join(datadir, "T1.nii")] + ants.inputs.metric = ["MI"] with pytest.raises(ValueError) as er: ants.run() diff --git a/nipype/interfaces/ants/tests/test_resampling.py b/nipype/interfaces/ants/tests/test_resampling.py index 14903f0137..3b1da9d3ee 100644 --- a/nipype/interfaces/ants/tests/test_resampling.py +++ b/nipype/interfaces/ants/tests/test_resampling.py @@ -1,7 +1,10 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from nipype.interfaces.ants import WarpImageMultiTransform, WarpTimeSeriesImageMultiTransform +from nipype.interfaces.ants import ( + WarpImageMultiTransform, + WarpTimeSeriesImageMultiTransform, +) import os import pytest @@ -10,7 +13,7 @@ def change_dir(request): orig_dir = os.getcwd() filepath = os.path.dirname(os.path.realpath(__file__)) - datadir = os.path.realpath(os.path.join(filepath, '../../../testing/data')) + datadir = os.path.realpath(os.path.join(filepath, "../../../testing/data")) os.chdir(datadir) def move2orig(): @@ -22,32 +25,43 @@ def move2orig(): @pytest.fixture() def create_wimt(): wimt = WarpImageMultiTransform() - wimt.inputs.input_image = 'diffusion_weighted.nii' - wimt.inputs.reference_image = 'functional.nii' + wimt.inputs.input_image = "diffusion_weighted.nii" + wimt.inputs.reference_image = "functional.nii" wimt.inputs.transformation_series = [ - 'func2anat_coreg_Affine.txt', 'func2anat_InverseWarp.nii.gz', - 'dwi2anat_Warp.nii.gz', 'dwi2anat_coreg_Affine.txt' + "func2anat_coreg_Affine.txt", + "func2anat_InverseWarp.nii.gz", + "dwi2anat_Warp.nii.gz", + "dwi2anat_coreg_Affine.txt", ] return wimt def test_WarpImageMultiTransform(change_dir, create_wimt): wimt = create_wimt - assert wimt.cmdline == 'WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii \ -func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz dwi2anat_coreg_Affine.txt' + assert ( + wimt.cmdline + == "WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii \ +func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz dwi2anat_coreg_Affine.txt" + ) def test_WarpImageMultiTransform_invaffine_1(change_dir, create_wimt): wimt = create_wimt wimt.inputs.invert_affine = [1] - assert wimt.cmdline == 'WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii \ --i func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz dwi2anat_coreg_Affine.txt' + assert ( + wimt.cmdline + == "WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii \ +-i func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz dwi2anat_coreg_Affine.txt" + ) def test_WarpImageMultiTransform_invaffine_2(change_dir, create_wimt): wimt = create_wimt wimt.inputs.invert_affine = [2] - 
assert wimt.cmdline == 'WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz -i dwi2anat_coreg_Affine.txt' + assert ( + wimt.cmdline + == "WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz -i dwi2anat_coreg_Affine.txt" + ) def test_WarpImageMultiTransform_invaffine_wrong(change_dir, create_wimt): @@ -60,30 +74,32 @@ def test_WarpImageMultiTransform_invaffine_wrong(change_dir, create_wimt): @pytest.fixture() def create_wtsimt(): wtsimt = WarpTimeSeriesImageMultiTransform() - wtsimt.inputs.input_image = 'resting.nii' - wtsimt.inputs.reference_image = 'ants_deformed.nii.gz' - wtsimt.inputs.transformation_series = [ - 'ants_Warp.nii.gz', 'ants_Affine.txt' - ] + wtsimt.inputs.input_image = "resting.nii" + wtsimt.inputs.reference_image = "ants_deformed.nii.gz" + wtsimt.inputs.transformation_series = ["ants_Warp.nii.gz", "ants_Affine.txt"] return wtsimt def test_WarpTimeSeriesImageMultiTransform(change_dir, create_wtsimt): wtsimt = create_wtsimt - assert wtsimt.cmdline == 'WarpTimeSeriesImageMultiTransform 4 resting.nii resting_wtsimt.nii \ --R ants_deformed.nii.gz ants_Warp.nii.gz ants_Affine.txt' + assert ( + wtsimt.cmdline + == "WarpTimeSeriesImageMultiTransform 4 resting.nii resting_wtsimt.nii \ +-R ants_deformed.nii.gz ants_Warp.nii.gz ants_Affine.txt" + ) -def test_WarpTimeSeriesImageMultiTransform_invaffine(change_dir, - create_wtsimt): +def test_WarpTimeSeriesImageMultiTransform_invaffine(change_dir, create_wtsimt): wtsimt = create_wtsimt wtsimt.inputs.invert_affine = [1] - assert wtsimt.cmdline == 'WarpTimeSeriesImageMultiTransform 4 resting.nii resting_wtsimt.nii \ --R ants_deformed.nii.gz ants_Warp.nii.gz -i ants_Affine.txt' + assert ( + wtsimt.cmdline + == "WarpTimeSeriesImageMultiTransform 4 resting.nii resting_wtsimt.nii \ +-R ants_deformed.nii.gz ants_Warp.nii.gz -i ants_Affine.txt" + ) -def test_WarpTimeSeriesImageMultiTransform_invaffine_wrong( - change_dir, create_wtsimt): +def test_WarpTimeSeriesImageMultiTransform_invaffine_wrong(change_dir, create_wtsimt): wtsimt = create_wtsimt wtsimt.inputs.invert_affine = [0] with pytest.raises(Exception): diff --git a/nipype/interfaces/ants/tests/test_segmentation.py b/nipype/interfaces/ants/tests/test_segmentation.py index e9a1443934..4fc22ee34a 100644 --- a/nipype/interfaces/ants/tests/test_segmentation.py +++ b/nipype/interfaces/ants/tests/test_segmentation.py @@ -12,7 +12,7 @@ def change_dir(request): orig_dir = os.getcwd() filepath = os.path.dirname(os.path.realpath(__file__)) - datadir = os.path.realpath(os.path.join(filepath, '../../../testing/data')) + datadir = os.path.realpath(os.path.join(filepath, "../../../testing/data")) os.chdir(datadir) def move2orig(): @@ -25,14 +25,14 @@ def move2orig(): def create_lt(): lt = LaplacianThickness() # we do not run, so I stick some not really proper files as input - lt.inputs.input_gm = 'diffusion_weighted.nii' - lt.inputs.input_wm = 'functional.nii' + lt.inputs.input_gm = "diffusion_weighted.nii" + lt.inputs.input_wm = "functional.nii" return lt def test_LaplacianThickness_defaults(change_dir, create_lt): lt = create_lt - base_cmd = 'LaplacianThickness functional.nii diffusion_weighted.nii functional_thickness.nii' + base_cmd = "LaplacianThickness functional.nii diffusion_weighted.nii functional_thickness.nii" assert lt.cmdline == base_cmd 
lt.inputs.smooth_param = 4.5 assert lt.cmdline == base_cmd + " 4.5" @@ -43,17 +43,25 @@ def test_LaplacianThickness_defaults(change_dir, create_lt): def test_LaplacianThickness_wrongargs(change_dir, create_lt): lt = create_lt lt.inputs.tolerance = 0.001 - with pytest.raises(ValueError, match=r".* requires a value for input 'sulcus_prior' .*"): + with pytest.raises( + ValueError, match=r".* requires a value for input 'sulcus_prior' .*" + ): lt.cmdline lt.inputs.sulcus_prior = 0.15 with pytest.raises(ValueError, match=r".* requires a value for input 'dT' .*"): lt.cmdline lt.inputs.dT = 0.01 - with pytest.raises(ValueError, match=r".* requires a value for input 'prior_thickness' .*"): + with pytest.raises( + ValueError, match=r".* requires a value for input 'prior_thickness' .*" + ): lt.cmdline lt.inputs.prior_thickness = 5.9 - with pytest.raises(ValueError, match=r".* requires a value for input 'smooth_param' .*"): + with pytest.raises( + ValueError, match=r".* requires a value for input 'smooth_param' .*" + ): lt.cmdline lt.inputs.smooth_param = 4.5 - assert lt.cmdline == 'LaplacianThickness functional.nii diffusion_weighted.nii ' \ - 'functional_thickness.nii 4.5 5.9 0.01 0.15 0.001' + assert ( + lt.cmdline == "LaplacianThickness functional.nii diffusion_weighted.nii " + "functional_thickness.nii 4.5 5.9 0.01 0.15 0.001" + ) diff --git a/nipype/interfaces/ants/tests/test_spec_JointFusion.py b/nipype/interfaces/ants/tests/test_spec_JointFusion.py index a1dc03cc40..a0276afbb0 100644 --- a/nipype/interfaces/ants/tests/test_spec_JointFusion.py +++ b/nipype/interfaces/ants/tests/test_spec_JointFusion.py @@ -8,7 +8,7 @@ def test_JointFusion_dimension(): at = JointFusion() - set_dimension = lambda d: setattr(at.inputs, 'dimension', int(d)) + set_dimension = lambda d: setattr(at.inputs, "dimension", int(d)) for d in range(2, 5): set_dimension(d) assert at.inputs.dimension == int(d) @@ -20,66 +20,73 @@ def test_JointFusion_dimension(): @pytest.mark.parametrize("m", range(1, 5)) def test_JointFusion_modalities(m): at = JointFusion() - setattr(at.inputs, 'modalities', int(m)) + setattr(at.inputs, "modalities", int(m)) assert at.inputs.modalities == int(m) -@pytest.mark.parametrize("a, b", - [(a, b) for a in range(10) for b in range(10)]) +@pytest.mark.parametrize("a, b", [(a, b) for a in range(10) for b in range(10)]) def test_JointFusion_method(a, b): at = JointFusion() - set_method = lambda a, b: setattr(at.inputs, 'method', 'Joint[%.1f,%d]'.format(a, b)) + set_method = lambda a, b: setattr( + at.inputs, "method", "Joint[%.1f,%d]".format(a, b) + ) _a = a / 10.0 set_method(_a, b) # set directly - assert at.inputs.method == 'Joint[%.1f,%d]'.format(_a, b) + assert at.inputs.method == "Joint[%.1f,%d]".format(_a, b) aprime = _a + 0.1 bprime = b + 1 at.inputs.alpha = aprime at.inputs.beta = bprime # set with alpha/beta - assert at.inputs.method == 'Joint[%.1f,%d]'.format(aprime, bprime) + assert at.inputs.method == "Joint[%.1f,%d]".format(aprime, bprime) -@pytest.mark.parametrize("attr, x", - [(attr, x) - for attr in ['patch_radius', 'search_radius'] - for x in range(5)]) +@pytest.mark.parametrize( + "attr, x", + [(attr, x) for attr in ["patch_radius", "search_radius"] for x in range(5)], +) def test_JointFusion_radius(attr, x): at = JointFusion() - setattr(at.inputs, attr, [x, x + 1, x**x]) - assert at._format_arg(attr, None, getattr( - at.inputs, attr))[4:] == '{0}x{1}x{2}'.format(x, x + 1, x**x) + setattr(at.inputs, attr, [x, x + 1, x ** x]) + assert at._format_arg(attr, None, getattr(at.inputs, 
attr))[ + 4: + ] == "{0}x{1}x{2}".format(x, x + 1, x ** x) def test_JointFusion_cmd(): at = JointFusion() at.inputs.dimension = 3 at.inputs.modalities = 1 - at.inputs.method = 'Joint[0.1,2]' - at.inputs.output_label_image = 'fusion_labelimage_output.nii' - warped_intensity_images = [ - example_data('im1.nii'), - example_data('im2.nii') - ] + at.inputs.method = "Joint[0.1,2]" + at.inputs.output_label_image = "fusion_labelimage_output.nii" + warped_intensity_images = [example_data("im1.nii"), example_data("im2.nii")] at.inputs.warped_intensity_images = warped_intensity_images segmentation_images = [ - example_data('segmentation0.nii.gz'), - example_data('segmentation1.nii.gz') + example_data("segmentation0.nii.gz"), + example_data("segmentation1.nii.gz"), ] at.inputs.warped_label_images = segmentation_images - T1_image = example_data('T1.nii') + T1_image = example_data("T1.nii") at.inputs.target_image = T1_image at.inputs.patch_radius = [3, 2, 1] at.inputs.search_radius = [1, 2, 3] - expected_command = ('jointfusion 3 1 -m Joint[0.1,2] -rp 3x2x1 -rs 1x2x3' - ' -tg %s -g %s -g %s -l %s -l %s' - ' fusion_labelimage_output.nii') % ( - T1_image, warped_intensity_images[0], - warped_intensity_images[1], segmentation_images[0], - segmentation_images[1]) + expected_command = ( + "jointfusion 3 1 -m Joint[0.1,2] -rp 3x2x1 -rs 1x2x3" + " -tg %s -g %s -g %s -l %s -l %s" + " fusion_labelimage_output.nii" + ) % ( + T1_image, + warped_intensity_images[0], + warped_intensity_images[1], + segmentation_images[0], + segmentation_images[1], + ) assert at.cmdline == expected_command # setting intensity or labels with unequal lengths raises error with pytest.raises(AssertionError): - at._format_arg('warped_intensity_images', InputMultiPath, - warped_intensity_images + [example_data('im3.nii')]) + at._format_arg( + "warped_intensity_images", + InputMultiPath, + warped_intensity_images + [example_data("im3.nii")], + ) diff --git a/nipype/interfaces/ants/utils.py b/nipype/interfaces/ants/utils.py index 0725f45edc..5497535609 100644 --- a/nipype/interfaces/ants/utils.py +++ b/nipype/interfaces/ants/utils.py @@ -10,27 +10,25 @@ class AverageAffineTransformInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( - 3, - 2, - argstr='%d', - mandatory=True, - position=0, - desc='image dimension (2 or 3)') + 3, 2, argstr="%d", mandatory=True, position=0, desc="image dimension (2 or 3)" + ) output_affine_transform = File( - argstr='%s', + argstr="%s", mandatory=True, position=1, - desc='Outputfname.txt: the name of the resulting transform.') + desc="Outputfname.txt: the name of the resulting transform.", + ) transforms = InputMultiPath( File(exists=True), - argstr='%s', + argstr="%s", mandatory=True, position=3, - desc='transforms to average') + desc="transforms to average", + ) class AverageAffineTransformOutputSpec(TraitedSpec): - affine_transform = File(exists=True, desc='average transform file') + affine_transform = File(exists=True, desc="average transform file") class AverageAffineTransform(ANTSCommand): @@ -45,7 +43,8 @@ class AverageAffineTransform(ANTSCommand): >>> avg.cmdline 'AverageAffineTransform 3 MYtemplatewarp.mat trans.mat func_to_struct.mat' """ - _cmd = 'AverageAffineTransform' + + _cmd = "AverageAffineTransform" input_spec = AverageAffineTransformInputSpec output_spec = AverageAffineTransformOutputSpec @@ -54,44 +53,42 @@ def _format_arg(self, opt, spec, val): def _list_outputs(self): outputs = self._outputs().get() - outputs['affine_transform'] = os.path.abspath( - 
self.inputs.output_affine_transform) + outputs["affine_transform"] = os.path.abspath( + self.inputs.output_affine_transform + ) return outputs class AverageImagesInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( - 3, - 2, - argstr='%d', - mandatory=True, - position=0, - desc='image dimension (2 or 3)') + 3, 2, argstr="%d", mandatory=True, position=0, desc="image dimension (2 or 3)" + ) output_average_image = File( "average.nii", - argstr='%s', + argstr="%s", position=1, usedefault=True, hash_files=False, - desc='the name of the resulting image.') + desc="the name of the resulting image.", + ) normalize = traits.Bool( argstr="%d", mandatory=True, position=2, - desc='Normalize: if true, the 2nd image is divided by its mean. ' - 'This will select the largest image to average into.') + desc="Normalize: if true, the 2nd image is divided by its mean. " + "This will select the largest image to average into.", + ) images = InputMultiPath( File(exists=True), - argstr='%s', + argstr="%s", mandatory=True, position=3, - desc= - 'image to apply transformation to (generally a coregistered functional)' + desc="images to average", ) class AverageImagesOutputSpec(TraitedSpec): - output_average_image = File(exists=True, desc='average image file') + output_average_image = File(exists=True, desc="average image file") class AverageImages(ANTSCommand): @@ -107,7 +104,8 @@ class AverageImages(ANTSCommand): >>> avg.cmdline 'AverageImages 3 average.nii.gz 1 rc1s1.nii rc1s1.nii' """ - _cmd = 'AverageImages' + + _cmd = "AverageImages" input_spec = AverageImagesInputSpec output_spec = AverageImagesOutputSpec @@ -116,37 +114,37 @@ def _format_arg(self, opt, spec, val): def _list_outputs(self): outputs = self._outputs().get() - outputs['output_average_image'] = os.path.realpath( - self.inputs.output_average_image) + outputs["output_average_image"] = os.path.realpath( + self.inputs.output_average_image + ) return outputs class MultiplyImagesInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( - 3, - 2, - argstr='%d', - mandatory=True, - position=0, - desc='image dimension (2 or 3)') + 3, 2, argstr="%d", mandatory=True, position=0, desc="image dimension (2 or 3)" + ) first_input = File( - argstr='%s', exists=True, mandatory=True, position=1, desc='image 1') + argstr="%s", exists=True, mandatory=True, position=1, desc="image 1" + ) second_input = traits.Either( File(exists=True), traits.Float, - argstr='%s', + argstr="%s", mandatory=True, position=2, - desc='image 2 or multiplication weight') + desc="image 2 or multiplication weight", + ) output_product_image = File( - argstr='%s', + argstr="%s", mandatory=True, position=3, - desc='Outputfname.nii.gz: the name of the resulting image.') + desc="Outputfname.nii.gz: the name of the resulting image.", + ) class MultiplyImagesOutputSpec(TraitedSpec): - output_product_image = File(exists=True, desc='average image file') + output_product_image = File(exists=True, desc="product image file") class MultiplyImages(ANTSCommand): @@ -162,7 +160,8 @@ class MultiplyImages(ANTSCommand): >>> test.cmdline 'MultiplyImages 3 moving2.nii 0.25 out.nii' """ - _cmd = 'MultiplyImages' + + _cmd = "MultiplyImages" input_spec = MultiplyImagesInputSpec output_spec = MultiplyImagesOutputSpec @@ -171,35 +170,34 @@ def _format_arg(self, opt, spec, val): def _list_outputs(self): outputs = self._outputs().get() - outputs['output_product_image'] =
+            self.inputs.output_product_image
+        )
         return outputs


 class CreateJacobianDeterminantImageInputSpec(ANTSCommandInputSpec):
     imageDimension = traits.Enum(
-        3,
-        2,
-        argstr='%d',
-        mandatory=True,
-        position=0,
-        desc='image dimension (2 or 3)')
+        3, 2, argstr="%d", mandatory=True, position=0, desc="image dimension (2 or 3)"
+    )
     deformationField = File(
-        argstr='%s',
+        argstr="%s",
         exists=True,
         mandatory=True,
         position=1,
-        desc='deformation transformation file')
-    outputImage = File(
-        argstr='%s', mandatory=True, position=2, desc='output filename')
+        desc="deformation transformation file",
+    )
+    outputImage = File(argstr="%s", mandatory=True, position=2, desc="output filename")
     doLogJacobian = traits.Enum(
-        0, 1, argstr='%d', position=3, desc='return the log jacobian')
+        0, 1, argstr="%d", position=3, desc="return the log jacobian"
+    )
     useGeometric = traits.Enum(
-        0, 1, argstr='%d', position=4, desc='return the geometric jacobian')
+        0, 1, argstr="%d", position=4, desc="return the geometric jacobian"
+    )


 class CreateJacobianDeterminantImageOutputSpec(TraitedSpec):
-    jacobian_image = File(exists=True, desc='jacobian image')
+    jacobian_image = File(exists=True, desc="jacobian image")


 class CreateJacobianDeterminantImage(ANTSCommand):
@@ -215,76 +213,72 @@ class CreateJacobianDeterminantImage(ANTSCommand):
     'CreateJacobianDeterminantImage 3 ants_Warp.nii.gz out_name.nii.gz'
     """

-    _cmd = 'CreateJacobianDeterminantImage'
+    _cmd = "CreateJacobianDeterminantImage"
     input_spec = CreateJacobianDeterminantImageInputSpec
     output_spec = CreateJacobianDeterminantImageOutputSpec

     def _format_arg(self, opt, spec, val):
-        return super(CreateJacobianDeterminantImage, self)._format_arg(
-            opt, spec, val)
+        return super(CreateJacobianDeterminantImage, self)._format_arg(opt, spec, val)

     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['jacobian_image'] = os.path.abspath(self.inputs.outputImage)
+        outputs["jacobian_image"] = os.path.abspath(self.inputs.outputImage)
         return outputs


 class AffineInitializerInputSpec(ANTSCommandInputSpec):
     dimension = traits.Enum(
-        3, 2, usedefault=True, position=0, argstr='%s', desc='dimension')
+        3, 2, usedefault=True, position=0, argstr="%s", desc="dimension"
+    )
     fixed_image = File(
-        exists=True,
-        mandatory=True,
-        position=1,
-        argstr='%s',
-        desc='reference image')
+        exists=True, mandatory=True, position=1, argstr="%s", desc="reference image"
+    )
     moving_image = File(
-        exists=True,
-        mandatory=True,
-        position=2,
-        argstr='%s',
-        desc='moving image')
+        exists=True, mandatory=True, position=2, argstr="%s", desc="moving image"
+    )
     out_file = File(
-        'transform.mat',
+        "transform.mat",
         usedefault=True,
         position=3,
-        argstr='%s',
-        desc='output transform file')
+        argstr="%s",
+        desc="output transform file",
+    )
     # Defaults in antsBrainExtraction.sh -> 15 0.1 0 10
     search_factor = traits.Float(
         15.0,
         usedefault=True,
         position=4,
-        argstr='%f',
-        desc='increments (degrees) for affine search')
+        argstr="%f",
+        desc="increments (degrees) for affine search",
+    )
     radian_fraction = traits.Range(
         0.0,
         1.0,
         value=0.1,
         usedefault=True,
         position=5,
-        argstr='%f',
-        desc='search this arc +/- principal axes')
+        argstr="%f",
+        desc="search this arc +/- principal axes",
+    )
     principal_axes = traits.Bool(
         False,
         usedefault=True,
         position=6,
-        argstr='%d',
-        desc=
-        'whether the rotation is searched around an initial principal axis alignment.'
+ argstr="%d", + desc="whether the rotation is searched around an initial principal axis alignment.", ) local_search = traits.Int( 10, usedefault=True, position=7, - argstr='%d', - desc= - ' determines if a local optimization is run at each search point for the set ' - 'number of iterations') + argstr="%d", + desc=" determines if a local optimization is run at each search point for the set " + "number of iterations", + ) class AffineInitializerOutputSpec(TraitedSpec): - out_file = File(desc='output transform file') + out_file = File(desc="output transform file") class AffineInitializer(ANTSCommand): @@ -299,43 +293,43 @@ class AffineInitializer(ANTSCommand): 'antsAffineInitializer 3 fixed1.nii moving1.nii transform.mat 15.000000 0.100000 0 10' """ - _cmd = 'antsAffineInitializer' + + _cmd = "antsAffineInitializer" input_spec = AffineInitializerInputSpec output_spec = AffineInitializerOutputSpec def _list_outputs(self): - return {'out_file': os.path.abspath(self.inputs.out_file)} + return {"out_file": os.path.abspath(self.inputs.out_file)} class ComposeMultiTransformInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( - 3, - 2, - argstr='%d', - usedefault=True, - position=0, - desc='image dimension (2 or 3)') + 3, 2, argstr="%d", usedefault=True, position=0, desc="image dimension (2 or 3)" + ) output_transform = File( - argstr='%s', + argstr="%s", position=1, - name_source=['transforms'], - name_template='%s_composed', + name_source=["transforms"], + name_template="%s_composed", keep_extension=True, - desc='the name of the resulting transform.') + desc="the name of the resulting transform.", + ) reference_image = File( - argstr='%s', + argstr="%s", position=2, - desc='Reference image (only necessary when output is warpfield)') + desc="Reference image (only necessary when output is warpfield)", + ) transforms = InputMultiPath( File(exists=True), - argstr='%s', + argstr="%s", mandatory=True, position=3, - desc='transforms to average') + desc="transforms to average", + ) class ComposeMultiTransformOutputSpec(TraitedSpec): - output_transform = File(exists=True, desc='Composed transform file') + output_transform = File(exists=True, desc="Composed transform file") class ComposeMultiTransform(ANTSCommand): @@ -352,43 +346,42 @@ class ComposeMultiTransform(ANTSCommand): 'ComposeMultiTransform 3 struct_to_template_composed.mat struct_to_template.mat func_to_struct.mat' """ - _cmd = 'ComposeMultiTransform' + + _cmd = "ComposeMultiTransform" input_spec = ComposeMultiTransformInputSpec output_spec = ComposeMultiTransformOutputSpec class LabelGeometryInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( - 3, - 2, - argstr='%d', - usedefault=True, - position=0, - desc='image dimension (2 or 3)') + 3, 2, argstr="%d", usedefault=True, position=0, desc="image dimension (2 or 3)" + ) label_image = File( - argstr='%s', + argstr="%s", position=1, mandatory=True, - desc='label image to use for extracting geometry measures') + desc="label image to use for extracting geometry measures", + ) intensity_image = File( - value='[]', + value="[]", exists=True, - argstr='%s', + argstr="%s", mandatory=True, usedefault=True, position=2, - desc='Intensity image to extract values from. ' - 'This is an optional input') + desc="Intensity image to extract values from. 
" "This is an optional input", + ) output_file = traits.Str( - name_source=['label_image'], - name_template='%s.csv', - argstr='%s', + name_source=["label_image"], + name_template="%s.csv", + argstr="%s", position=3, - desc='name of output file') + desc="name of output file", + ) class LabelGeometryOutputSpec(TraitedSpec): - output_file = File(exists=True, desc='CSV file of geometry measures') + output_file = File(exists=True, desc="CSV file of geometry measures") class LabelGeometry(ANTSCommand): @@ -409,6 +402,7 @@ class LabelGeometry(ANTSCommand): 'LabelGeometryMeasures 3 atlas.nii.gz ants_Warp.nii.gz atlas.csv' """ - _cmd = 'LabelGeometryMeasures' + + _cmd = "LabelGeometryMeasures" input_spec = LabelGeometryInputSpec output_spec = LabelGeometryOutputSpec diff --git a/nipype/interfaces/ants/visualization.py b/nipype/interfaces/ants/visualization.py index b5ab661889..3e3c75be50 100644 --- a/nipype/interfaces/ants/visualization.py +++ b/nipype/interfaces/ants/visualization.py @@ -12,56 +12,55 @@ class ConvertScalarImageToRGBInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, - argstr='%d', + argstr="%d", usedefault=True, - desc='image dimension (2 or 3)', + desc="image dimension (2 or 3)", mandatory=True, - position=0) + position=0, + ) input_image = File( - argstr='%s', + argstr="%s", exists=True, - desc='Main input is a 3-D grayscale image.', + desc="Main input is a 3-D grayscale image.", mandatory=True, - position=1) + position=1, + ) output_image = traits.Str( - 'rgb.nii.gz', - argstr='%s', - usedefault=True, - desc='rgb output image', - position=2) + "rgb.nii.gz", argstr="%s", usedefault=True, desc="rgb output image", position=2 + ) mask_image = File( - 'none', - argstr='%s', - exists=True, - desc='mask image', - position=3, - usedefault=True) + "none", argstr="%s", exists=True, desc="mask image", position=3, usedefault=True + ) colormap = traits.Str( - argstr='%s', + argstr="%s", usedefault=True, - desc=('Possible colormaps: grey, red, green, ' - 'blue, copper, jet, hsv, spring, summer, ' - 'autumn, winter, hot, cool, overunder, custom '), + desc=( + "Possible colormaps: grey, red, green, " + "blue, copper, jet, hsv, spring, summer, " + "autumn, winter, hot, cool, overunder, custom " + ), mandatory=True, - position=4) + position=4, + ) custom_color_map_file = traits.Str( - 'none', - argstr='%s', - usedefault=True, - desc='custom color map file', - position=5) + "none", argstr="%s", usedefault=True, desc="custom color map file", position=5 + ) minimum_input = traits.Int( - argstr='%d', desc='minimum input', mandatory=True, position=6) + argstr="%d", desc="minimum input", mandatory=True, position=6 + ) maximum_input = traits.Int( - argstr='%d', desc='maximum input', mandatory=True, position=7) + argstr="%d", desc="maximum input", mandatory=True, position=7 + ) minimum_RGB_output = traits.Int( - 0, usedefault=True, argstr='%d', desc='', position=8) + 0, usedefault=True, argstr="%d", desc="", position=8 + ) maximum_RGB_output = traits.Int( - 255, usedefault=True, argstr='%d', desc='', position=9) + 255, usedefault=True, argstr="%d", desc="", position=9 + ) class ConvertScalarImageToRGBOutputSpec(TraitedSpec): - output_image = File(exists=True, desc='converted RGB image') + output_image = File(exists=True, desc="converted RGB image") class ConvertScalarImageToRGB(ANTSCommand): @@ -78,7 +77,8 @@ class ConvertScalarImageToRGB(ANTSCommand): >>> converter.cmdline 'ConvertScalarImageToRGB 3 T1.nii.gz rgb.nii.gz none jet none 0 6 0 255' """ - _cmd = 'ConvertScalarImageToRGB' + + 
_cmd = "ConvertScalarImageToRGB" input_spec = ConvertScalarImageToRGBInputSpec output_spec = ConvertScalarImageToRGBOutputSpec @@ -87,73 +87,88 @@ def _format_arg(self, opt, spec, val): def _list_outputs(self): outputs = self._outputs().get() - outputs['output_image'] = os.path.join(os.getcwd(), - self.inputs.output_image) + outputs["output_image"] = os.path.join(os.getcwd(), self.inputs.output_image) return outputs class CreateTiledMosaicInputSpec(ANTSCommandInputSpec): input_image = File( - argstr='-i %s', + argstr="-i %s", exists=True, - desc='Main input is a 3-D grayscale image.', - mandatory=True) + desc="Main input is a 3-D grayscale image.", + mandatory=True, + ) rgb_image = File( - argstr='-r %s', + argstr="-r %s", exists=True, - desc=('An optional Rgb image can be added as an overlay.' - 'It must have the same image' - 'geometry as the input grayscale image.'), - mandatory=True) + desc=( + "An optional Rgb image can be added as an overlay." + "It must have the same image" + "geometry as the input grayscale image." + ), + mandatory=True, + ) mask_image = File( - argstr='-x %s', - exists=True, - desc='Specifies the ROI of the RGB voxels used.') + argstr="-x %s", exists=True, desc="Specifies the ROI of the RGB voxels used." + ) alpha_value = traits.Float( - argstr='-a %.2f', - desc=('If an Rgb image is provided, render the overlay ' - 'using the specified alpha parameter.')) + argstr="-a %.2f", + desc=( + "If an Rgb image is provided, render the overlay " + "using the specified alpha parameter." + ), + ) output_image = traits.Str( - 'output.png', - argstr='-o %s', - desc='The output consists of the tiled mosaic image.', - usedefault=True) + "output.png", + argstr="-o %s", + desc="The output consists of the tiled mosaic image.", + usedefault=True, + ) tile_geometry = traits.Str( - argstr='-t %s', + argstr="-t %s", desc=( - 'The tile geometry specifies the number of rows and columns' + "The tile geometry specifies the number of rows and columns" 'in the output image. For example, if the user specifies "5x10", ' - 'then 5 rows by 10 columns of slices are rendered. If R < 0 and C > ' - '0 (or vice versa), the negative value is selected' - 'based on direction.')) + "then 5 rows by 10 columns of slices are rendered. If R < 0 and C > " + "0 (or vice versa), the negative value is selected" + "based on direction." + ), + ) direction = traits.Int( - argstr='-d %d', - desc=('Specifies the direction of ' - 'the slices. If no direction is specified, the ' - 'direction with the coarsest spacing is chosen.')) + argstr="-d %d", + desc=( + "Specifies the direction of " + "the slices. If no direction is specified, the " + "direction with the coarsest spacing is chosen." + ), + ) pad_or_crop = traits.Str( - argstr='-p %s', - desc='argument passed to -p flag:' - '[padVoxelWidth,]' - '[lowerPadding[0]xlowerPadding[1],upperPadding[0]xupperPadding[1],' - 'constantValue]' - 'The user can specify whether to pad or crop a specified ' - 'voxel-width boundary of each individual slice. For this ' - 'program, cropping is simply padding with negative voxel-widths.' - 'If one pads (+), the user can also specify a constant pad ' - 'value (default = 0). If a mask is specified, the user can use ' + argstr="-p %s", + desc="argument passed to -p flag:" + "[padVoxelWidth,]" + "[lowerPadding[0]xlowerPadding[1],upperPadding[0]xupperPadding[1]," + "constantValue]" + "The user can specify whether to pad or crop a specified " + "voxel-width boundary of each individual slice. 
For this " + "program, cropping is simply padding with negative voxel-widths." + "If one pads (+), the user can also specify a constant pad " + "value (default = 0). If a mask is specified, the user can use " 'the mask to define the region, by using the keyword "mask"' - ' plus an offset, e.g. "-p mask+3".') + ' plus an offset, e.g. "-p mask+3".', + ) slices = traits.Str( - argstr='-s %s', - desc=('Number of slices to increment Slice1xSlice2xSlice3' - '[numberOfSlicesToIncrement,,]')) - flip_slice = traits.Str(argstr='-f %s', desc='flipXxflipY') - permute_axes = traits.Bool(argstr='-g', desc='doPermute') + argstr="-s %s", + desc=( + "Number of slices to increment Slice1xSlice2xSlice3" + "[numberOfSlicesToIncrement,,]" + ), + ) + flip_slice = traits.Str(argstr="-f %s", desc="flipXxflipY") + permute_axes = traits.Bool(argstr="-g", desc="doPermute") class CreateTiledMosaicOutputSpec(TraitedSpec): - output_image = File(exists=True, desc='image file') + output_image = File(exists=True, desc="image file") class CreateTiledMosaic(ANTSCommand): @@ -180,12 +195,11 @@ class CreateTiledMosaic(ANTSCommand): -r rgb.nii.gz -s [2 ,100 ,160]' """ - _cmd = 'CreateTiledMosaic' + _cmd = "CreateTiledMosaic" input_spec = CreateTiledMosaicInputSpec output_spec = CreateTiledMosaicOutputSpec def _list_outputs(self): outputs = self._outputs().get() - outputs['output_image'] = os.path.join(os.getcwd(), - self.inputs.output_image) + outputs["output_image"] = os.path.join(os.getcwd(), self.inputs.output_image) return outputs diff --git a/nipype/interfaces/base/__init__.py b/nipype/interfaces/base/__init__.py index a846794561..7c70f9768d 100644 --- a/nipype/interfaces/base/__init__.py +++ b/nipype/interfaces/base/__init__.py @@ -11,19 +11,41 @@ from traits.trait_handlers import TraitDictObject, TraitListObject from traits.trait_errors import TraitError -from .core import (Interface, BaseInterface, SimpleInterface, CommandLine, - StdOutCommandLine, MpiCommandLine, SEMLikeCommandLine, - LibraryBaseInterface, PackageInfo) +from .core import ( + Interface, + BaseInterface, + SimpleInterface, + CommandLine, + StdOutCommandLine, + MpiCommandLine, + SEMLikeCommandLine, + LibraryBaseInterface, + PackageInfo, +) -from .specs import (BaseTraitedSpec, TraitedSpec, DynamicTraitedSpec, - BaseInterfaceInputSpec, CommandLineInputSpec, - StdOutCommandLineInputSpec) +from .specs import ( + BaseTraitedSpec, + TraitedSpec, + DynamicTraitedSpec, + BaseInterfaceInputSpec, + CommandLineInputSpec, + StdOutCommandLineInputSpec, +) from .traits_extension import ( - traits, Undefined, isdefined, has_metadata, - File, ImageFile, Directory, - Str, DictStrStr, - OutputMultiObject, InputMultiObject, - OutputMultiPath, InputMultiPath) + traits, + Undefined, + isdefined, + has_metadata, + File, + ImageFile, + Directory, + Str, + DictStrStr, + OutputMultiObject, + InputMultiObject, + OutputMultiPath, + InputMultiPath, +) -from .support import (Bunch, InterfaceResult, NipypeInterfaceError) +from .support import Bunch, InterfaceResult, NipypeInterfaceError diff --git a/nipype/interfaces/base/core.py b/nipype/interfaces/base/core.py index 126fd51a8a..fd4c701fff 100644 --- a/nipype/interfaces/base/core.py +++ b/nipype/interfaces/base/core.py @@ -26,25 +26,33 @@ from ... 
diff --git a/nipype/interfaces/base/__init__.py b/nipype/interfaces/base/__init__.py
index a846794561..7c70f9768d 100644
--- a/nipype/interfaces/base/__init__.py
+++ b/nipype/interfaces/base/__init__.py
@@ -11,19 +11,41 @@
 from traits.trait_handlers import TraitDictObject, TraitListObject
 from traits.trait_errors import TraitError

-from .core import (Interface, BaseInterface, SimpleInterface, CommandLine,
-                   StdOutCommandLine, MpiCommandLine, SEMLikeCommandLine,
-                   LibraryBaseInterface, PackageInfo)
+from .core import (
+    Interface,
+    BaseInterface,
+    SimpleInterface,
+    CommandLine,
+    StdOutCommandLine,
+    MpiCommandLine,
+    SEMLikeCommandLine,
+    LibraryBaseInterface,
+    PackageInfo,
+)

-from .specs import (BaseTraitedSpec, TraitedSpec, DynamicTraitedSpec,
-                    BaseInterfaceInputSpec, CommandLineInputSpec,
-                    StdOutCommandLineInputSpec)
+from .specs import (
+    BaseTraitedSpec,
+    TraitedSpec,
+    DynamicTraitedSpec,
+    BaseInterfaceInputSpec,
+    CommandLineInputSpec,
+    StdOutCommandLineInputSpec,
+)

 from .traits_extension import (
-    traits, Undefined, isdefined, has_metadata,
-    File, ImageFile, Directory,
-    Str, DictStrStr,
-    OutputMultiObject, InputMultiObject,
-    OutputMultiPath, InputMultiPath)
+    traits,
+    Undefined,
+    isdefined,
+    has_metadata,
+    File,
+    ImageFile,
+    Directory,
+    Str,
+    DictStrStr,
+    OutputMultiObject,
+    InputMultiObject,
+    OutputMultiPath,
+    InputMultiPath,
+)

-from .support import (Bunch, InterfaceResult, NipypeInterfaceError)
+from .support import Bunch, InterfaceResult, NipypeInterfaceError
diff --git a/nipype/interfaces/base/core.py b/nipype/interfaces/base/core.py
index 126fd51a8a..fd4c701fff 100644
--- a/nipype/interfaces/base/core.py
+++ b/nipype/interfaces/base/core.py
@@ -26,25 +26,33 @@
 from ... import config, logging, LooseVersion
 from ...utils.provenance import write_provenance
 from ...utils.misc import str2bool, rgetcwd
-from ...utils.filemanip import (split_filename, which, get_dependencies)
+from ...utils.filemanip import split_filename, which, get_dependencies
 from ...utils.subprocess import run_command

 from ...external.due import due
 from .traits_extension import traits, isdefined
-from .specs import (BaseInterfaceInputSpec, CommandLineInputSpec,
-                    StdOutCommandLineInputSpec, MpiCommandLineInputSpec,
-                    get_filecopy_info)
-from .support import (Bunch, InterfaceResult, NipypeInterfaceError,
-                      format_help)
+from .specs import (
+    BaseInterfaceInputSpec,
+    CommandLineInputSpec,
+    StdOutCommandLineInputSpec,
+    MpiCommandLineInputSpec,
+    get_filecopy_info,
+)
+from .support import Bunch, InterfaceResult, NipypeInterfaceError, format_help

-iflogger = logging.getLogger('nipype.interface')
+iflogger = logging.getLogger("nipype.interface")

 VALID_TERMINAL_OUTPUT = [
-    'stream', 'allatonce', 'file', 'file_split', 'file_stdout', 'file_stderr',
-    'none'
+    "stream",
+    "allatonce",
+    "file",
+    "file_split",
+    "file_stdout",
+    "file_stderr",
+    "none",
 ]
-__docformat__ = 'restructuredtext'
+__docformat__ = "restructuredtext"


 class Interface(object):
@@ -113,8 +121,9 @@ def _get_filecopy_info(cls):
         Necessary for pipeline operation
         """
         iflogger.warning(
-            '_get_filecopy_info member of Interface was deprecated '
-            'in nipype-1.1.6 and will be removed in 1.2.0')
+            "_get_filecopy_info member of Interface was deprecated "
+            "in nipype-1.1.6 and will be removed in 1.2.0"
+        )
         return get_filecopy_info(cls)


@@ -149,6 +158,7 @@ class BaseInterface(Interface):

     """
+
     input_spec = BaseInterfaceInputSpec
     _version = None
     _additional_metadata = []
@@ -157,16 +167,17 @@ class BaseInterface(Interface):
     resource_monitor = True  # Enabled for this interface IFF enabled in the config
     _etelemetry_version_data = None

-    def __init__(self, from_file=None, resource_monitor=None,
-                 ignore_exception=False, **inputs):
-        if config.getboolean('execution', 'check_version'):
+    def __init__(
+        self, from_file=None, resource_monitor=None, ignore_exception=False, **inputs
+    ):
+        if config.getboolean("execution", "check_version"):
             from ... import check_latest_version
+
             if BaseInterface._etelemetry_version_data is None:
                 BaseInterface._etelemetry_version_data = check_latest_version()

         if not self.input_spec:
-            raise Exception(
-                'No input_spec in class: %s' % self.__class__.__name__)
+            raise Exception("No input_spec in class: %s" % self.__class__.__name__)

         self.inputs = self.input_spec(**inputs)
         self.ignore_exception = ignore_exception
@@ -194,34 +205,42 @@ def _check_requires(self, spec, name, value):
         """
         if spec.requires:
             values = [
-                not isdefined(getattr(self.inputs, field))
-                for field in spec.requires
+                not isdefined(getattr(self.inputs, field)) for field in spec.requires
             ]
             if any(values) and isdefined(value):
                 if len(values) > 1:
-                    fmt = ("%s requires values for inputs %s because '%s' is set. "
-                           "For a list of required inputs, see %s.help()")
+                    fmt = (
+                        "%s requires values for inputs %s because '%s' is set. "
+                        "For a list of required inputs, see %s.help()"
+                    )
                 else:
-                    fmt = ("%s requires a value for input %s because '%s' is set. "
-                           "For a list of required inputs, see %s.help()")
+                    fmt = (
+                        "%s requires a value for input %s because '%s' is set. "
" + "For a list of required inputs, see %s.help()" + ) + msg = fmt % ( + self.__class__.__name__, + ", ".join("'%s'" % req for req in spec.requires), + name, + self.__class__.__name__, + ) raise ValueError(msg) def _check_xor(self, spec, name, value): """ check if mutually exclusive inputs are satisfied """ if spec.xor: - values = [ - isdefined(getattr(self.inputs, field)) for field in spec.xor - ] + values = [isdefined(getattr(self.inputs, field)) for field in spec.xor] if not any(values) and not isdefined(value): - msg = ("%s requires a value for one of the inputs '%s'. " - "For a list of required inputs, see %s.help()" % - (self.__class__.__name__, ', '.join(spec.xor), - self.__class__.__name__)) + msg = ( + "%s requires a value for one of the inputs '%s'. " + "For a list of required inputs, see %s.help()" + % ( + self.__class__.__name__, + ", ".join(spec.xor), + self.__class__.__name__, + ) + ) raise ValueError(msg) def _check_mandatory_inputs(self): @@ -231,15 +250,17 @@ def _check_mandatory_inputs(self): value = getattr(self.inputs, name) self._check_xor(spec, name, value) if not isdefined(value) and spec.xor is None: - msg = ("%s requires a value for input '%s'. " - "For a list of required inputs, see %s.help()" % - (self.__class__.__name__, name, - self.__class__.__name__)) + msg = ( + "%s requires a value for input '%s'. " + "For a list of required inputs, see %s.help()" + % (self.__class__.__name__, name, self.__class__.__name__) + ) raise ValueError(msg) if isdefined(value): self._check_requires(spec, name, value) for name, spec in list( - self.inputs.traits(mandatory=None, transient=None).items()): + self.inputs.traits(mandatory=None, transient=None).items() + ): self._check_requires(spec, name, getattr(self.inputs, name)) def _check_version_requirements(self, trait_object, raise_exception=True): @@ -253,16 +274,16 @@ def _check_version_requirements(self, trait_object, raise_exception=True): if names and self.version: version = LooseVersion(str(self.version)) for name in names: - min_ver = LooseVersion( - str(trait_object.traits()[name].min_ver)) + min_ver = LooseVersion(str(trait_object.traits()[name].min_ver)) if min_ver > version: unavailable_traits.append(name) if not isdefined(getattr(trait_object, name)): continue if raise_exception: raise Exception( - 'Trait %s (%s) (version %s < required %s)' % - (name, self.__class__.__name__, version, min_ver)) + "Trait %s (%s) (version %s < required %s)" + % (name, self.__class__.__name__, version, min_ver) + ) # check maximum version check = dict(max_ver=lambda t: t is not None) @@ -270,16 +291,16 @@ def _check_version_requirements(self, trait_object, raise_exception=True): if names and self.version: version = LooseVersion(str(self.version)) for name in names: - max_ver = LooseVersion( - str(trait_object.traits()[name].max_ver)) + max_ver = LooseVersion(str(trait_object.traits()[name].max_ver)) if max_ver < version: unavailable_traits.append(name) if not isdefined(getattr(trait_object, name)): continue if raise_exception: raise Exception( - 'Trait %s (%s) (version %s > required %s)' % - (name, self.__class__.__name__, version, max_ver)) + "Trait %s (%s) (version %s > required %s)" + % (name, self.__class__.__name__, version, max_ver) + ) return unavailable_traits def _run_interface(self, runtime): @@ -291,7 +312,7 @@ def _duecredit_cite(self): """ Add the interface references to the duecredit citations """ for r in self.references_: - r['path'] = self.__module__ + r["path"] = self.__module__ due.cite(**r) def run(self, cwd=None, 
@@ -333,10 +354,11 @@ def run(self, cwd=None, ignore_exception=None, **inputs):

         # initialize provenance tracking
         store_provenance = str2bool(
-            config.get('execution', 'write_provenance', 'false'))
+            config.get("execution", "write_provenance", "false")
+        )

         env = deepcopy(dict(os.environ))
         if self._redirect_x:
-            env['DISPLAY'] = config.get_display()
+            env["DISPLAY"] = config.get_display()

         runtime = Bunch(
             cwd=cwd,
@@ -348,17 +370,19 @@ def run(self, cwd=None, ignore_exception=None, **inputs):
             endTime=None,
             platform=platform.platform(),
             hostname=platform.node(),
-            version=self.version)
+            version=self.version,
+        )
         runtime_attrs = set(runtime.dictcopy())

         mon_sp = None
         if enable_rm:
-            mon_freq = float(
-                config.get('execution', 'resource_monitor_frequency', 1))
+            mon_freq = float(config.get("execution", "resource_monitor_frequency", 1))
             proc_pid = os.getpid()
             iflogger.debug(
-                'Creating a ResourceMonitor on a %s interface, PID=%d.',
-                self.__class__.__name__, proc_pid)
+                "Creating a ResourceMonitor on a %s interface, PID=%d.",
+                self.__class__.__name__,
+                proc_pid,
+            )
             mon_sp = ResourceMonitor(proc_pid, freq=mon_freq)
             mon_sp.start()

@@ -373,38 +397,37 @@ def run(self, cwd=None, ignore_exception=None, **inputs):
             outputs = self.aggregate_outputs(runtime)
         except Exception as e:
             import traceback
+
             # Retrieve the maximum info fast
             runtime.traceback = traceback.format_exc()
             # Gather up the exception arguments and append nipype info.
-            exc_args = e.args if getattr(e, 'args') else tuple()
+            exc_args = e.args if getattr(e, "args") else tuple()
             exc_args += (
-                'An exception of type %s occurred while running interface %s.'
-                % (type(e).__name__, self.__class__.__name__), )
-            if config.get('logging', 'interface_level',
-                          'info').lower() == 'debug':
-                exc_args += ('Inputs: %s' % str(self.inputs), )
+                "An exception of type %s occurred while running interface %s."
+                % (type(e).__name__, self.__class__.__name__),
+            )
+            if config.get("logging", "interface_level", "info").lower() == "debug":
+                exc_args += ("Inputs: %s" % str(self.inputs),)

-            runtime.traceback_args = ('\n'.join(
-                ['%s' % arg for arg in exc_args]), )
+            runtime.traceback_args = ("\n".join(["%s" % arg for arg in exc_args]),)

             if not ignore_exception:
                 raise
         finally:
             if runtime is None or runtime_attrs - set(runtime.dictcopy()):
-                raise RuntimeError("{} interface failed to return valid "
-                                   "runtime object".format(
-                                       interface.__class__.__name__))
+                raise RuntimeError(
+                    "{} interface failed to return valid "
+                    "runtime object".format(interface.__class__.__name__)
+                )
             # This needs to be done always
             runtime.endTime = dt.isoformat(dt.utcnow())
             timediff = parseutc(runtime.endTime) - parseutc(runtime.startTime)
-            runtime.duration = (timediff.days * 86400 + timediff.seconds +
-                                timediff.microseconds / 1e6)
+            runtime.duration = (
+                timediff.days * 86400 + timediff.seconds + timediff.microseconds / 1e6
+            )

             results = InterfaceResult(
-                interface,
-                runtime,
-                inputs=inputs,
-                outputs=outputs,
-                provenance=None)
+                interface, runtime, inputs=inputs, outputs=outputs, provenance=None
+            )

             # Add provenance (if required)
             if store_provenance:
@@ -414,23 +437,24 @@ def run(self, cwd=None, ignore_exception=None, **inputs):
             # Make sure runtime profiler is shut down
             if enable_rm:
                 import numpy as np
+
                 mon_sp.stop()

                 runtime.mem_peak_gb = None
                 runtime.cpu_percent = None

                 # Read .prof file in and set runtime values
-                vals = np.loadtxt(mon_sp.fname, delimiter=',')
+                vals = np.loadtxt(mon_sp.fname, delimiter=",")
                 if vals.size:
                     vals = np.atleast_2d(vals)
                     runtime.mem_peak_gb = vals[:, 2].max() / 1024
                     runtime.cpu_percent = vals[:, 1].max()

                     runtime.prof_dict = {
-                        'time': vals[:, 0].tolist(),
-                        'cpus': vals[:, 1].tolist(),
-                        'rss_GiB': (vals[:, 2] / 1024).tolist(),
-                        'vms_GiB': (vals[:, 3] / 1024).tolist(),
+                        "time": vals[:, 0].tolist(),
+                        "cpus": vals[:, 1].tolist(),
+                        "rss_GiB": (vals[:, 2] / 1024).tolist(),
+                        "vms_GiB": (vals[:, 3] / 1024).tolist(),
                     }

         os.chdir(syscwd)
@@ -461,18 +485,23 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None):
         na_names = aggregate_names.intersection(_na_outputs)
         if na_names:
             # XXX Change to TypeError in Nipype 2.0
-            raise KeyError("""\
+            raise KeyError(
+                """\
 Output trait(s) %s not available in version %s of interface %s.\
-""" % (', '.join(na_names), self.version, self.__class__.__name__))
+"""
+                % (", ".join(na_names), self.version, self.__class__.__name__)
+            )

         for key in aggregate_names:  # Final aggregation
             val = predicted_outputs[key]
             try:
                 setattr(outputs, key, val)
             except TraitError as error:
-                if 'an existing' in getattr(error, 'info', 'default'):
-                    msg = "No such file or directory '%s' for output '%s' of a %s interface" % \
-                        (val, key, self.__class__.__name__)
+                if "an existing" in getattr(error, "info", "default"):
+                    msg = (
+                        "No such file or directory '%s' for output '%s' of a %s interface"
+                        % (val, key, self.__class__.__name__)
+                    )
                     raise FileNotFoundError(msg)
                 raise error
         return outputs
@@ -480,9 +509,10 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None):
     @property
     def version(self):
         if self._version is None:
-            if str2bool(config.get('execution', 'stop_on_unknown_version')):
-                raise ValueError('Interface %s has no version information' %
-                                 self.__class__.__name__)
+            if str2bool(config.get("execution", "stop_on_unknown_version")):
+                raise ValueError(
+                    "Interface %s has no version information" % self.__class__.__name__
+                )
         return self._version

     def load_inputs_from_json(self, json_file, overwrite=True):
@@ -507,8 +537,8 @@ def save_inputs_to_json(self, json_file):
         A convenient way to save current inputs to a JSON file.
         """
         inputs = self.inputs.get_traitsfree()
-        iflogger.debug('saving inputs %s', inputs)
-        with open(json_file, 'w') as fhandle:
+        iflogger.debug("saving inputs %s", inputs)
+        with open(json_file, "w") as fhandle:
             json.dump(inputs, fhandle, indent=4, ensure_ascii=False)

     def _pre_run_hook(self, runtime):
@@ -576,7 +606,8 @@ class SimpleInterface(BaseInterface):

     def __init__(self, from_file=None, resource_monitor=None, **inputs):
         super(SimpleInterface, self).__init__(
-            from_file=from_file, resource_monitor=resource_monitor, **inputs)
+            from_file=from_file, resource_monitor=resource_monitor, **inputs
+        )
         self._results = {}

     def _list_outputs(self):
@@ -617,11 +648,12 @@ class must be instantiated with a command argument
     '11c37f97649cd61627f4afe5136af8c0'

     """
+
     input_spec = CommandLineInputSpec
-    _cmd_prefix = ''
+    _cmd_prefix = ""
     _cmd = None
     _version = None
-    _terminal_output = 'stream'
+    _terminal_output = "stream"

     @classmethod
     def set_default_terminal_output(cls, output_type):
@@ -636,18 +668,16 @@ def set_default_terminal_output(cls, output_type):
         if output_type in VALID_TERMINAL_OUTPUT:
             cls._terminal_output = output_type
         else:
-            raise AttributeError(
-                'Invalid terminal output_type: %s' % output_type)
+            raise AttributeError("Invalid terminal output_type: %s" % output_type)

     def __init__(self, command=None, terminal_output=None, **inputs):
         super(CommandLine, self).__init__(**inputs)
         self._environ = None
         # Set command. Input argument takes precedence
-        self._cmd = command or getattr(self, '_cmd', None)
+        self._cmd = command or getattr(self, "_cmd", None)

         # Store dependencies in runtime object
-        self._ldd = str2bool(
-            config.get('execution', 'get_linked_libs', 'true'))
+        self._ldd = str2bool(config.get("execution", "get_linked_libs", "true"))

         if self._cmd is None:
             raise Exception("Missing command")
@@ -660,8 +690,9 @@ def cmd(self):
         """sets base command, immutable"""
         if not self._cmd:
             raise NotImplementedError(
-                'CommandLineInterface should wrap an executable, but '
-                'none has been set.')
+                "CommandLineInterface should wrap an executable, but "
+                "none has been set."
+            )
         return self._cmd

     @property
@@ -670,7 +701,7 @@ def cmdline(self):
         validates arguments and generates command line"""
         self._check_mandatory_inputs()
         allargs = [self._cmd_prefix + self.cmd] + self._parse_inputs()
-        return ' '.join(allargs)
+        return " ".join(allargs)

     @property
     def terminal_output(self):
@@ -681,23 +712,26 @@ def terminal_output(self, value):
         if value not in VALID_TERMINAL_OUTPUT:
             raise RuntimeError(
                 'Setting invalid value "%s" for terminal_output. Valid values are '
-                '%s.' % (value,
-                         ', '.join(['"%s"' % v
-                                    for v in VALID_TERMINAL_OUTPUT])))
% (value, ", ".join(['"%s"' % v for v in VALID_TERMINAL_OUTPUT])) + ) self._terminal_output = value def raise_exception(self, runtime): raise RuntimeError( - ('Command:\n{cmdline}\nStandard output:\n{stdout}\n' - 'Standard error:\n{stderr}\nReturn code: {returncode}' - ).format(**runtime.dictcopy())) + ( + "Command:\n{cmdline}\nStandard output:\n{stdout}\n" + "Standard error:\n{stderr}\nReturn code: {returncode}" + ).format(**runtime.dictcopy()) + ) def _get_environ(self): - return getattr(self.inputs, 'environ', {}) + return getattr(self.inputs, "environ", {}) - def version_from_command(self, flag='-v', cmd=None): - iflogger.warning('version_from_command member of CommandLine was ' - 'Deprecated in nipype-1.0.0 and deleted in 1.1.0') + def version_from_command(self, flag="-v", cmd=None): + iflogger.warning( + "version_from_command member of CommandLine was " + "Deprecated in nipype-1.0.0 and deleted in 1.1.0" + ) if cmd is None: cmd = self.cmd.split()[0] @@ -706,7 +740,7 @@ def version_from_command(self, flag='-v', cmd=None): out_environ = self._get_environ() env.update(out_environ) proc = sp.Popen( - ' '.join((cmd, flag)), + " ".join((cmd, flag)), shell=True, env=env, stdout=sp.PIPE, @@ -715,7 +749,7 @@ def version_from_command(self, flag='-v', cmd=None): o, e = proc.communicate() return o - def _run_interface(self, runtime, correct_return_codes=(0, )): + def _run_interface(self, runtime, correct_return_codes=(0,)): """Execute command via subprocess Parameters @@ -743,16 +777,18 @@ def _run_interface(self, runtime, correct_return_codes=(0, )): if cmd_path is None: raise IOError( 'No command "%s" found on host %s. Please check that the ' - 'corresponding package is installed.' % (executable_name, - runtime.hostname)) + "corresponding package is installed." + % (executable_name, runtime.hostname) + ) runtime.command_path = cmd_path - runtime.dependencies = (get_dependencies(executable_name, - runtime.environ) - if self._ldd else '') + runtime.dependencies = ( + get_dependencies(executable_name, runtime.environ) + if self._ldd + else "" + ) runtime = run_command(runtime, output=self.terminal_output) - if runtime.returncode is None or \ - runtime.returncode not in correct_return_codes: + if runtime.returncode is None or runtime.returncode not in correct_return_codes: self.raise_exception(runtime) return runtime @@ -763,15 +799,15 @@ def _format_arg(self, name, trait_spec, value): Formats a trait containing argstr metadata """ argstr = trait_spec.argstr - iflogger.debug('%s_%s', name, value) + iflogger.debug("%s_%s", name, value) if trait_spec.is_trait_type(traits.Bool) and "%" not in argstr: # Boolean options have no format string. Just append options if True. return argstr if value else None # traits.Either turns into traits.TraitCompound and does not have any # inner_traits - elif trait_spec.is_trait_type(traits.List) \ - or (trait_spec.is_trait_type(traits.TraitCompound) and - isinstance(value, list)): + elif trait_spec.is_trait_type(traits.List) or ( + trait_spec.is_trait_type(traits.TraitCompound) and isinstance(value, list) + ): # This is a bit simple-minded at present, and should be # construed as the default. If more sophisticated behavior # is needed, it can be accomplished with metadata (e.g. 
@@ -781,13 +817,13 @@ def _format_arg(self, name, trait_spec, value):
             # Depending on whether we stick with traitlets, and whether or
             # not we beef up traitlets.List, we may want to put some
             # type-checking code here as well
-            sep = trait_spec.sep if trait_spec.sep is not None else ' '
+            sep = trait_spec.sep if trait_spec.sep is not None else " "

-            if argstr.endswith('...'):
+            if argstr.endswith("..."):
                 # repeatable option
                 # --id %d... will expand to
                 # --id 1 --id 2 --id 3 etc.,.
-                argstr = argstr.replace('...', '')
+                argstr = argstr.replace("...", "")
                 return sep.join([argstr % elt for elt in value])
             else:
                 return argstr % sep.join(str(elt) for elt in value)
@@ -807,13 +843,16 @@ def _filename_from_source(self, name, chain=None):
             return retval

         # Do not generate filename when excluded by other inputs
-        if any(isdefined(getattr(self.inputs, field))
-               for field in trait_spec.xor or ()):
+        if any(
+            isdefined(getattr(self.inputs, field)) for field in trait_spec.xor or ()
+        ):
             return retval

         # Do not generate filename when required fields are missing
-        if not all(isdefined(getattr(self.inputs, field))
-                   for field in trait_spec.requires or ()):
+        if not all(
+            isdefined(getattr(self.inputs, field))
+            for field in trait_spec.requires or ()
+        ):
             return retval

         if isdefined(retval) and "%s" in retval:
@@ -826,15 +865,14 @@ def _filename_from_source(self, name, chain=None):
             ns = trait_spec.name_source
             while isinstance(ns, (list, tuple)):
                 if len(ns) > 1:
-                    iflogger.warning(
-                        'Only one name_source per trait is allowed')
+                    iflogger.warning("Only one name_source per trait is allowed")
                 ns = ns[0]

             if not isinstance(ns, (str, bytes)):
                 raise ValueError(
-                    'name_source of \'{}\' trait should be an input trait '
-                    'name, but a type {} object was found'.format(
-                        name, type(ns)))
+                    "name_source of '{}' trait should be an input trait "
+                    "name, but a type {} object was found".format(name, type(ns))
+                )

             if isdefined(getattr(self.inputs, ns)):
                 name_source = ns
@@ -849,8 +887,7 @@ def _filename_from_source(self, name, chain=None):
                 base = source
         else:
             if name in chain:
-                raise NipypeInterfaceError(
-                    'Mutually pointing name_sources')
+                raise NipypeInterfaceError("Mutually pointing name_sources")

             chain.append(name)
             base = self._filename_from_source(ns, chain)
@@ -938,7 +975,7 @@ class StdOutCommandLine(CommandLine):
     input_spec = StdOutCommandLineInputSpec

     def _gen_filename(self, name):
-        return self._gen_outfilename() if name == 'out_file' else None
+        return self._gen_outfilename() if name == "out_file" else None

     def _gen_outfilename(self):
         raise NotImplementedError
@@ -961,6 +998,7 @@ class MpiCommandLine(CommandLine):
     >>> mpi_cli.cmdline
     'mpiexec -n 8 my_mpi_prog -v'
     """
+
     input_spec = MpiCommandLineInputSpec

     @property
@@ -968,11 +1006,11 @@ def cmdline(self):
         """Adds 'mpiexec' to begining of command"""
         result = []
         if self.inputs.use_mpi:
-            result.append('mpiexec')
+            result.append("mpiexec")
             if self.inputs.n_procs:
-                result.append('-n %d' % self.inputs.n_procs)
+                result.append("-n %d" % self.inputs.n_procs)
         result.append(super(MpiCommandLine, self).cmdline)
-        return ' '.join(result)
+        return " ".join(result)


 class SEMLikeCommandLine(CommandLine):
@@ -992,10 +1030,8 @@ def _outputs_from_inputs(self, outputs):
         for name in list(outputs.keys()):
             corresponding_input = getattr(self.inputs, name)
             if isdefined(corresponding_input):
-                if (isinstance(corresponding_input, bool)
-                        and corresponding_input):
-                    outputs[name] = \
-                        os.path.abspath(self._outputs_filenames[name])
+                if isinstance(corresponding_input, bool) and corresponding_input:
+                    outputs[name] = os.path.abspath(self._outputs_filenames[name])
                 else:
                     if isinstance(corresponding_input, list):
                         outputs[name] = [
@@ -1023,18 +1059,23 @@ def __init__(self, check_import=True, *args, **kwargs):
         super(LibraryBaseInterface, self).__init__(*args, **kwargs)
         if check_import:
             import pkgutil
+
             failed_imports = []
             for pkg in (self._pkg,) + tuple(self.imports):
                 if pkgutil.find_loader(pkg) is None:
                     failed_imports.append(pkg)
             if failed_imports:
-                iflogger.warning('Unable to import %s; %s interface may fail to '
-                                 'run', failed_imports, self.__class__.__name__)
+                iflogger.warning(
+                    "Unable to import %s; %s interface may fail to " "run",
+                    failed_imports,
+                    self.__class__.__name__,
+                )

     @property
     def version(self):
         if self._version is None:
             import importlib
+
             try:
                 self._version = importlib.import_module(self._pkg).__version__
             except (ImportError, AttributeError):
@@ -1055,14 +1096,15 @@ def version(klass):
                 clout = CommandLine(
                     command=klass.version_cmd,
                     resource_monitor=False,
-                    terminal_output='allatonce').run()
+                    terminal_output="allatonce",
+                ).run()
             except IOError:
                 return None

             raw_info = clout.runtime.stdout
         elif klass.version_file is not None:
             try:
-                with open(klass.version_file, 'rt') as fobj:
+                with open(klass.version_file, "rt") as fobj:
                     raw_info = fobj.read()
             except OSError:
                 return None
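The MpiCommandLine doctest quoted in the core.py hunks above can be reproduced as follows; this is a sketch of the documented behaviour only ('my_mpi_prog' is the placeholder executable from that doctest, not a real program):

# Sketch of the MpiCommandLine doctest: 'mpiexec -n <n_procs>' is
# prepended to the wrapped command only when use_mpi is enabled.
from nipype.interfaces.base import MpiCommandLine

mpi_cli = MpiCommandLine(command="my_mpi_prog")
mpi_cli.inputs.args = "-v"
mpi_cli.inputs.use_mpi = True
mpi_cli.inputs.n_procs = 8
print(mpi_cli.cmdline)  # mpiexec -n 8 my_mpi_prog -v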
diff --git a/nipype/interfaces/base/specs.py b/nipype/interfaces/base/specs.py
index 4c9b36cc26..579f97def8 100644
--- a/nipype/interfaces/base/specs.py
+++ b/nipype/interfaces/base/specs.py
@@ -31,7 +31,7 @@

 from ... import config, __version__

-FLOAT_FORMAT = '{:.10f}'.format
+FLOAT_FORMAT = "{:.10f}".format
 nipype_version = Version(__version__)

@@ -55,6 +55,7 @@ class BaseTraitedSpec(traits.HasTraits):
     XXX Reconsider this in the long run, but it seems like the best
     solution to move forward on the refactoring.
     """
+
     package_version = nipype_version

     def __init__(self, **kwargs):
@@ -83,8 +84,8 @@ def __repr__(self):
         """ Return a well-formatted representation of the traits """
         outstr = []
         for name, value in sorted(self.trait_get().items()):
-            outstr.append('%s = %s' % (name, value))
-        return '\n{}\n'.format('\n'.join(outstr))
+            outstr.append("%s = %s" % (name, value))
+        return "\n{}\n".format("\n".join(outstr))

     def _generate_handlers(self):
         """Find all traits with the 'xor' metadata and attach an event
@@ -111,11 +112,12 @@ def _xor_warn(self, obj, name, old, new):
                     continue
                 if isdefined(getattr(self, trait_name)):
                     self.trait_set(
-                        trait_change_notify=False, **{
-                            '%s' % name: Undefined
-                        })
-                    msg = ('Input "%s" is mutually exclusive with input "%s", '
-                           'which is already set') % (name, trait_name)
+                        trait_change_notify=False, **{"%s" % name: Undefined}
+                    )
+                    msg = (
+                        'Input "%s" is mutually exclusive with input "%s", '
+                        "which is already set"
+                    ) % (name, trait_name)
                     raise IOError(msg)

     def _deprecated_warn(self, obj, name, old, new):
         """Checks if a user assigns a value to a deprecated trait
         """
         if isdefined(new):
             trait_spec = self.traits()[name]
-            msg1 = ('Input %s in interface %s is deprecated.' %
-                    (name, self.__class__.__name__.split('InputSpec')[0]))
-            msg2 = ('Will be removed or raise an error as of release %s' %
-                    trait_spec.deprecated)
+            msg1 = "Input %s in interface %s is deprecated." % (
+                name,
+                self.__class__.__name__.split("InputSpec")[0],
+            )
+            msg2 = (
+                "Will be removed or raise an error as of release %s"
+                % trait_spec.deprecated
+            )
             if trait_spec.new_name:
                 if trait_spec.new_name not in self.copyable_trait_names():
-                    raise TraitError(msg1 + ' Replacement trait %s not found' %
-                                     trait_spec.new_name)
-                msg3 = 'It has been replaced by %s.' % trait_spec.new_name
+                    raise TraitError(
+                        msg1 + " Replacement trait %s not found" % trait_spec.new_name
+                    )
+                msg3 = "It has been replaced by %s." % trait_spec.new_name
             else:
-                msg3 = ''
-            msg = ' '.join((msg1, msg2, msg3))
+                msg3 = ""
+            msg = " ".join((msg1, msg2, msg3))
             if Version(str(trait_spec.deprecated)) < self.package_version:
                 raise TraitError(msg)
             else:
                 if trait_spec.new_name:
-                    msg += 'Unsetting old value %s; setting new value %s.' % (
-                        name, trait_spec.new_name)
+                    msg += "Unsetting old value %s; setting new value %s." % (
+                        name,
+                        trait_spec.new_name,
+                    )
                 warn(msg)
                 if trait_spec.new_name:
                     self.trait_set(
                         trait_change_notify=False,
-                        **{
-                            '%s' % name: Undefined,
-                            '%s' % trait_spec.new_name: new
-                        })
+                        **{"%s" % name: Undefined, "%s" % trait_spec.new_name: new}
+                    )

     def trait_get(self, **kwargs):
         """ Returns traited class as a dict
@@ -184,8 +191,11 @@ def _clean_container(self, objekt, undefinedval=None, skipundefined=False):
                 else:
                     if not skipundefined:
                         out[key] = undefinedval
-        elif (isinstance(objekt, TraitListObject) or isinstance(objekt, list) or
-              isinstance(objekt, tuple)):
+        elif (
+            isinstance(objekt, TraitListObject)
+            or isinstance(objekt, list)
+            or isinstance(objekt, tuple)
+        ):
             out = []
             for val in objekt:
                 if isdefined(val):
@@ -211,8 +221,7 @@ def has_metadata(self, name, metadata, value=None, recursive=True):
         Return has_metadata for the requested trait name in this
         interface
         """
-        return has_metadata(
-            self.trait(name).trait_type, metadata, value, recursive)
+        return has_metadata(self.trait(name).trait_type, metadata, value, recursive)

     def get_hashval(self, hash_method=None):
         """Return a dictionary of our items with hashes for each file.
@@ -241,36 +250,45 @@ def get_hashval(self, hash_method=None):
                 # skip undefined traits and traits with nohash=True
                 continue

-            hash_files = (not self.has_metadata(name, "hash_files", False) and
-                          not self.has_metadata(name, "name_source"))
-            list_nofilename.append((name,
-                                    self._get_sorteddict(
-                                        val,
-                                        hash_method=hash_method,
-                                        hash_files=hash_files)))
-            list_withhash.append((name,
-                                  self._get_sorteddict(
-                                      val,
-                                      True,
-                                      hash_method=hash_method,
-                                      hash_files=hash_files)))
+            hash_files = not self.has_metadata(
+                name, "hash_files", False
+            ) and not self.has_metadata(name, "name_source")
+            list_nofilename.append(
+                (
+                    name,
+                    self._get_sorteddict(
+                        val, hash_method=hash_method, hash_files=hash_files
+                    ),
+                )
+            )
+            list_withhash.append(
+                (
+                    name,
+                    self._get_sorteddict(
+                        val, True, hash_method=hash_method, hash_files=hash_files
+                    ),
+                )
+            )
         return list_withhash, md5(str(list_nofilename).encode()).hexdigest()

-    def _get_sorteddict(self,
-                        objekt,
-                        dictwithhash=False,
-                        hash_method=None,
-                        hash_files=True):
+    def _get_sorteddict(
+        self, objekt, dictwithhash=False, hash_method=None, hash_files=True
+    ):
         if isinstance(objekt, dict):
             out = []
             for key, val in sorted(objekt.items()):
                 if isdefined(val):
-                    out.append((key,
-                                self._get_sorteddict(
-                                    val,
-                                    dictwithhash,
-                                    hash_method=hash_method,
-                                    hash_files=hash_files)))
+                    out.append(
+                        (
+                            key,
+                            self._get_sorteddict(
+                                val,
+                                dictwithhash,
+                                hash_method=hash_method,
+                                hash_files=hash_files,
+                            ),
+                        )
+                    )
         elif isinstance(objekt, (list, tuple)):
             out = []
             for val in objekt:
@@ -280,24 +298,28 @@ def _get_sorteddict(self, objekt, dictwithhash=False, hash_method=None, hash_files=True):
                         val,
                         dictwithhash,
                         hash_method=hash_method,
-                        hash_files=hash_files))
+                        hash_files=hash_files,
+                    )
+                )
             if isinstance(objekt, tuple):
                 out = tuple(out)
         else:
             out = None
             if isdefined(objekt):
-                if (hash_files and isinstance(objekt, (str, bytes)) and
-                        os.path.isfile(objekt)):
+                if (
+                    hash_files
+                    and isinstance(objekt, (str, bytes))
+                    and os.path.isfile(objekt)
+                ):
                     if hash_method is None:
-                        hash_method = config.get('execution', 'hash_method')
+                        hash_method = config.get("execution", "hash_method")

-                    if hash_method.lower() == 'timestamp':
+                    if hash_method.lower() == "timestamp":
                         hash = hash_timestamp(objekt)
-                    elif hash_method.lower() == 'content':
+                    elif hash_method.lower() == "content":
                         hash = hash_infile(objekt)
                     else:
-                        raise Exception(
-                            "Unknown hash method: %s" % hash_method)
+                        raise Exception("Unknown hash method: %s" % hash_method)
                     if dictwithhash:
                         out = (objekt, hash)
                     else:
@@ -344,6 +366,7 @@ class TraitedSpec(BaseTraitedSpec):

     This is used in 90% of the cases.
     """
+
     _ = traits.Disallow


@@ -357,6 +380,7 @@ class DynamicTraitedSpec(BaseTraitedSpec):
     This class is a workaround for add_traits and clone_traits not
     functioning well together.
     """
+
     def __deepcopy__(self, memo):
         """
         Replace the ``__deepcopy__`` member with a traits-friendly implementation.
@@ -385,9 +409,10 @@ def __deepcopy__(self, memo):


 class CommandLineInputSpec(BaseInterfaceInputSpec):
-    args = Str(argstr='%s', desc='Additional parameters to the command')
+    args = Str(argstr="%s", desc="Additional parameters to the command")
     environ = traits.DictStrStr(
-        desc='Environment variables', usedefault=True, nohash=True)
+        desc="Environment variables", usedefault=True, nohash=True
+    )


 class StdOutCommandLineInputSpec(CommandLineInputSpec):
@@ -396,12 +421,13 @@ class StdOutCommandLineInputSpec(CommandLineInputSpec):

 class MpiCommandLineInputSpec(CommandLineInputSpec):
     use_mpi = traits.Bool(
-        False,
-        desc="Whether or not to run the command with mpiexec",
-        usedefault=True)
-    n_procs = traits.Int(desc="Num processors to specify to mpiexec. Do not "
-                         "specify if this is managed externally (e.g. through "
-                         "SGE)")
+        False, desc="Whether or not to run the command with mpiexec", usedefault=True
+    )
+    n_procs = traits.Int(
+        desc="Num processors to specify to mpiexec. Do not "
+        "specify if this is managed externally (e.g. through "
+        "SGE)"
+    )


 def get_filecopy_info(cls):
@@ -412,7 +438,7 @@ def get_filecopy_info(cls):
         return None

     # normalize_filenames is not a classmethod, hence check first
-    if not isclass(cls) and hasattr(cls, 'normalize_filenames'):
+    if not isclass(cls) and hasattr(cls, "normalize_filenames"):
         cls.normalize_filenames()
     info = []
     inputs = cls.input_spec() if isclass(cls) else cls.inputs
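As the get_hashval/_get_sorteddict hunks above show, a spec hashes its defined traits (optionally hashing file contents or timestamps) and returns both the per-trait list and an md5 digest. A minimal sketch with a hypothetical spec class (ExampleSpec is illustrative, not part of nipype):

# Sketch: hashing the inputs of a hypothetical TraitedSpec subclass.
from nipype.interfaces.base import TraitedSpec, traits

class ExampleSpec(TraitedSpec):  # hypothetical, for illustration only
    threshold = traits.Float()

spec = ExampleSpec(threshold=0.5)
list_withhash, hashvalue = spec.get_hashval(hash_method="content")
print(hashvalue)  # md5 hex digest of the hash-free (name, value) list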
diff --git a/nipype/interfaces/base/support.py b/nipype/interfaces/base/support.py
index 18d2471c48..e3e1a229f6 100644
--- a/nipype/interfaces/base/support.py
+++ b/nipype/interfaces/base/support.py
@@ -15,10 +15,12 @@
 from ... import logging
 from ...utils.misc import is_container
 from ...utils.filemanip import md5, hash_infile
-iflogger = logging.getLogger('nipype.interface')
+
+iflogger = logging.getLogger("nipype.interface")

 HELP_LINEWIDTH = 70

+
 class NipypeInterfaceError(Exception):
     """Custom error for interfaces"""

@@ -26,7 +28,7 @@ def __init__(self, value):
         self.value = value

     def __str__(self):
-        return '{}'.format(self.value)
+        return "{}".format(self.value)


 class Bunch(object):
@@ -70,7 +72,7 @@ def items(self):

     def iteritems(self):
         """iterates over bunch attributes as key, value pairs"""
-        iflogger.warning('iteritems is deprecated, use items instead')
+        iflogger.warning("iteritems is deprecated, use items instead")
         return list(self.items())

     def get(self, *args):
@@ -95,22 +97,22 @@ def __repr__(self):
         needs setting or not. Till that mechanism changes, only alter
         this after careful consideration.
         """
-        outstr = ['Bunch(']
+        outstr = ["Bunch("]
         first = True
         for k, v in sorted(self.items()):
             if not first:
-                outstr.append(', ')
+                outstr.append(", ")
             if isinstance(v, dict):
                 pairs = []
                 for key, value in sorted(v.items()):
                     pairs.append("'%s': %s" % (key, value))
-                v = '{' + ', '.join(pairs) + '}'
-                outstr.append('%s=%s' % (k, v))
+                v = "{" + ", ".join(pairs) + "}"
+                outstr.append("%s=%s" % (k, v))
             else:
-                outstr.append('%s=%r' % (k, v))
+                outstr.append("%s=%r" % (k, v))
             first = False
-        outstr.append(')')
-        return ''.join(outstr)
+        outstr.append(")")
+        return "".join(outstr)

     def _get_bunch_hash(self):
         """Return a dictionary of our items with hashes for each file.
@@ -143,7 +145,7 @@ def _get_bunch_hash(self):
                     item = None
                 else:
                     if len(val) == 0:
-                        raise AttributeError('%s attribute is empty' % key)
+                        raise AttributeError("%s attribute is empty" % key)
                     item = val[0]
             else:
                 item = val
@@ -167,18 +169,18 @@ def _get_bunch_hash(self):
     def _repr_pretty_(self, p, cycle):
         """Support for the pretty module from ipython.externals"""
         if cycle:
-            p.text('Bunch(...)')
+            p.text("Bunch(...)")
         else:
-            p.begin_group(6, 'Bunch(')
+            p.begin_group(6, "Bunch(")
             first = True
             for k, v in sorted(self.items()):
                 if not first:
-                    p.text(',')
+                    p.text(",")
                     p.breakable()
-                p.text(k + '=')
+                p.text(k + "=")
                 p.pretty(v)
                 first = False
-            p.end_group(6, ')')
+            p.end_group(6, ")")


 def _hash_bunch_dict(adict, key):
@@ -216,12 +218,7 @@ class InterfaceResult(object):

     """

-    def __init__(self,
-                 interface,
-                 runtime,
-                 inputs=None,
-                 outputs=None,
-                 provenance=None):
+    def __init__(self, interface, runtime, inputs=None, outputs=None, provenance=None):
         self._version = 2.0
         self.interface = interface
         self.runtime = runtime
@@ -252,18 +249,20 @@ def format_help(cls):
     from ...utils.misc import trim

     docstring = []
-    cmd = getattr(cls, '_cmd', None)
+    cmd = getattr(cls, "_cmd", None)
     if cmd:
-        docstring += ['Wraps the executable command ``%s``.' % cmd, '']
+        docstring += ["Wraps the executable command ``%s``." % cmd, ""]

     if cls.__doc__:
-        docstring += trim(cls.__doc__).split('\n') + ['']
-
-    allhelp = '\n'.join(
-        docstring +
-        _inputs_help(cls) + [''] +
-        _outputs_help(cls) + [''] +
-        _refs_help(cls)
+        docstring += trim(cls.__doc__).split("\n") + [""]
+
+    allhelp = "\n".join(
+        docstring
+        + _inputs_help(cls)
+        + [""]
+        + _outputs_help(cls)
+        + [""]
+        + _refs_help(cls)
     )
     return allhelp.expandtabs(8)

@@ -277,7 +276,7 @@ def _inputs_help(cls):
     ['Inputs::', '', '\t[Mandatory]', '\tin_file: (a pathlike object or string...
""" - helpstr = ['Inputs::'] + helpstr = ["Inputs::"] mandatory_keys = [] optional_items = [] @@ -285,19 +284,21 @@ def _inputs_help(cls): inputs = cls.input_spec() mandatory_items = list(inputs.traits(mandatory=True).items()) if mandatory_items: - helpstr += ['', '\t[Mandatory]'] + helpstr += ["", "\t[Mandatory]"] for name, spec in mandatory_items: helpstr += get_trait_desc(inputs, name, spec) mandatory_keys = {item[0] for item in mandatory_items} - optional_items = ['\n'.join(get_trait_desc(inputs, name, val)) - for name, val in inputs.traits(transient=None).items() - if name not in mandatory_keys] + optional_items = [ + "\n".join(get_trait_desc(inputs, name, val)) + for name, val in inputs.traits(transient=None).items() + if name not in mandatory_keys + ] if optional_items: - helpstr += ['', '\t[Optional]'] + optional_items + helpstr += ["", "\t[Optional]"] + optional_items if not mandatory_keys and not optional_items: - helpstr += ['', '\tNone'] + helpstr += ["", "\tNone"] return helpstr @@ -310,12 +311,13 @@ def _outputs_help(cls): ['Outputs::', '', '\tout: (a float)\n\t\tglobal correlation value'] """ - helpstr = ['Outputs::', '', '\tNone'] + helpstr = ["Outputs::", "", "\tNone"] if cls.output_spec: outputs = cls.output_spec() outhelpstr = [ - '\n'.join(get_trait_desc(outputs, name, spec)) - for name, spec in outputs.traits(transient=None).items()] + "\n".join(get_trait_desc(outputs, name, spec)) + for name, spec in outputs.traits(transient=None).items() + ] if outhelpstr: helpstr = helpstr[:-1] + outhelpstr return helpstr @@ -323,13 +325,13 @@ def _outputs_help(cls): def _refs_help(cls): """Prints interface references.""" - references = getattr(cls, 'references_', None) + references = getattr(cls, "references_", None) if not references: return [] - helpstr = ['References:', '-----------'] + helpstr = ["References:", "-----------"] for r in references: - helpstr += ['{}'.format(r['entry'])] + helpstr += ["{}".format(r["entry"])] return helpstr @@ -341,59 +343,62 @@ def get_trait_desc(inputs, name, spec): requires = spec.requires argstr = spec.argstr - manhelpstr = ['\t%s' % name] + manhelpstr = ["\t%s" % name] type_info = spec.full_info(inputs, name, None) - default = '' + default = "" if spec.usedefault: - default = ', nipype default value: %s' % str( - spec.default_value()[1]) + default = ", nipype default value: %s" % str(spec.default_value()[1]) line = "(%s%s)" % (type_info, default) manhelpstr = wrap( line, HELP_LINEWIDTH, - initial_indent=manhelpstr[0] + ': ', - subsequent_indent='\t\t ') + initial_indent=manhelpstr[0] + ": ", + subsequent_indent="\t\t ", + ) if desc: - for line in desc.split('\n'): + for line in desc.split("\n"): line = re.sub(r"\s+", " ", line) manhelpstr += wrap( - line, HELP_LINEWIDTH, - initial_indent='\t\t', - subsequent_indent='\t\t') + line, HELP_LINEWIDTH, initial_indent="\t\t", subsequent_indent="\t\t" + ) if argstr: pos = spec.position if pos is not None: manhelpstr += wrap( - 'argument: ``%s``, position: %s' % (argstr, pos), + "argument: ``%s``, position: %s" % (argstr, pos), HELP_LINEWIDTH, - initial_indent='\t\t', - subsequent_indent='\t\t') + initial_indent="\t\t", + subsequent_indent="\t\t", + ) else: manhelpstr += wrap( - 'argument: ``%s``' % argstr, + "argument: ``%s``" % argstr, HELP_LINEWIDTH, - initial_indent='\t\t', - subsequent_indent='\t\t') + initial_indent="\t\t", + subsequent_indent="\t\t", + ) if xor: - line = '%s' % ', '.join(xor) + line = "%s" % ", ".join(xor) manhelpstr += wrap( line, HELP_LINEWIDTH, - 
diff --git a/nipype/interfaces/base/tests/test_auto_CommandLine.py b/nipype/interfaces/base/tests/test_auto_CommandLine.py
index 22d7406921..b003543a3e 100644
--- a/nipype/interfaces/base/tests/test_auto_CommandLine.py
+++ b/nipype/interfaces/base/tests/test_auto_CommandLine.py
@@ -4,11 +4,7 @@

 def test_CommandLine_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",), environ=dict(nohash=True, usedefault=True,),
     )
     inputs = CommandLine.input_spec()
diff --git a/nipype/interfaces/base/tests/test_auto_MpiCommandLine.py b/nipype/interfaces/base/tests/test_auto_MpiCommandLine.py
index 4084a19945..7ab181458f 100644
--- a/nipype/interfaces/base/tests/test_auto_MpiCommandLine.py
+++ b/nipype/interfaces/base/tests/test_auto_MpiCommandLine.py
@@ -4,13 +4,10 @@

 def test_MpiCommandLine_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
         n_procs=dict(),
-        use_mpi=dict(usedefault=True, ),
+        use_mpi=dict(usedefault=True,),
     )
     inputs = MpiCommandLine.input_spec()
diff --git a/nipype/interfaces/base/tests/test_auto_SEMLikeCommandLine.py b/nipype/interfaces/base/tests/test_auto_SEMLikeCommandLine.py
index 18e788f3a4..e17eed4db1 100644
--- a/nipype/interfaces/base/tests/test_auto_SEMLikeCommandLine.py
+++ b/nipype/interfaces/base/tests/test_auto_SEMLikeCommandLine.py
@@ -4,11 +4,7 @@

 def test_SEMLikeCommandLine_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",), environ=dict(nohash=True, usedefault=True,),
     )
     inputs = SEMLikeCommandLine.input_spec()
diff --git a/nipype/interfaces/base/tests/test_auto_StdOutCommandLine.py b/nipype/interfaces/base/tests/test_auto_StdOutCommandLine.py
index de6ef5450a..bc0771ac78 100644
--- a/nipype/interfaces/base/tests/test_auto_StdOutCommandLine.py
+++ b/nipype/interfaces/base/tests/test_auto_StdOutCommandLine.py
@@ -4,17 +4,9 @@

 def test_StdOutCommandLine_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        out_file=dict(
-            argstr='> %s',
-            extensions=None,
-            genfile=True,
-            position=-1,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,),
     )
     inputs = StdOutCommandLine.input_spec()
diff --git a/nipype/interfaces/base/tests/test_core.py b/nipype/interfaces/base/tests/test_core.py
index a265d8f8a9..d7e2620c9b 100644
--- a/nipype/interfaces/base/tests/test_core.py
+++ b/nipype/interfaces/base/tests/test_core.py
@@ -53,27 +53,27 @@ def __init__(self):

 def test_BaseInterface():
-    config.set('monitoring', 'enable', '0')
+    config.set("monitoring", "enable", "0")
     assert nib.BaseInterface.help() is None

     class InputSpec(nib.TraitedSpec):
-        foo = nib.traits.Int(desc='a random int')
-        goo = nib.traits.Int(desc='a random int', mandatory=True)
-        moo = nib.traits.Int(desc='a random int', mandatory=False)
nib.traits.Int(desc='a random int', mandatory=False) - hoo = nib.traits.Int(desc='a random int', usedefault=True) - zoo = nib.File(desc='a file', copyfile=False) - woo = nib.File(desc='a file', copyfile=True) + foo = nib.traits.Int(desc="a random int") + goo = nib.traits.Int(desc="a random int", mandatory=True) + moo = nib.traits.Int(desc="a random int", mandatory=False) + hoo = nib.traits.Int(desc="a random int", usedefault=True) + zoo = nib.File(desc="a file", copyfile=False) + woo = nib.File(desc="a file", copyfile=True) class OutputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int') + foo = nib.traits.Int(desc="a random int") class DerivedInterface(nib.BaseInterface): input_spec = InputSpec resource_monitor = False assert DerivedInterface.help() is None - assert 'moo' in ''.join(_inputs_help(DerivedInterface)) + assert "moo" in "".join(_inputs_help(DerivedInterface)) assert DerivedInterface()._outputs() is None assert DerivedInterface().inputs.foo == nib.Undefined with pytest.raises(ValueError): @@ -103,7 +103,7 @@ def _run_interface(self, runtime): def test_BaseInterface_load_save_inputs(tmpdir): - tmp_json = tmpdir.join('settings.json').strpath + tmp_json = tmpdir.join("settings.json").strpath class InputSpec(nib.TraitedSpec): input1 = nib.traits.Int() @@ -117,7 +117,7 @@ class DerivedInterface(nib.BaseInterface): def __init__(self, **inputs): super(DerivedInterface, self).__init__(**inputs) - inputs_dict = {'input1': 12, 'input3': True, 'input4': 'some string'} + inputs_dict = {"input1": 12, "input3": True, "input4": "some string"} bif = DerivedInterface(**inputs_dict) bif.save_inputs_to_json(tmp_json) bif2 = DerivedInterface() @@ -128,22 +128,22 @@ def __init__(self, **inputs): assert bif3.inputs.get_traitsfree() == inputs_dict inputs_dict2 = inputs_dict.copy() - inputs_dict2.update({'input4': 'some other string'}) - bif4 = DerivedInterface(from_file=tmp_json, input4=inputs_dict2['input4']) + inputs_dict2.update({"input4": "some other string"}) + bif4 = DerivedInterface(from_file=tmp_json, input4=inputs_dict2["input4"]) assert bif4.inputs.get_traitsfree() == inputs_dict2 - bif5 = DerivedInterface(input4=inputs_dict2['input4']) + bif5 = DerivedInterface(input4=inputs_dict2["input4"]) bif5.load_inputs_from_json(tmp_json, overwrite=False) assert bif5.inputs.get_traitsfree() == inputs_dict2 - bif6 = DerivedInterface(input4=inputs_dict2['input4']) + bif6 = DerivedInterface(input4=inputs_dict2["input4"]) bif6.load_inputs_from_json(tmp_json) assert bif6.inputs.get_traitsfree() == inputs_dict # test get hashval in a complex interface from nipype.interfaces.ants import Registration - settings = example_data( - example_data('smri_ants_registration_settings.json')) + + settings = example_data(example_data("smri_ants_registration_settings.json")) with open(settings) as setf: data_dict = json.load(setf) @@ -154,15 +154,16 @@ def __init__(self, **inputs): tsthash2 = Registration(from_file=settings) assert {} == check_dict(data_dict, tsthash2.inputs.get_traitsfree()) - _, hashvalue = tsthash.inputs.get_hashval(hash_method='timestamp') - assert '8562a5623562a871115eb14822ee8d02' == hashvalue + _, hashvalue = tsthash.inputs.get_hashval(hash_method="timestamp") + assert "8562a5623562a871115eb14822ee8d02" == hashvalue class MinVerInputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int', min_ver='0.9') + foo = nib.traits.Int(desc="a random int", min_ver="0.9") + class MaxVerInputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int', max_ver='0.7') + foo = 
nib.traits.Int(desc="a random int", max_ver="0.7") def test_input_version_1(): @@ -172,7 +173,7 @@ class DerivedInterface1(nib.BaseInterface): obj = DerivedInterface1() obj._check_version_requirements(obj.inputs) - config.set('execution', 'stop_on_unknown_version', True) + config.set("execution", "stop_on_unknown_version", True) with pytest.raises(ValueError) as excinfo: obj._check_version_requirements(obj.inputs) @@ -184,7 +185,7 @@ class DerivedInterface1(nib.BaseInterface): def test_input_version_2(): class DerivedInterface1(nib.BaseInterface): input_spec = MinVerInputSpec - _version = '0.8' + _version = "0.8" obj = DerivedInterface1() obj.inputs.foo = 1 @@ -196,7 +197,7 @@ class DerivedInterface1(nib.BaseInterface): def test_input_version_3(): class DerivedInterface1(nib.BaseInterface): input_spec = MinVerInputSpec - _version = '0.10' + _version = "0.10" obj = DerivedInterface1() obj._check_version_requirements(obj.inputs) @@ -205,7 +206,7 @@ class DerivedInterface1(nib.BaseInterface): def test_input_version_4(): class DerivedInterface1(nib.BaseInterface): input_spec = MinVerInputSpec - _version = '0.9' + _version = "0.9" obj = DerivedInterface1() obj.inputs.foo = 1 @@ -215,7 +216,7 @@ class DerivedInterface1(nib.BaseInterface): def test_input_version_5(): class DerivedInterface2(nib.BaseInterface): input_spec = MaxVerInputSpec - _version = '0.8' + _version = "0.8" obj = DerivedInterface2() obj.inputs.foo = 1 @@ -227,7 +228,7 @@ class DerivedInterface2(nib.BaseInterface): def test_input_version_6(): class DerivedInterface1(nib.BaseInterface): input_spec = MaxVerInputSpec - _version = '0.7' + _version = "0.7" obj = DerivedInterface1() obj.inputs.foo = 1 @@ -236,52 +237,52 @@ class DerivedInterface1(nib.BaseInterface): def test_output_version(): class InputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int') + foo = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int', min_ver='0.9') + foo = nib.traits.Int(desc="a random int", min_ver="0.9") class DerivedInterface1(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec - _version = '0.10' + _version = "0.10" resource_monitor = False obj = DerivedInterface1() assert obj._check_version_requirements(obj._outputs()) == [] class InputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int') + foo = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int', min_ver='0.11') + foo = nib.traits.Int(desc="a random int", min_ver="0.11") class DerivedInterface1(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec - _version = '0.10' + _version = "0.10" resource_monitor = False obj = DerivedInterface1() - assert obj._check_version_requirements(obj._outputs()) == ['foo'] + assert obj._check_version_requirements(obj._outputs()) == ["foo"] class InputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int') + foo = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int', min_ver='0.11') + foo = nib.traits.Int(desc="a random int", min_ver="0.11") class DerivedInterface1(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec - _version = '0.10' + _version = "0.10" resource_monitor = False def _run_interface(self, runtime): return runtime def _list_outputs(self): - return {'foo': 1} + return {"foo": 1} obj = DerivedInterface1() with pytest.raises(KeyError): @@ -291,51 +292,50 @@ def _list_outputs(self): def 
test_Commandline(): with pytest.raises(Exception): nib.CommandLine() - ci = nib.CommandLine(command='which') - assert ci.cmd == 'which' + ci = nib.CommandLine(command="which") + assert ci.cmd == "which" assert ci.inputs.args == nib.Undefined - ci2 = nib.CommandLine(command='which', args='ls') - assert ci2.cmdline == 'which ls' - ci3 = nib.CommandLine(command='echo') + ci2 = nib.CommandLine(command="which", args="ls") + assert ci2.cmdline == "which ls" + ci3 = nib.CommandLine(command="echo") ci3.resource_monitor = False - ci3.inputs.environ = {'MYENV': 'foo'} + ci3.inputs.environ = {"MYENV": "foo"} res = ci3.run() - assert res.runtime.environ['MYENV'] == 'foo' + assert res.runtime.environ["MYENV"] == "foo" assert res.outputs is None class CommandLineInputSpec1(nib.CommandLineInputSpec): - foo = nib.Str(argstr='%s', desc='a str') - goo = nib.traits.Bool(argstr='-g', desc='a bool', position=0) - hoo = nib.traits.List(argstr='-l %s', desc='a list') - moo = nib.traits.List( - argstr='-i %d...', desc='a repeated list', position=-1) - noo = nib.traits.Int(argstr='-x %d', desc='an int') - roo = nib.traits.Str(desc='not on command line') + foo = nib.Str(argstr="%s", desc="a str") + goo = nib.traits.Bool(argstr="-g", desc="a bool", position=0) + hoo = nib.traits.List(argstr="-l %s", desc="a list") + moo = nib.traits.List(argstr="-i %d...", desc="a repeated list", position=-1) + noo = nib.traits.Int(argstr="-x %d", desc="an int") + roo = nib.traits.Str(desc="not on command line") soo = nib.traits.Bool(argstr="-soo") nib.CommandLine.input_spec = CommandLineInputSpec1 - ci4 = nib.CommandLine(command='cmd') - ci4.inputs.foo = 'foo' + ci4 = nib.CommandLine(command="cmd") + ci4.inputs.foo = "foo" ci4.inputs.goo = True - ci4.inputs.hoo = ['a', 'b'] + ci4.inputs.hoo = ["a", "b"] ci4.inputs.moo = [1, 2, 3] ci4.inputs.noo = 0 - ci4.inputs.roo = 'hello' + ci4.inputs.roo = "hello" ci4.inputs.soo = False cmd = ci4._parse_inputs() - assert cmd[0] == '-g' - assert cmd[-1] == '-i 1 -i 2 -i 3' - assert 'hello' not in ' '.join(cmd) - assert '-soo' not in ' '.join(cmd) + assert cmd[0] == "-g" + assert cmd[-1] == "-i 1 -i 2 -i 3" + assert "hello" not in " ".join(cmd) + assert "-soo" not in " ".join(cmd) ci4.inputs.soo = True cmd = ci4._parse_inputs() - assert '-soo' in ' '.join(cmd) + assert "-soo" in " ".join(cmd) class CommandLineInputSpec2(nib.CommandLineInputSpec): - foo = nib.File(argstr='%s', desc='a str', genfile=True) + foo = nib.File(argstr="%s", desc="a str", genfile=True) nib.CommandLine.input_spec = CommandLineInputSpec2 - ci5 = nib.CommandLine(command='cmd') + ci5 = nib.CommandLine(command="cmd") with pytest.raises(NotImplementedError): ci5._parse_inputs() @@ -343,102 +343,106 @@ class DerivedClass(nib.CommandLine): input_spec = CommandLineInputSpec2 def _gen_filename(self, name): - return 'filename' + return "filename" - ci6 = DerivedClass(command='cmd') - assert ci6._parse_inputs()[0] == 'filename' + ci6 = DerivedClass(command="cmd") + assert ci6._parse_inputs()[0] == "filename" nib.CommandLine.input_spec = nib.CommandLineInputSpec def test_Commandline_environ(monkeypatch, tmpdir): from nipype import config + config.set_default_config() tmpdir.chdir() - monkeypatch.setitem(os.environ, 'DISPLAY', ':1') + monkeypatch.setitem(os.environ, "DISPLAY", ":1") # Test environment - ci3 = nib.CommandLine(command='echo') + ci3 = nib.CommandLine(command="echo") res = ci3.run() - assert res.runtime.environ['DISPLAY'] == ':1' + assert res.runtime.environ["DISPLAY"] == ":1" # Test display_variable option - 
monkeypatch.delitem(os.environ, 'DISPLAY', raising=False) - config.set('execution', 'display_variable', ':3') + monkeypatch.delitem(os.environ, "DISPLAY", raising=False) + config.set("execution", "display_variable", ":3") res = ci3.run() - assert 'DISPLAY' not in ci3.inputs.environ - assert 'DISPLAY' not in res.runtime.environ + assert "DISPLAY" not in ci3.inputs.environ + assert "DISPLAY" not in res.runtime.environ # If the interface has _redirect_x then yes, it should be set ci3._redirect_x = True res = ci3.run() - assert res.runtime.environ['DISPLAY'] == ':3' + assert res.runtime.environ["DISPLAY"] == ":3" # Test overwrite - monkeypatch.setitem(os.environ, 'DISPLAY', ':1') - ci3.inputs.environ = {'DISPLAY': ':2'} + monkeypatch.setitem(os.environ, "DISPLAY", ":1") + ci3.inputs.environ = {"DISPLAY": ":2"} res = ci3.run() - assert res.runtime.environ['DISPLAY'] == ':2' + assert res.runtime.environ["DISPLAY"] == ":2" def test_CommandLine_output(tmpdir): # Create one file tmpdir.chdir() - file = tmpdir.join('foo.txt') - file.write('123456\n') + file = tmpdir.join("foo.txt") + file.write("123456\n") name = os.path.basename(file.strpath) - ci = nib.CommandLine(command='ls -l') - ci.terminal_output = 'allatonce' + ci = nib.CommandLine(command="ls -l") + ci.terminal_output = "allatonce" res = ci.run() - assert res.runtime.merged == '' + assert res.runtime.merged == "" assert name in res.runtime.stdout # Check stdout is written - ci = nib.CommandLine(command='ls -l') - ci.terminal_output = 'file_stdout' + ci = nib.CommandLine(command="ls -l") + ci.terminal_output = "file_stdout" res = ci.run() - assert os.path.isfile('stdout.nipype') + assert os.path.isfile("stdout.nipype") assert name in res.runtime.stdout - tmpdir.join('stdout.nipype').remove(ignore_errors=True) + tmpdir.join("stdout.nipype").remove(ignore_errors=True) # Check stderr is written - ci = nib.CommandLine(command='ls -l') - ci.terminal_output = 'file_stderr' + ci = nib.CommandLine(command="ls -l") + ci.terminal_output = "file_stderr" res = ci.run() - assert os.path.isfile('stderr.nipype') - tmpdir.join('stderr.nipype').remove(ignore_errors=True) + assert os.path.isfile("stderr.nipype") + tmpdir.join("stderr.nipype").remove(ignore_errors=True) # Check outputs are thrown away - ci = nib.CommandLine(command='ls -l') - ci.terminal_output = 'none' + ci = nib.CommandLine(command="ls -l") + ci.terminal_output = "none" res = ci.run() - assert res.runtime.stdout == '' and \ - res.runtime.stderr == '' and \ - res.runtime.merged == '' + assert ( + res.runtime.stdout == "" + and res.runtime.stderr == "" + and res.runtime.merged == "" + ) # Check that new interfaces are set to default 'stream' - ci = nib.CommandLine(command='ls -l') + ci = nib.CommandLine(command="ls -l") res = ci.run() - assert ci.terminal_output == 'stream' - assert name in res.runtime.stdout and \ - res.runtime.stderr == '' + assert ci.terminal_output == "stream" + assert name in res.runtime.stdout and res.runtime.stderr == "" # Check only one file is generated - ci = nib.CommandLine(command='ls -l') - ci.terminal_output = 'file' + ci = nib.CommandLine(command="ls -l") + ci.terminal_output = "file" res = ci.run() - assert os.path.isfile('output.nipype') - assert name in res.runtime.merged and \ - res.runtime.stdout == '' and \ - res.runtime.stderr == '' - tmpdir.join('output.nipype').remove(ignore_errors=True) + assert os.path.isfile("output.nipype") + assert ( + name in res.runtime.merged + and res.runtime.stdout == "" + and res.runtime.stderr == "" + ) + 
tmpdir.join("output.nipype").remove(ignore_errors=True) # Check split files are generated - ci = nib.CommandLine(command='ls -l') - ci.terminal_output = 'file_split' + ci = nib.CommandLine(command="ls -l") + ci.terminal_output = "file_split" res = ci.run() - assert os.path.isfile('stdout.nipype') - assert os.path.isfile('stderr.nipype') + assert os.path.isfile("stdout.nipype") + assert os.path.isfile("stderr.nipype") assert name in res.runtime.stdout @@ -446,34 +450,34 @@ def test_global_CommandLine_output(tmpdir): """Ensures CommandLine.set_default_terminal_output works""" from nipype.interfaces.fsl import BET - ci = nib.CommandLine(command='ls -l') - assert ci.terminal_output == 'stream' # default case + ci = nib.CommandLine(command="ls -l") + assert ci.terminal_output == "stream" # default case ci = BET() - assert ci.terminal_output == 'stream' # default case + assert ci.terminal_output == "stream" # default case - nib.CommandLine.set_default_terminal_output('allatonce') - ci = nib.CommandLine(command='ls -l') - assert ci.terminal_output == 'allatonce' + nib.CommandLine.set_default_terminal_output("allatonce") + ci = nib.CommandLine(command="ls -l") + assert ci.terminal_output == "allatonce" - nib.CommandLine.set_default_terminal_output('file') - ci = nib.CommandLine(command='ls -l') - assert ci.terminal_output == 'file' + nib.CommandLine.set_default_terminal_output("file") + ci = nib.CommandLine(command="ls -l") + assert ci.terminal_output == "file" # Check default affects derived interfaces ci = BET() - assert ci.terminal_output == 'file' + assert ci.terminal_output == "file" def test_CommandLine_prefix(tmpdir): tmpdir.chdir() - oop = 'out/of/path' + oop = "out/of/path" os.makedirs(oop) - script_name = 'test_script.sh' + script_name = "test_script.sh" script_path = os.path.join(oop, script_name) - with open(script_path, 'w') as script_f: - script_f.write('#!/usr/bin/env bash\necho Success!') + with open(script_path, "w") as script_f: + script_f.write("#!/usr/bin/env bash\necho Success!") os.chmod(script_path, 0o755) ci = nib.CommandLine(command=script_name) @@ -481,19 +485,19 @@ def test_CommandLine_prefix(tmpdir): ci.run() class OOPCLI(nib.CommandLine): - _cmd_prefix = oop + '/' + _cmd_prefix = oop + "/" ci = OOPCLI(command=script_name) ci.run() class OOPShell(nib.CommandLine): - _cmd_prefix = 'bash {}/'.format(oop) + _cmd_prefix = "bash {}/".format(oop) ci = OOPShell(command=script_name) ci.run() class OOPBadShell(nib.CommandLine): - _cmd_prefix = 'shell_dne {}/'.format(oop) + _cmd_prefix = "shell_dne {}/".format(oop) ci = OOPBadShell(command=script_name) with pytest.raises(IOError): @@ -504,6 +508,7 @@ def test_runtime_checks(): class TestInterface(nib.BaseInterface): class input_spec(nib.TraitedSpec): a = nib.traits.Any() + class output_spec(nib.TraitedSpec): b = nib.traits.Any() @@ -516,7 +521,7 @@ def _run_interface(self, runtime): class BrokenRuntime(TestInterface): def _run_interface(self, runtime): - del runtime.__dict__['cwd'] + del runtime.__dict__["cwd"] return runtime with pytest.raises(RuntimeError): diff --git a/nipype/interfaces/base/tests/test_resource_monitor.py b/nipype/interfaces/base/tests/test_resource_monitor.py index a6c79b091b..47a515f64c 100644 --- a/nipype/interfaces/base/tests/test_resource_monitor.py +++ b/nipype/interfaces/base/tests/test_resource_monitor.py @@ -28,22 +28,26 @@ def use_resource_monitor(): class UseResourcesInputSpec(CommandLineInputSpec): mem_gb = traits.Float( - desc='Number of GB of RAM to use', argstr='-g %f', mandatory=True) + 
desc="Number of GB of RAM to use", argstr="-g %f", mandatory=True + ) n_procs = traits.Int( - desc='Number of threads to use', argstr='-p %d', mandatory=True) + desc="Number of threads to use", argstr="-p %d", mandatory=True + ) class UseResources(CommandLine): """ use_resources cmd interface """ + from nipype import __path__ + # Init attributes input_spec = UseResourcesInputSpec # Get path of executable exec_dir = os.path.realpath(__path__[0]) - exec_path = os.path.join(exec_dir, 'utils', 'tests', 'use_resources') + exec_path = os.path.join(exec_dir, "utils", "tests", "use_resources") # Init cmd _cmd = exec_path @@ -51,39 +55,41 @@ class UseResources(CommandLine): @pytest.mark.skip(reason="inconsistent readings") -@pytest.mark.skipif( - os.getenv('CI_SKIP_TEST', False), reason='disabled in CI tests') -@pytest.mark.parametrize("mem_gb,n_procs", [(0.5, 3), (2.2, 8), (0.8, 4), - (1.5, 1)]) +@pytest.mark.skipif(os.getenv("CI_SKIP_TEST", False), reason="disabled in CI tests") +@pytest.mark.parametrize("mem_gb,n_procs", [(0.5, 3), (2.2, 8), (0.8, 4), (1.5, 1)]) def test_cmdline_profiling(tmpdir, mem_gb, n_procs, use_resource_monitor): """ Test runtime profiler correctly records workflow RAM/CPUs consumption of a CommandLine-derived interface """ from nipype import config - config.set('monitoring', 'sample_frequency', '0.2') # Force sampling fast + + config.set("monitoring", "sample_frequency", "0.2") # Force sampling fast tmpdir.chdir() iface = UseResources(mem_gb=mem_gb, n_procs=n_procs) result = iface.run() - assert abs(mem_gb - result.runtime.mem_peak_gb - ) < 0.3, 'estimated memory error above .3GB' - assert int(result.runtime.cpu_percent / 100 + 0.2 - ) == n_procs, 'wrong number of threads estimated' + assert ( + abs(mem_gb - result.runtime.mem_peak_gb) < 0.3 + ), "estimated memory error above .3GB" + assert ( + int(result.runtime.cpu_percent / 100 + 0.2) == n_procs + ), "wrong number of threads estimated" @pytest.mark.skipif( - True, reason='test disabled temporarily, until funcion profiling works') -@pytest.mark.parametrize("mem_gb,n_procs", [(0.5, 3), (2.2, 8), (0.8, 4), - (1.5, 1)]) + True, reason="test disabled temporarily, until funcion profiling works" +) +@pytest.mark.parametrize("mem_gb,n_procs", [(0.5, 3), (2.2, 8), (0.8, 4), (1.5, 1)]) def test_function_profiling(tmpdir, mem_gb, n_procs, use_resource_monitor): """ Test runtime profiler correctly records workflow RAM/CPUs consumption of a Function interface """ from nipype import config - config.set('monitoring', 'sample_frequency', '0.2') # Force sampling fast + + config.set("monitoring", "sample_frequency", "0.2") # Force sampling fast tmpdir.chdir() iface = niu.Function(function=_use_resources) @@ -91,6 +97,7 @@ def test_function_profiling(tmpdir, mem_gb, n_procs, use_resource_monitor): iface.inputs.n_procs = n_procs result = iface.run() - assert abs(mem_gb - result.runtime.mem_peak_gb - ) < 0.3, 'estimated memory error above .3GB' + assert ( + abs(mem_gb - result.runtime.mem_peak_gb) < 0.3 + ), "estimated memory error above .3GB" assert int(result.runtime.cpu_percent / 100 + 0.2) >= n_procs diff --git a/nipype/interfaces/base/tests/test_specs.py b/nipype/interfaces/base/tests/test_specs.py index f1721fa226..d94f97ed1b 100644 --- a/nipype/interfaces/base/tests/test_specs.py +++ b/nipype/interfaces/base/tests/test_specs.py @@ -17,10 +17,10 @@ @pytest.fixture(scope="module") def setup_file(request, tmpdir_factory): - tmp_dir = tmpdir_factory.mktemp('files') - tmp_infile = tmp_dir.join('foo.txt') - with tmp_infile.open('w') as 
fp: - fp.writelines(['123456789']) + tmp_dir = tmpdir_factory.mktemp("files") + tmp_infile = tmp_dir.join("foo.txt") + with tmp_infile.open("w") as fp: + fp.writelines(["123456789"]) tmp_dir.chdir() @@ -29,7 +29,7 @@ def setup_file(request, tmpdir_factory): def test_TraitedSpec(): assert nib.TraitedSpec().get_hashval() - assert nib.TraitedSpec().__repr__() == '\n\n' + assert nib.TraitedSpec().__repr__() == "\n\n" class spec(nib.TraitedSpec): foo = nib.traits.Int @@ -41,14 +41,16 @@ class spec(nib.TraitedSpec): with pytest.raises(nib.traits.TraitError): specfunc(1) infields = spec(foo=1) - hashval = ([('foo', 1), ('goo', '0.0000000000')], - 'e89433b8c9141aa0fda2f8f4d662c047') + hashval = ( + [("foo", 1), ("goo", "0.0000000000")], + "e89433b8c9141aa0fda2f8f4d662c047", + ) assert infields.get_hashval() == hashval - assert infields.__repr__() == '\nfoo = 1\ngoo = 0.0\n' + assert infields.__repr__() == "\nfoo = 1\ngoo = 0.0\n" def test_TraitedSpec_tab_completion(): - bet_nd = Node(fsl.BET(), name='bet') + bet_nd = Node(fsl.BET(), name="bet") bet_interface = fsl.BET() bet_inputs = bet_nd.inputs.class_editable_traits() bet_outputs = bet_nd.outputs.class_editable_traits() @@ -64,15 +66,16 @@ def test_TraitedSpec_tab_completion(): @pytest.mark.skip def test_TraitedSpec_dynamic(): from pickle import dumps, loads + a = nib.BaseTraitedSpec() - a.add_trait('foo', nib.traits.Int) + a.add_trait("foo", nib.traits.Int) a.foo = 1 - assign_a = lambda: setattr(a, 'foo', 'a') + assign_a = lambda: setattr(a, "foo", "a") with pytest.raises(Exception): assign_a pkld_a = dumps(a) unpkld_a = loads(pkld_a) - assign_a_again = lambda: setattr(unpkld_a, 'foo', 'a') + assign_a_again = lambda: setattr(unpkld_a, "foo", "a") with pytest.raises(Exception): assign_a_again @@ -82,39 +85,43 @@ def extract_func(list_out): return list_out[0] # Define interface - func_interface = Function(input_names=["list_out"], - output_names=["out_file", "another_file"], - function=extract_func) + func_interface = Function( + input_names=["list_out"], + output_names=["out_file", "another_file"], + function=extract_func, + ) # Define node - list_extract = Node(Function( - input_names=["list_out"], output_names=["out_file"], - function=extract_func), name="list_extract") + list_extract = Node( + Function( + input_names=["list_out"], output_names=["out_file"], function=extract_func + ), + name="list_extract", + ) # Check __all__ for interface inputs expected_input = set(list_extract.inputs.editable_traits()) - assert(set(func_interface.inputs.__all__) == expected_input) + assert set(func_interface.inputs.__all__) == expected_input # Check __all__ for node inputs - assert(set(list_extract.inputs.__all__) == expected_input) + assert set(list_extract.inputs.__all__) == expected_input # Check __all__ for node outputs expected_output = set(list_extract.outputs.editable_traits()) - assert(set(list_extract.outputs.__all__) == expected_output) + assert set(list_extract.outputs.__all__) == expected_output # Add trait and retest - list_extract._interface._output_names.append('added_out_trait') - expected_output.add('added_out_trait') - assert(set(list_extract.outputs.__all__) == expected_output) + list_extract._interface._output_names.append("added_out_trait") + expected_output.add("added_out_trait") + assert set(list_extract.outputs.__all__) == expected_output def test_TraitedSpec_logic(): class spec3(nib.TraitedSpec): - _xor_inputs = ('foo', 'bar') + _xor_inputs = ("foo", "bar") - foo = nib.traits.Int(xor=_xor_inputs, desc='foo or bar, not both') - bar 
= nib.traits.Int(xor=_xor_inputs, desc='bar or foo, not both') - kung = nib.traits.Float( - requires=('foo', ), position=0, desc='kung foo') + foo = nib.traits.Int(xor=_xor_inputs, desc="foo or bar, not both") + bar = nib.traits.Int(xor=_xor_inputs, desc="bar or foo, not both") + kung = nib.traits.Float(requires=("foo",), position=0, desc="kung foo") class out3(nib.TraitedSpec): output = nib.traits.Int @@ -129,7 +136,7 @@ class MyInterface(nib.BaseInterface): # setattr(myif.inputs, 'kung', 10.0) myif.inputs.foo = 1 assert myif.inputs.foo == 1 - set_bar = lambda: setattr(myif.inputs, 'bar', 1) + set_bar = lambda: setattr(myif.inputs, "bar", 1) with pytest.raises(IOError): set_bar() assert myif.inputs.foo == 1 @@ -139,34 +146,34 @@ class MyInterface(nib.BaseInterface): def test_deprecation(): with warnings.catch_warnings(record=True) as w: - warnings.filterwarnings('always', '', UserWarning) + warnings.filterwarnings("always", "", UserWarning) class DeprecationSpec1(nib.TraitedSpec): - foo = nib.traits.Int(deprecated='0.1') + foo = nib.traits.Int(deprecated="0.1") spec_instance = DeprecationSpec1() - set_foo = lambda: setattr(spec_instance, 'foo', 1) + set_foo = lambda: setattr(spec_instance, "foo", 1) with pytest.raises(nib.TraitError): set_foo() - assert len(w) == 0, 'no warnings, just errors' + assert len(w) == 0, "no warnings, just errors" with warnings.catch_warnings(record=True) as w: - warnings.filterwarnings('always', '', UserWarning) + warnings.filterwarnings("always", "", UserWarning) class DeprecationSpec2(nib.TraitedSpec): - foo = nib.traits.Int(deprecated='100', new_name='bar') + foo = nib.traits.Int(deprecated="100", new_name="bar") spec_instance = DeprecationSpec2() - set_foo = lambda: setattr(spec_instance, 'foo', 1) + set_foo = lambda: setattr(spec_instance, "foo", 1) with pytest.raises(nib.TraitError): set_foo() - assert len(w) == 0, 'no warnings, just errors' + assert len(w) == 0, "no warnings, just errors" with warnings.catch_warnings(record=True) as w: - warnings.filterwarnings('always', '', UserWarning) + warnings.filterwarnings("always", "", UserWarning) class DeprecationSpec3(nib.TraitedSpec): - foo = nib.traits.Int(deprecated='1000', new_name='bar') + foo = nib.traits.Int(deprecated="1000", new_name="bar") bar = nib.traits.Int() spec_instance = DeprecationSpec3() @@ -176,14 +183,13 @@ class DeprecationSpec3(nib.TraitedSpec): except nib.TraitError: not_raised = False assert not_raised - assert len( - w) == 1, 'deprecated warning 1 %s' % [w1.message for w1 in w] + assert len(w) == 1, "deprecated warning 1 %s" % [w1.message for w1 in w] with warnings.catch_warnings(record=True) as w: - warnings.filterwarnings('always', '', UserWarning) + warnings.filterwarnings("always", "", UserWarning) class DeprecationSpec3(nib.TraitedSpec): - foo = nib.traits.Int(deprecated='1000', new_name='bar') + foo = nib.traits.Int(deprecated="1000", new_name="bar") bar = nib.traits.Int() spec_instance = DeprecationSpec3() @@ -195,8 +201,7 @@ class DeprecationSpec3(nib.TraitedSpec): assert not_raised assert spec_instance.foo == Undefined assert spec_instance.bar == 1 - assert len( - w) == 1, 'deprecated warning 2 %s' % [w1.message for w1 in w] + assert len(w) == 1, "deprecated warning 2 %s" % [w1.message for w1 in w] def test_namesource(setup_file): @@ -204,12 +209,10 @@ def test_namesource(setup_file): tmpd, nme, ext = split_filename(tmp_infile) class spec2(nib.CommandLineInputSpec): - moo = nib.File( - name_source=['doo'], hash_files=False, argstr="%s", position=2) + moo = 
nib.File(name_source=["doo"], hash_files=False, argstr="%s", position=2) doo = nib.File(exists=True, argstr="%s", position=1) goo = traits.Int(argstr="%d", position=4) - poo = nib.File( - name_source=['goo'], hash_files=False, argstr="%s", position=3) + poo = nib.File(name_source=["goo"], hash_files=False, argstr="%s", position=3) class TestName(nib.CommandLine): _cmd = "mycommand" @@ -218,10 +221,10 @@ class TestName(nib.CommandLine): testobj = TestName() testobj.inputs.doo = tmp_infile testobj.inputs.goo = 99 - assert '%s_generated' % nme in testobj.cmdline - assert '%d_generated' % testobj.inputs.goo in testobj.cmdline + assert "%s_generated" % nme in testobj.cmdline + assert "%d_generated" % testobj.inputs.goo in testobj.cmdline testobj.inputs.moo = "my_%s_template" - assert 'my_%s_template' % nme in testobj.cmdline + assert "my_%s_template" % nme in testobj.cmdline def test_chained_namesource(setup_file): @@ -231,13 +234,13 @@ def test_chained_namesource(setup_file): class spec2(nib.CommandLineInputSpec): doo = nib.File(exists=True, argstr="%s", position=1) moo = nib.File( - name_source=['doo'], + name_source=["doo"], hash_files=False, argstr="%s", position=2, - name_template='%s_mootpl') - poo = nib.File( - name_source=['moo'], hash_files=False, argstr="%s", position=3) + name_template="%s_mootpl", + ) + poo = nib.File(name_source=["moo"], hash_files=False, argstr="%s", position=3) class TestName(nib.CommandLine): _cmd = "mycommand" @@ -246,9 +249,9 @@ class TestName(nib.CommandLine): testobj = TestName() testobj.inputs.doo = tmp_infile res = testobj.cmdline - assert '%s' % tmp_infile in res - assert '%s_mootpl ' % nme in res - assert '%s_mootpl_generated' % nme in res + assert "%s" % tmp_infile in res + assert "%s_mootpl " % nme in res + assert "%s_mootpl_generated" % nme in res def test_cycle_namesource1(setup_file): @@ -257,15 +260,14 @@ def test_cycle_namesource1(setup_file): class spec3(nib.CommandLineInputSpec): moo = nib.File( - name_source=['doo'], + name_source=["doo"], hash_files=False, argstr="%s", position=1, - name_template='%s_mootpl') - poo = nib.File( - name_source=['moo'], hash_files=False, argstr="%s", position=2) - doo = nib.File( - name_source=['poo'], hash_files=False, argstr="%s", position=3) + name_template="%s_mootpl", + ) + poo = nib.File(name_source=["moo"], hash_files=False, argstr="%s", position=2) + doo = nib.File(name_source=["poo"], hash_files=False, argstr="%s", position=3) class TestCycle(nib.CommandLine): _cmd = "mycommand" @@ -287,15 +289,14 @@ def test_cycle_namesource2(setup_file): class spec3(nib.CommandLineInputSpec): moo = nib.File( - name_source=['doo'], + name_source=["doo"], hash_files=False, argstr="%s", position=1, - name_template='%s_mootpl') - poo = nib.File( - name_source=['moo'], hash_files=False, argstr="%s", position=2) - doo = nib.File( - name_source=['poo'], hash_files=False, argstr="%s", position=3) + name_template="%s_mootpl", + ) + poo = nib.File(name_source=["moo"], hash_files=False, argstr="%s", position=2) + doo = nib.File(name_source=["poo"], hash_files=False, argstr="%s", position=3) class TestCycle(nib.CommandLine): _cmd = "mycommand" @@ -313,9 +314,9 @@ class TestCycle(nib.CommandLine): print(res) assert not_raised - assert '%s' % tmp_infile in res - assert '%s_generated' % nme in res - assert '%s_generated_mootpl' % nme in res + assert "%s" % tmp_infile in res + assert "%s_generated" % nme in res + assert "%s_generated_mootpl" % nme in res def test_namesource_constraints(setup_file): @@ -324,30 +325,30 @@ def 
test_namesource_constraints(setup_file): class constrained_spec(nib.CommandLineInputSpec): in_file = nib.File(argstr="%s", position=1) - threshold = traits.Float( - argstr="%g", - xor=['mask_file'], - position=2) + threshold = traits.Float(argstr="%g", xor=["mask_file"], position=2) mask_file = nib.File( argstr="%s", - name_source=['in_file'], - name_template='%s_mask', + name_source=["in_file"], + name_template="%s_mask", keep_extension=True, - xor=['threshold'], - position=2) + xor=["threshold"], + position=2, + ) out_file1 = nib.File( argstr="%s", - name_source=['in_file'], - name_template='%s_out1', + name_source=["in_file"], + name_template="%s_out1", keep_extension=True, - position=3) + position=3, + ) out_file2 = nib.File( argstr="%s", - name_source=['in_file'], - name_template='%s_out2', + name_source=["in_file"], + name_template="%s_out2", keep_extension=True, - requires=['threshold'], - position=4) + requires=["threshold"], + position=4, + ) class TestConstrained(nib.CommandLine): _cmd = "mycommand" @@ -356,15 +357,15 @@ class TestConstrained(nib.CommandLine): tc = TestConstrained() # name_source undefined, so template traits remain undefined - assert tc.cmdline == 'mycommand' + assert tc.cmdline == "mycommand" # mask_file and out_file1 enabled by name_source definition tc.inputs.in_file = os.path.basename(tmp_infile) - assert tc.cmdline == 'mycommand foo.txt foo_mask.txt foo_out1.txt' + assert tc.cmdline == "mycommand foo.txt foo_mask.txt foo_out1.txt" # mask_file disabled by threshold, out_file2 enabled by threshold - tc.inputs.threshold = 10. - assert tc.cmdline == 'mycommand foo.txt 10 foo_out1.txt foo_out2.txt' + tc.inputs.threshold = 10.0 + assert tc.cmdline == "mycommand foo.txt 10 foo_out1.txt foo_out2.txt" def test_TraitedSpec_withFile(setup_file): @@ -377,8 +378,8 @@ class spec2(nib.TraitedSpec): doo = nib.traits.List(nib.File(exists=True)) infields = spec2(moo=tmp_infile, doo=[tmp_infile]) - hashval = infields.get_hashval(hash_method='content') - assert hashval[1] == 'a00e9ee24f5bfa9545a515b7a759886b' + hashval = infields.get_hashval(hash_method="content") + assert hashval[1] == "a00e9ee24f5bfa9545a515b7a759886b" def test_TraitedSpec_withNoFileHashing(setup_file): @@ -391,22 +392,22 @@ class spec2(nib.TraitedSpec): doo = nib.traits.List(nib.File(exists=True)) infields = spec2(moo=nme, doo=[tmp_infile]) - hashval = infields.get_hashval(hash_method='content') - assert hashval[1] == '8da4669ff5d72f670a46ea3e7a203215' + hashval = infields.get_hashval(hash_method="content") + assert hashval[1] == "8da4669ff5d72f670a46ea3e7a203215" class spec3(nib.TraitedSpec): moo = nib.File(exists=True, name_source="doo") doo = nib.traits.List(nib.File(exists=True)) infields = spec3(moo=nme, doo=[tmp_infile]) - hashval1 = infields.get_hashval(hash_method='content') + hashval1 = infields.get_hashval(hash_method="content") class spec4(nib.TraitedSpec): moo = nib.File(exists=True) doo = nib.traits.List(nib.File(exists=True)) infields = spec4(moo=nme, doo=[tmp_infile]) - hashval2 = infields.get_hashval(hash_method='content') + hashval2 = infields.get_hashval(hash_method="content") assert hashval1[1] != hashval2[1] @@ -414,30 +415,29 @@ def test_ImageFile(): x = nib.BaseInterface().inputs # setup traits - x.add_trait('nifti', nib.ImageFile(types=['nifti1', 'dicom'])) - x.add_trait('anytype', nib.ImageFile()) + x.add_trait("nifti", nib.ImageFile(types=["nifti1", "dicom"])) + x.add_trait("anytype", nib.ImageFile()) with pytest.raises(ValueError): - x.add_trait('newtype', 
nib.ImageFile(types=['nifti10'])) - x.add_trait('nocompress', - nib.ImageFile(types=['mgh'], allow_compressed=False)) + x.add_trait("newtype", nib.ImageFile(types=["nifti10"])) + x.add_trait("nocompress", nib.ImageFile(types=["mgh"], allow_compressed=False)) with pytest.raises(nib.TraitError): - x.nifti = 'test.mgz' - x.nifti = 'test.nii' - x.anytype = 'test.xml' + x.nifti = "test.mgz" + x.nifti = "test.nii" + x.anytype = "test.xml" with pytest.raises(nib.TraitError): - x.nocompress = 'test.mgz' - x.nocompress = 'test.mgh' + x.nocompress = "test.mgz" + x.nocompress = "test.mgh" def test_filecopy_info(): class InputSpec(nib.TraitedSpec): - foo = nib.traits.Int(desc='a random int') - goo = nib.traits.Int(desc='a random int', mandatory=True) - moo = nib.traits.Int(desc='a random int', mandatory=False) - hoo = nib.traits.Int(desc='a random int', usedefault=True) - zoo = nib.File(desc='a file', copyfile=False) - woo = nib.File(desc='a file', copyfile=True) + foo = nib.traits.Int(desc="a random int") + goo = nib.traits.Int(desc="a random int", mandatory=True) + moo = nib.traits.Int(desc="a random int", mandatory=False) + hoo = nib.traits.Int(desc="a random int", usedefault=True) + zoo = nib.File(desc="a file", copyfile=False) + woo = nib.File(desc="a file", copyfile=True) class DerivedInterface(nib.BaseInterface): input_spec = InputSpec @@ -445,16 +445,16 @@ class DerivedInterface(nib.BaseInterface): def normalize_filenames(self): """A mock normalize_filenames for freesurfer interfaces that have one""" - self.inputs.zoo = 'normalized_filename.ext' + self.inputs.zoo = "normalized_filename.ext" assert get_filecopy_info(nib.BaseInterface) == [] # Test on interface class, not instantiated info = get_filecopy_info(DerivedInterface) - assert info[0]['key'] == 'woo' - assert info[0]['copy'] - assert info[1]['key'] == 'zoo' - assert not info[1]['copy'] + assert info[0]["key"] == "woo" + assert info[0]["copy"] + assert info[1]["key"] == "zoo" + assert not info[1]["copy"] info = None # Test with instantiated interface @@ -464,9 +464,9 @@ def normalize_filenames(self): # After the first call to get_filecopy_info zoo is defined info = get_filecopy_info(derived) # Ensure that normalize_filenames was called - assert derived.inputs.zoo == 'normalized_filename.ext' + assert derived.inputs.zoo == "normalized_filename.ext" # Check the results are consistent - assert info[0]['key'] == 'woo' - assert info[0]['copy'] - assert info[1]['key'] == 'zoo' - assert not info[1]['copy'] + assert info[0]["key"] == "woo" + assert info[0]["copy"] + assert info[1]["key"] == "zoo" + assert not info[1]["copy"] diff --git a/nipype/interfaces/base/tests/test_support.py b/nipype/interfaces/base/tests/test_support.py index fbd6dcc209..878794b04f 100644 --- a/nipype/interfaces/base/tests/test_support.py +++ b/nipype/interfaces/base/tests/test_support.py @@ -10,7 +10,7 @@ from ... 
import base as nib -@pytest.mark.parametrize("args", [{}, {'a': 1, 'b': [2, 3]}]) +@pytest.mark.parametrize("args", [{}, {"a": 1, "b": [2, 3]}]) def test_bunch(args): b = nib.Bunch(**args) assert b.__dict__ == args @@ -33,25 +33,24 @@ def test_bunch_methods(): b.update(a=3) newb = b.dictcopy() assert b.a == 3 - assert b.get('a') == 3 - assert b.get('badkey', 'otherthing') == 'otherthing' + assert b.get("a") == 3 + assert b.get("badkey", "otherthing") == "otherthing" assert b != newb assert type(dict()) == type(newb) - assert newb['a'] == 3 + assert newb["a"] == 3 def test_bunch_hash(): # NOTE: Since the path to the json file is included in the Bunch, # the hash will be unique to each machine. - json_pth = pkgrf('nipype', - os.path.join('testing', 'data', 'realign_json.json')) + json_pth = pkgrf("nipype", os.path.join("testing", "data", "realign_json.json")) - b = nib.Bunch(infile=json_pth, otherthing='blue', yat=True) + b = nib.Bunch(infile=json_pth, otherthing="blue", yat=True) newbdict, bhash = b._get_bunch_hash() - assert bhash == 'd1f46750044c3de102efc847720fc35f' + assert bhash == "d1f46750044c3de102efc847720fc35f" # Make sure the hash stored in the json file for `infile` is correct. jshash = md5() - with open(json_pth, 'r') as fp: - jshash.update(fp.read().encode('utf-8')) - assert newbdict['infile'][0][1] == jshash.hexdigest() - assert newbdict['yat'] is True + with open(json_pth, "r") as fp: + jshash.update(fp.read().encode("utf-8")) + assert newbdict["infile"][0][1] == jshash.hexdigest() + assert newbdict["yat"] is True diff --git a/nipype/interfaces/base/tests/test_traits_extension.py b/nipype/interfaces/base/tests/test_traits_extension.py index ac9c36adf7..ec0574ad9c 100644 --- a/nipype/interfaces/base/tests/test_traits_extension.py +++ b/nipype/interfaces/base/tests/test_traits_extension.py @@ -16,8 +16,11 @@ class _test_spec(nib.TraitedSpec): g = nib.traits.Either(nib.File, nib.Str) h = nib.Str i = nib.traits.Either(nib.File, nib.traits.Tuple(nib.File, nib.traits.Int)) - j = nib.traits.Either(nib.File, nib.traits.Tuple(nib.File, nib.traits.Int), - nib.traits.Dict(nib.Str, nib.File())) + j = nib.traits.Either( + nib.File, + nib.traits.Tuple(nib.File, nib.traits.Int), + nib.traits.Dict(nib.Str, nib.File()), + ) k = nib.DictStrStr @@ -25,286 +28,295 @@ def test_rebase_resolve_path_traits(): """Check rebase_path_traits and resolve_path_traits and idempotence.""" spec = _test_spec() - v = '/some/path/f1.txt' - a = rebase_path_traits(spec.trait('a'), v, '/some/path') - assert a == Path('f1.txt') + v = "/some/path/f1.txt" + a = rebase_path_traits(spec.trait("a"), v, "/some/path") + assert a == Path("f1.txt") # Idempotence - assert rebase_path_traits(spec.trait('a'), a, '/some/path') == a + assert rebase_path_traits(spec.trait("a"), a, "/some/path") == a - a = resolve_path_traits(spec.trait('a'), a, '/some/path') + a = resolve_path_traits(spec.trait("a"), a, "/some/path") assert a == Path(v) # Idempotence - assert resolve_path_traits(spec.trait('a'), a, '/some/path') == a + assert resolve_path_traits(spec.trait("a"), a, "/some/path") == a - a = rebase_path_traits(spec.trait('a'), v, '/some/other/path') + a = rebase_path_traits(spec.trait("a"), v, "/some/other/path") assert a == Path(v) # Idempotence - assert rebase_path_traits(spec.trait('a'), a, '/some/other/path') == a + assert rebase_path_traits(spec.trait("a"), a, "/some/other/path") == a - a = resolve_path_traits(spec.trait('a'), a, '/some/path') + a = resolve_path_traits(spec.trait("a"), a, "/some/path") assert a == Path(v) # 
Idempotence - assert resolve_path_traits(spec.trait('a'), a, '/some/path') == a + assert resolve_path_traits(spec.trait("a"), a, "/some/path") == a - v = ('/some/path/f1.txt', '/some/path/f2.txt') - b = rebase_path_traits(spec.trait('b'), v, '/some/path') - assert b == (Path('f1.txt'), Path('f2.txt')) + v = ("/some/path/f1.txt", "/some/path/f2.txt") + b = rebase_path_traits(spec.trait("b"), v, "/some/path") + assert b == (Path("f1.txt"), Path("f2.txt")) # Idempotence - assert rebase_path_traits(spec.trait('b'), b, '/some/path') == b + assert rebase_path_traits(spec.trait("b"), b, "/some/path") == b - b = resolve_path_traits(spec.trait('b'), b, '/some/path') + b = resolve_path_traits(spec.trait("b"), b, "/some/path") assert b == (Path(v[0]), Path(v[1])) # Idempotence - assert resolve_path_traits(spec.trait('b'), b, '/some/path') == b + assert resolve_path_traits(spec.trait("b"), b, "/some/path") == b - v = ['/some/path/f1.txt', '/some/path/f2.txt', '/some/path/f3.txt'] - c = rebase_path_traits(spec.trait('c'), v, '/some/path') - assert c == [Path('f1.txt'), Path('f2.txt'), Path('f3.txt')] + v = ["/some/path/f1.txt", "/some/path/f2.txt", "/some/path/f3.txt"] + c = rebase_path_traits(spec.trait("c"), v, "/some/path") + assert c == [Path("f1.txt"), Path("f2.txt"), Path("f3.txt")] # Idempotence - assert rebase_path_traits(spec.trait('c'), c, '/some/path') == c + assert rebase_path_traits(spec.trait("c"), c, "/some/path") == c - c = resolve_path_traits(spec.trait('c'), c, '/some/path') + c = resolve_path_traits(spec.trait("c"), c, "/some/path") assert c == [Path(vp) for vp in v] # Idempotence - assert resolve_path_traits(spec.trait('c'), c, '/some/path') == c + assert resolve_path_traits(spec.trait("c"), c, "/some/path") == c v = 2.0 - d = rebase_path_traits(spec.trait('d'), v, '/some/path') + d = rebase_path_traits(spec.trait("d"), v, "/some/path") assert d == v - d = resolve_path_traits(spec.trait('d'), d, '/some/path') + d = resolve_path_traits(spec.trait("d"), d, "/some/path") assert d == v - v = '/some/path/either.txt' - d = rebase_path_traits(spec.trait('d'), v, '/some/path') - assert d == Path('either.txt') + v = "/some/path/either.txt" + d = rebase_path_traits(spec.trait("d"), v, "/some/path") + assert d == Path("either.txt") # Idempotence - assert rebase_path_traits(spec.trait('d'), d, '/some/path') == d + assert rebase_path_traits(spec.trait("d"), d, "/some/path") == d - d = resolve_path_traits(spec.trait('d'), d, '/some/path') + d = resolve_path_traits(spec.trait("d"), d, "/some/path") assert d == Path(v) # Idempotence - assert resolve_path_traits(spec.trait('d'), d, '/some/path') == d + assert resolve_path_traits(spec.trait("d"), d, "/some/path") == d - v = ['/some/path/f1.txt', '/some/path/f2.txt', '/some/path/f3.txt'] - e = rebase_path_traits(spec.trait('e'), v, '/some/path') - assert e == [Path('f1.txt'), Path('f2.txt'), Path('f3.txt')] + v = ["/some/path/f1.txt", "/some/path/f2.txt", "/some/path/f3.txt"] + e = rebase_path_traits(spec.trait("e"), v, "/some/path") + assert e == [Path("f1.txt"), Path("f2.txt"), Path("f3.txt")] # Idempotence - assert rebase_path_traits(spec.trait('e'), e, '/some/path') == e + assert rebase_path_traits(spec.trait("e"), e, "/some/path") == e - e = resolve_path_traits(spec.trait('e'), e, '/some/path') + e = resolve_path_traits(spec.trait("e"), e, "/some/path") assert e == [Path(vp) for vp in v] # Idempotence - assert resolve_path_traits(spec.trait('e'), e, '/some/path') == e + assert resolve_path_traits(spec.trait("e"), e, "/some/path") == e - v = 
[['/some/path/f1.txt', '/some/path/f2.txt'], [['/some/path/f3.txt']]] - e = rebase_path_traits(spec.trait('e'), v, '/some/path') - assert e == [[Path('f1.txt'), Path('f2.txt')], [[Path('f3.txt')]]] + v = [["/some/path/f1.txt", "/some/path/f2.txt"], [["/some/path/f3.txt"]]] + e = rebase_path_traits(spec.trait("e"), v, "/some/path") + assert e == [[Path("f1.txt"), Path("f2.txt")], [[Path("f3.txt")]]] # Idempotence - assert rebase_path_traits(spec.trait('e'), e, '/some/path') == e + assert rebase_path_traits(spec.trait("e"), e, "/some/path") == e - e = resolve_path_traits(spec.trait('e'), e, '/some/path') - assert e == [[[Path(vpp) for vpp in vp] if isinstance(vp, list) else Path(vp) for vp in inner] - for inner in v] + e = resolve_path_traits(spec.trait("e"), e, "/some/path") + assert e == [ + [ + [Path(vpp) for vpp in vp] if isinstance(vp, list) else Path(vp) + for vp in inner + ] + for inner in v + ] # Idempotence - assert resolve_path_traits(spec.trait('e'), e, '/some/path') == e + assert resolve_path_traits(spec.trait("e"), e, "/some/path") == e # These are Str - no rebasing/resolving should happen - v = [['/some/path/f1.txt', '/some/path/f2.txt'], [['/some/path/f3.txt']]] - ee = rebase_path_traits(spec.trait('ee'), v, '/some/path') + v = [["/some/path/f1.txt", "/some/path/f2.txt"], [["/some/path/f3.txt"]]] + ee = rebase_path_traits(spec.trait("ee"), v, "/some/path") assert ee == v # Idempotence - assert rebase_path_traits(spec.trait('ee'), ee, '/some/path') == ee + assert rebase_path_traits(spec.trait("ee"), ee, "/some/path") == ee - ee = resolve_path_traits(spec.trait('ee'), [['f1.txt', 'f2.txt'], [['f3.txt']]], '/some/path') - assert ee == [['f1.txt', 'f2.txt'], [['f3.txt']]] + ee = resolve_path_traits( + spec.trait("ee"), [["f1.txt", "f2.txt"], [["f3.txt"]]], "/some/path" + ) + assert ee == [["f1.txt", "f2.txt"], [["f3.txt"]]] # Idempotence - assert resolve_path_traits(spec.trait('ee'), ee, '/some/path') == ee + assert resolve_path_traits(spec.trait("ee"), ee, "/some/path") == ee - v = {'1': '/some/path/f1.txt'} - f = rebase_path_traits(spec.trait('f'), v, '/some') - assert f == {'1': Path('path/f1.txt')} + v = {"1": "/some/path/f1.txt"} + f = rebase_path_traits(spec.trait("f"), v, "/some") + assert f == {"1": Path("path/f1.txt")} # Idempotence - assert rebase_path_traits(spec.trait('f'), f, '/some') == f + assert rebase_path_traits(spec.trait("f"), f, "/some") == f - f = resolve_path_traits(spec.trait('f'), f, '/some') + f = resolve_path_traits(spec.trait("f"), f, "/some") assert f == {k: Path(val) for k, val in v.items()} # Idempotence - assert resolve_path_traits(spec.trait('f'), f, '/some') == f + assert resolve_path_traits(spec.trait("f"), f, "/some") == f # Either(Str, File): passing in path-like apply manipulation - v = '/some/path/either.txt' - g = rebase_path_traits(spec.trait('g'), v, '/some/path') - assert g == Path('either.txt') + v = "/some/path/either.txt" + g = rebase_path_traits(spec.trait("g"), v, "/some/path") + assert g == Path("either.txt") # Idempotence - assert rebase_path_traits(spec.trait('g'), g, '/some/path') == g + assert rebase_path_traits(spec.trait("g"), g, "/some/path") == g - g = resolve_path_traits(spec.trait('g'), g, '/some/path') + g = resolve_path_traits(spec.trait("g"), g, "/some/path") assert g == Path(v) # Idempotence - assert resolve_path_traits(spec.trait('g'), g, '/some/path') == g + assert resolve_path_traits(spec.trait("g"), g, "/some/path") == g - g = rebase_path_traits(spec.trait('g'), v, '/some') - assert g == Path('path/either.txt') + g 
= rebase_path_traits(spec.trait("g"), v, "/some") + assert g == Path("path/either.txt") # Idempotence - assert rebase_path_traits(spec.trait('g'), g, '/some/path') == g + assert rebase_path_traits(spec.trait("g"), g, "/some/path") == g - g = resolve_path_traits(spec.trait('g'), g, '/some') + g = resolve_path_traits(spec.trait("g"), g, "/some") assert g == Path(v) # Idempotence - assert resolve_path_traits(spec.trait('g'), g, '/some/path') == g + assert resolve_path_traits(spec.trait("g"), g, "/some/path") == g # Either(Str, File): passing str discards File - v = 'either.txt' - g = rebase_path_traits(spec.trait('g'), v, '/some/path') + v = "either.txt" + g = rebase_path_traits(spec.trait("g"), v, "/some/path") assert g == v # Idempotence - assert rebase_path_traits(spec.trait('g'), g, '/some/path') == g + assert rebase_path_traits(spec.trait("g"), g, "/some/path") == g # This is a problematic case, it is impossible to know whether this # was meant to be a string or a file. # In this implementation, strings take precedence - g = resolve_path_traits(spec.trait('g'), g, '/some/path') + g = resolve_path_traits(spec.trait("g"), g, "/some/path") assert g == v # Idempotence - assert resolve_path_traits(spec.trait('g'), g, '/some/path') == g + assert resolve_path_traits(spec.trait("g"), g, "/some/path") == g - v = 'string' - g = rebase_path_traits(spec.trait('g'), v, '/some') + v = "string" + g = rebase_path_traits(spec.trait("g"), v, "/some") assert g == v # Idempotence - assert rebase_path_traits(spec.trait('g'), g, '/some') == g + assert rebase_path_traits(spec.trait("g"), g, "/some") == g # This is a problematic case, it is impossible to know whether this # was meant to be a string or a file. - g = resolve_path_traits(spec.trait('g'), v, '/some') + g = resolve_path_traits(spec.trait("g"), v, "/some") assert g == v # Idempotence - assert resolve_path_traits(spec.trait('g'), g, '/some') == g + assert resolve_path_traits(spec.trait("g"), g, "/some") == g - g = rebase_path_traits(spec.trait('g'), v, '/some/path') + g = rebase_path_traits(spec.trait("g"), v, "/some/path") assert g == v # You dont want this one to be a Path # Idempotence - assert rebase_path_traits(spec.trait('g'), g, '/some/path') == g + assert rebase_path_traits(spec.trait("g"), g, "/some/path") == g # This is a problematic case, it is impossible to know whether this # was meant to be a string or a file. 
- g = resolve_path_traits(spec.trait('g'), g, '/some/path') + g = resolve_path_traits(spec.trait("g"), g, "/some/path") assert g == v # You dont want this one to be a Path # Idempotence - assert resolve_path_traits(spec.trait('g'), g, '/some/path') == g + assert resolve_path_traits(spec.trait("g"), g, "/some/path") == g - h = rebase_path_traits(spec.trait('h'), v, '/some/path') + h = rebase_path_traits(spec.trait("h"), v, "/some/path") assert h == v # Idempotence - assert rebase_path_traits(spec.trait('h'), h, '/some/path') == h + assert rebase_path_traits(spec.trait("h"), h, "/some/path") == h - h = resolve_path_traits(spec.trait('h'), h, '/some/path') + h = resolve_path_traits(spec.trait("h"), h, "/some/path") assert h == v # Idempotence - assert resolve_path_traits(spec.trait('h'), h, '/some/path') == h + assert resolve_path_traits(spec.trait("h"), h, "/some/path") == h - v = '/some/path/either/file.txt' - i = rebase_path_traits(spec.trait('i'), v, '/some/path') - assert i == Path('either/file.txt') + v = "/some/path/either/file.txt" + i = rebase_path_traits(spec.trait("i"), v, "/some/path") + assert i == Path("either/file.txt") # Idempotence - assert rebase_path_traits(spec.trait('i'), i, '/some/path') == i + assert rebase_path_traits(spec.trait("i"), i, "/some/path") == i - i = resolve_path_traits(spec.trait('i'), i, '/some/path') + i = resolve_path_traits(spec.trait("i"), i, "/some/path") assert i == Path(v) # Idempotence - assert resolve_path_traits(spec.trait('i'), i, '/some/path') == i + assert resolve_path_traits(spec.trait("i"), i, "/some/path") == i - v = ('/some/path/either/tuple/file.txt', 2) - i = rebase_path_traits(spec.trait('i'), v, '/some/path') - assert i == (Path('either/tuple/file.txt'), 2) + v = ("/some/path/either/tuple/file.txt", 2) + i = rebase_path_traits(spec.trait("i"), v, "/some/path") + assert i == (Path("either/tuple/file.txt"), 2) # Idempotence - assert rebase_path_traits(spec.trait('i'), i, '/some/path') == i + assert rebase_path_traits(spec.trait("i"), i, "/some/path") == i - i = resolve_path_traits(spec.trait('i'), i, '/some/path') + i = resolve_path_traits(spec.trait("i"), i, "/some/path") assert i == (Path(v[0]), v[1]) # Idempotence - assert resolve_path_traits(spec.trait('i'), i, '/some/path') == i + assert resolve_path_traits(spec.trait("i"), i, "/some/path") == i - v = '/some/path/either/file.txt' - j = rebase_path_traits(spec.trait('j'), v, '/some/path') - assert j == Path('either/file.txt') + v = "/some/path/either/file.txt" + j = rebase_path_traits(spec.trait("j"), v, "/some/path") + assert j == Path("either/file.txt") # Idempotence - assert rebase_path_traits(spec.trait('j'), j, '/some/path') == j + assert rebase_path_traits(spec.trait("j"), j, "/some/path") == j - j = resolve_path_traits(spec.trait('j'), j, '/some/path') + j = resolve_path_traits(spec.trait("j"), j, "/some/path") assert j == Path(v) # Idempotence - assert resolve_path_traits(spec.trait('j'), j, '/some/path') == j + assert resolve_path_traits(spec.trait("j"), j, "/some/path") == j - v = ('/some/path/either/tuple/file.txt', 2) - j = rebase_path_traits(spec.trait('j'), ('/some/path/either/tuple/file.txt', 2), '/some/path') - assert j == (Path('either/tuple/file.txt'), 2) + v = ("/some/path/either/tuple/file.txt", 2) + j = rebase_path_traits( + spec.trait("j"), ("/some/path/either/tuple/file.txt", 2), "/some/path" + ) + assert j == (Path("either/tuple/file.txt"), 2) # Idempotence - assert rebase_path_traits(spec.trait('j'), j, '/some/path') == j + assert 
rebase_path_traits(spec.trait("j"), j, "/some/path") == j

-    j = resolve_path_traits(spec.trait('j'), j, '/some/path')
+    j = resolve_path_traits(spec.trait("j"), j, "/some/path")
     assert j == (Path(v[0]), v[1])
     # Idempotence
-    assert resolve_path_traits(spec.trait('j'), j, '/some/path') == j
+    assert resolve_path_traits(spec.trait("j"), j, "/some/path") == j
 
-    v = {'a': '/some/path/either/dict/file.txt'}
-    j = rebase_path_traits(spec.trait('j'), v, '/some/path')
-    assert j == {'a': Path('either/dict/file.txt')}
+    v = {"a": "/some/path/either/dict/file.txt"}
+    j = rebase_path_traits(spec.trait("j"), v, "/some/path")
+    assert j == {"a": Path("either/dict/file.txt")}
     # Idempotence
-    assert rebase_path_traits(spec.trait('j'), j, '/some/path') == j
+    assert rebase_path_traits(spec.trait("j"), j, "/some/path") == j
 
-    j = resolve_path_traits(spec.trait('j'), j, '/some/path')
+    j = resolve_path_traits(spec.trait("j"), j, "/some/path")
     assert j == {k: Path(val) for k, val in v.items()}
     # Idempotence
-    assert resolve_path_traits(spec.trait('j'), j, '/some/path') == j
+    assert resolve_path_traits(spec.trait("j"), j, "/some/path") == j
 
-    v = {'path': '/some/path/f1.txt'}
-    k = rebase_path_traits(spec.trait('k'), v, '/some/path')
+    v = {"path": "/some/path/f1.txt"}
+    k = rebase_path_traits(spec.trait("k"), v, "/some/path")
     assert k == v
     # Idempotence
-    assert rebase_path_traits(spec.trait('k'), k, '/some/path') == k
+    assert rebase_path_traits(spec.trait("k"), k, "/some/path") == k
 
-    k = resolve_path_traits(spec.trait('k'), k, '/some/path')
+    k = resolve_path_traits(spec.trait("k"), k, "/some/path")
     assert k == v
diff --git a/nipype/interfaces/base/traits_extension.py b/nipype/interfaces/base/traits_extension.py
index 2e176e8bd4..0ffab07a03 100644
--- a/nipype/interfaces/base/traits_extension.py
+++ b/nipype/interfaces/base/traits_extension.py
@@ -31,20 +31,20 @@
 from pathlib import Path
 from ...utils.filemanip import path_resolve
 
-if traits_version < '3.7.0':
-    raise ImportError('Traits version 3.7.0 or higher must be installed')
+if traits_version < "3.7.0":
+    raise ImportError("Traits version 3.7.0 or higher must be installed")
 
 IMG_FORMATS = {
-    'afni': ('.HEAD', '.BRIK'),
-    'cifti2': ('.nii', '.nii.gz'),
-    'dicom': ('.dcm', '.IMA', '.tar', '.tar.gz'),
-    'gifti': ('.gii', '.gii.gz'),
-    'mgh': ('.mgh', '.mgz', '.mgh.gz'),
-    'nifti1': ('.nii', '.nii.gz', '.hdr', '.img', '.img.gz'),
-    'nifti2': ('.nii', '.nii.gz'),
-    'nrrd': ('.nrrd', '.nhdr'),
+    "afni": (".HEAD", ".BRIK"),
+    "cifti2": (".nii", ".nii.gz"),
+    "dicom": (".dcm", ".IMA", ".tar", ".tar.gz"),
+    "gifti": (".gii", ".gii.gz"),
+    "mgh": (".mgh", ".mgz", ".mgh.gz"),
+    "nifti1": (".nii", ".nii.gz", ".hdr", ".img", ".img.gz"),
+    "nifti2": (".nii", ".nii.gz"),
+    "nrrd": (".nrrd", ".nhdr"),
 }
-IMG_ZIP_FMT = set(['.nii.gz', 'tar.gz', '.gii.gz', '.mgz', '.mgh.gz', 'img.gz'])
+IMG_ZIP_FMT = set([".nii.gz", "tar.gz", ".gii.gz", ".mgz", ".mgh.gz", "img.gz"])
 
 """
 The functions that pop-up the Traits GUIs, edit_traits and
@@ -98,17 +98,17 @@ class BasePath(TraitType):
     @property
     def info_text(self):
         """Create the trait's general description."""
-        info_text = 'a pathlike object or string'
+        info_text = "a pathlike object or string"
         if any((self.exists, self._is_file, self._is_dir)):
-            info_text += ' representing a'
+            info_text += " representing a"
             if self.exists:
-                info_text += 'n existing'
+                info_text += "n existing"
             if self._is_file:
-                info_text += ' file'
+                info_text += " file"
             elif self._is_dir:
-                info_text += ' directory'
+                info_text += " directory"
             else:
-                info_text += ' file or directory'
+                info_text += " file or directory"
         return info_text
 
     def __init__(self, value=Undefined, exists=False, resolve=False, **metadata):
@@ -142,6 +142,7 @@ def validate(self, objekt, name, value, return_pathlike=False):
         return value
 
+
 class Directory(BasePath):
     """
     Defines a trait whose value must be a directory path.
@@ -284,8 +285,15 @@ class File(BasePath):
     _is_file = True
     _exts = None
 
-    def __init__(self, value=NoDefaultSpecified, exists=False, resolve=False,
-                 allow_compressed=True, extensions=None, **metadata):
+    def __init__(
+        self,
+        value=NoDefaultSpecified,
+        exists=False,
+        resolve=False,
+        allow_compressed=True,
+        extensions=None,
+        **metadata
+    ):
         """Create a File trait."""
         if extensions is not None:
             if isinstance(extensions, (bytes, str)):
@@ -294,11 +302,22 @@ def __init__(self, value=NoDefaultSpecified, exists=False, resolve=False,
             if allow_compressed is False:
                 extensions = list(set(extensions) - IMG_ZIP_FMT)
 
-            self._exts = sorted(set(['.%s' % ext if not ext.startswith('.') else ext
-                                     for ext in extensions]))
-
-        super(File, self).__init__(value=value, exists=exists, resolve=resolve,
-                                   extensions=self._exts, **metadata)
+            self._exts = sorted(
+                set(
+                    [
+                        ".%s" % ext if not ext.startswith(".") else ext
+                        for ext in extensions
+                    ]
+                )
+            )
+
+        super(File, self).__init__(
+            value=value,
+            exists=exists,
+            resolve=resolve,
+            extensions=self._exts,
+            **metadata
+        )
 
     def validate(self, objekt, name, value, return_pathlike=False):
         """Validate a value change."""
@@ -317,8 +336,14 @@ class ImageFile(File):
     """Defines a trait whose value must be a known neuroimaging file."""
 
-    def __init__(self, value=NoDefaultSpecified, exists=False,
-                 resolve=False, types=None, **metadata):
+    def __init__(
+        self,
+        value=NoDefaultSpecified,
+        exists=False,
+        resolve=False,
+        types=None,
+        **metadata
+    ):
         """Create an ImageFile trait."""
         extensions = None
         if types is not None:
@@ -327,14 +352,21 @@ def __init__(self, value=NoDefaultSpecified, exists=False,
             if set(types) - set(IMG_FORMATS.keys()):
                 invalid = set(types) - set(IMG_FORMATS.keys())
-                raise ValueError("""\
+                raise ValueError(
+                    """\
 Unknown value(s) %s for metadata type of an ImageFile input.\
-""" % ', '.join(['"%s"' % t for t in invalid]))
+"""
+                    % ", ".join(['"%s"' % t for t in invalid])
+                )
             extensions = [ext for t in types for ext in IMG_FORMATS[t]]
 
         super(ImageFile, self).__init__(
-            value=value, exists=exists, extensions=extensions,
-            resolve=resolve, **metadata)
+            value=value,
+            exists=exists,
+            extensions=extensions,
+            resolve=resolve,
+            **metadata
+        )
 
 
 def isdefined(objekt):
@@ -342,20 +374,21 @@
 
 def has_metadata(trait, metadata, value=None, recursive=True):
-    '''
+    """
     Checks if a given trait has a metadata (and optionally if it is set to
     particular value)
-    '''
+    """
     count = 0
-    if hasattr(trait, "_metadata") and metadata in list(
-            trait._metadata.keys()) and (trait._metadata[metadata] == value
-                                         or value is None):
+    if (
+        hasattr(trait, "_metadata")
+        and metadata in list(trait._metadata.keys())
+        and (trait._metadata[metadata] == value or value is None)
+    ):
         count += 1
     if recursive:
-        if hasattr(trait, 'inner_traits'):
+        if hasattr(trait, "inner_traits"):
             for inner_trait in trait.inner_traits():
-                count += has_metadata(inner_trait.trait_type, metadata,
-                                      recursive)
-        if hasattr(trait, 'handlers') and trait.handlers is not None:
+                count += has_metadata(inner_trait.trait_type, metadata, recursive)
+        if hasattr(trait, "handlers") and trait.handlers is not None:
             for handler in trait.handlers:
                 count += has_metadata(handler, metadata, recursive)
 
@@ -369,21 +402,20 @@ class MultiObject(traits.List):
     def validate(self, objekt, name, value):
 
         # want to treat range and other sequences (except str) as list
-        if not isinstance(value, (str, bytes)) and isinstance(
-                value, Sequence):
+        if not isinstance(value, (str, bytes)) and isinstance(value, Sequence):
             value = list(value)
 
-        if not isdefined(value) or \
-                (isinstance(value, list) and len(value) == 0):
+        if not isdefined(value) or (isinstance(value, list) and len(value) == 0):
             return Undefined
 
         newvalue = value
 
         inner_trait = self.inner_traits()[0]
-        if not isinstance(value, list) \
-            or (isinstance(inner_trait.trait_type, traits.List) and
-                not isinstance(inner_trait.trait_type, InputMultiObject) and
-                not isinstance(value[0], list)):
+        if not isinstance(value, list) or (
+            isinstance(inner_trait.trait_type, traits.List)
+            and not isinstance(inner_trait.trait_type, InputMultiObject)
+            and not isinstance(value[0], list)
+        ):
             newvalue = [value]
 
         value = super(MultiObject, self).validate(objekt, name, newvalue)
@@ -470,6 +502,7 @@ class InputMultiObject(MultiObject):
     ['/software/temp/foo.txt', '/software/temp/goo.txt']
 
     """
+
     pass
 
@@ -512,23 +545,34 @@ def _recurse_on_path_traits(func, thistrait, value, cwd):
     if thistrait.is_trait_type(BasePath):
         value = func(value, cwd)
     elif thistrait.is_trait_type(traits.List):
-        innertrait, = thistrait.inner_traits
+        (innertrait,) = thistrait.inner_traits
         if not isinstance(value, (list, tuple)):
             return _recurse_on_path_traits(func, innertrait, value, cwd)
 
-        value = [_recurse_on_path_traits(func, innertrait, v, cwd)
-                 for v in value]
+        value = [_recurse_on_path_traits(func, innertrait, v, cwd) for v in value]
     elif isinstance(value, dict) and thistrait.is_trait_type(traits.Dict):
         _, innertrait = thistrait.inner_traits
-        value = {k: _recurse_on_path_traits(func, innertrait, v, cwd)
-                 for k, v in value.items()}
+        value = {
+            k: _recurse_on_path_traits(func, innertrait, v, cwd)
+            for k, v in value.items()
+        }
     elif isinstance(value, tuple) and thistrait.is_trait_type(traits.Tuple):
-        value = tuple([_recurse_on_path_traits(func, subtrait, v, cwd)
-                       for subtrait, v in zip(thistrait.handler.types, value)])
+        value = tuple(
+            [
+                _recurse_on_path_traits(func, subtrait, v, cwd)
+                for subtrait, v in zip(thistrait.handler.types, value)
+            ]
+        )
     elif thistrait.is_trait_type(traits.TraitCompound):
-        is_str = [isinstance(f, (traits.String, traits.BaseStr, traits.BaseBytes, Str))
-                  for f in thistrait.handler.handlers]
-        if any(is_str) and isinstance(value, (bytes, str)) and not value.startswith('/'):
+        is_str = [
+            isinstance(f, (traits.String, traits.BaseStr, traits.BaseBytes, Str))
+            for f in thistrait.handler.handlers
+        ]
+        if (
+            any(is_str)
+            and isinstance(value, (bytes, str))
+            and not value.startswith("/")
+        ):
             return value
         for subtrait in thistrait.handler.handlers:
diff --git a/nipype/interfaces/brainsuite/__init__.py b/nipype/interfaces/brainsuite/__init__.py
index 6611aedff4..45bcf5fc65 100644
--- a/nipype/interfaces/brainsuite/__init__.py
+++ b/nipype/interfaces/brainsuite/__init__.py
@@ -1,4 +1,18 @@
 # -*- coding: utf-8 -*-
-from .brainsuite import (Bse, Bfc, Pvc, Cerebro, Cortex, Scrubmask, Tca,
-                         Dewisp, Dfs, Pialmesh, Skullfinder, Hemisplit, SVReg,
-                         BDP, ThicknessPVC)
+from .brainsuite import (
+    Bse,
+    Bfc,
+    Pvc,
+    Cerebro,
+    Cortex,
+    Scrubmask,
+    Tca,
+    Dewisp,
+    Dfs,
+    Pialmesh,
+    Skullfinder,
+    Hemisplit,
+    SVReg,
+    BDP,
+    ThicknessPVC,
+)
diff --git a/nipype/interfaces/brainsuite/brainsuite.py b/nipype/interfaces/brainsuite/brainsuite.py
index 919e9aba1a..0d26017ea7 100644
--- a/nipype/interfaces/brainsuite/brainsuite.py
+++ b/nipype/interfaces/brainsuite/brainsuite.py
@@ -3,7 +3,15 @@
 import os
 import re as regex
 
-from ..base import TraitedSpec, CommandLineInputSpec, CommandLine, File, traits, isdefined
+from ..base import (
+    TraitedSpec,
+    CommandLineInputSpec,
+    CommandLine,
+    File,
+    traits,
+    isdefined,
+)
+
 
 """This script provides interfaces for BrainSuite command line tools.
 Please see brainsuite.org for more information.
@@ -13,59 +21,60 @@
 
 class BseInputSpec(CommandLineInputSpec):
-    inputMRIFile = File(
-        mandatory=True, argstr='-i %s', desc='input MRI volume')
+    inputMRIFile = File(mandatory=True, argstr="-i %s", desc="input MRI volume")
     outputMRIVolume = File(
-        desc=
-        'output brain-masked MRI volume. If unspecified, output file name will be auto generated.',
-        argstr='-o %s',
+        desc="output brain-masked MRI volume. If unspecified, output file name will be auto generated.",
+        argstr="-o %s",
         hash_files=False,
-        genfile=True)
+        genfile=True,
+    )
     outputMaskFile = File(
-        desc=
-        'save smooth brain mask. If unspecified, output file name will be auto generated.',
-        argstr='--mask %s',
+        desc="save smooth brain mask. If unspecified, output file name will be auto generated.",
+        argstr="--mask %s",
         hash_files=False,
-        genfile=True)
+        genfile=True,
+    )
     diffusionConstant = traits.Float(
-        25, usedefault=True, desc='diffusion constant', argstr='-d %f')
+        25, usedefault=True, desc="diffusion constant", argstr="-d %f"
+    )
     diffusionIterations = traits.Int(
-        3, usedefault=True, desc='diffusion iterations', argstr='-n %d')
+        3, usedefault=True, desc="diffusion iterations", argstr="-n %d"
+    )
     edgeDetectionConstant = traits.Float(
-        0.64, usedefault=True, desc='edge detection constant', argstr='-s %f')
+        0.64, usedefault=True, desc="edge detection constant", argstr="-s %f"
+    )
     radius = traits.Float(
-        1,
-        usedefault=True,
-        desc='radius of erosion/dilation filter',
-        argstr='-r %f')
+        1, usedefault=True, desc="radius of erosion/dilation filter", argstr="-r %f"
+    )
     dilateFinalMask = traits.Bool(
-        True, usedefault=True, desc='dilate final mask', argstr='-p')
-    trim = traits.Bool(
-        True, usedefault=True, desc='trim brainstem', argstr='--trim')
+        True, usedefault=True, desc="dilate final mask", argstr="-p"
+    )
+    trim = traits.Bool(True, usedefault=True, desc="trim brainstem", argstr="--trim")
     outputDiffusionFilter = File(
-        desc='diffusion filter output', argstr='--adf %s', hash_files=False)
-    outputEdgeMap = File(
-        desc='edge map output', argstr='--edge %s', hash_files=False)
+        desc="diffusion filter output", argstr="--adf %s", hash_files=False
+    )
+    outputEdgeMap = File(desc="edge map output", argstr="--edge %s", hash_files=False)
     outputDetailedBrainMask = File(
-        desc='save detailed brain mask', argstr='--hires %s', hash_files=False)
-    outputCortexFile = File(
-        desc='cortex file', argstr='--cortex %s', hash_files=False)
+        desc="save detailed brain mask", argstr="--hires %s", hash_files=False
+    )
+    outputCortexFile = File(desc="cortex file", argstr="--cortex %s", hash_files=False)
     verbosityLevel = traits.Float(
-        1, usedefault=True, desc=' verbosity level (0=silent)', argstr='-v %f')
+        1, usedefault=True, desc=" verbosity level (0=silent)", argstr="-v %f"
+    )
     noRotate = traits.Bool(
-        desc=
-        'retain original orientation(default behavior will auto-rotate input NII files to LPI orientation)',
-        argstr='--norotate')
-    timer = traits.Bool(desc='show timing', argstr='--timer')
+        desc="retain original orientation(default behavior will auto-rotate input NII files to LPI orientation)",
+        argstr="--norotate",
+    )
+    timer = traits.Bool(desc="show timing", argstr="--timer")
 
 
 class BseOutputSpec(TraitedSpec):
-    outputMRIVolume = File(desc='path/name of brain-masked MRI volume')
-    outputMaskFile = File(desc='path/name of smooth brain mask')
-    outputDiffusionFilter = File(desc='path/name of diffusion filter output')
-    outputEdgeMap = File(desc='path/name of edge map output')
-    outputDetailedBrainMask = File(desc='path/name of detailed brain mask')
-    outputCortexFile = File(desc='path/name of cortex file')
+    outputMRIVolume = File(desc="path/name of brain-masked MRI volume")
+    outputMaskFile = File(desc="path/name of smooth brain mask")
+    outputDiffusionFilter = File(desc="path/name of diffusion filter output")
+    outputEdgeMap = File(desc="path/name of edge map output")
+    outputDetailedBrainMask = File(desc="path/name of detailed brain mask")
+    outputCortexFile = File(desc="path/name of cortex file")
 
 
 class Bse(CommandLine):
@@ -88,7 +97,7 @@ class Bse(CommandLine):
 
     input_spec = BseInputSpec
     output_spec = BseOutputSpec
-    _cmd = 'bse'
+    _cmd = "bse"
 
     def _gen_filename(self, name):
         inputs = self.inputs.get()
@@ -96,8 +105,8 @@ def _gen_filename(self, name):
             return os.path.abspath(inputs[name])
 
         fileToSuffixMap = {
-            'outputMRIVolume': '.bse.nii.gz',
-            'outputMaskFile': '.mask.nii.gz'
+            "outputMRIVolume": ".bse.nii.gz",
+            "outputMaskFile": ".mask.nii.gz",
         }
 
         if name in fileToSuffixMap:
@@ -111,87 +120,90 @@ def _list_outputs(self):
 
 class BfcInputSpec(CommandLineInputSpec):
     inputMRIFile = File(
-        mandatory=True, desc='input skull-stripped MRI volume', argstr='-i %s')
-    inputMaskFile = File(desc='mask file', argstr='-m %s', hash_files=False)
+        mandatory=True, desc="input skull-stripped MRI volume", argstr="-i %s"
+    )
+    inputMaskFile = File(desc="mask file", argstr="-m %s", hash_files=False)
     outputMRIVolume = File(
-        desc=
-        'output bias-corrected MRI volume.If unspecified, output file name will be auto generated.',
-        argstr='-o %s',
+        desc="output bias-corrected MRI volume. If unspecified, output file name will be auto generated.",
+        argstr="-o %s",
         hash_files=False,
-        genfile=True)
+        genfile=True,
+    )
     outputBiasField = File(
-        desc='save bias field estimate', argstr='--bias %s', hash_files=False)
+        desc="save bias field estimate", argstr="--bias %s", hash_files=False
+    )
     outputMaskedBiasField = File(
-        desc='save bias field estimate (masked)',
-        argstr='--maskedbias %s',
-        hash_files=False)
-    histogramRadius = traits.Int(
-        desc='histogram radius (voxels)', argstr='-r %d')
+        desc="save bias field estimate (masked)",
+        argstr="--maskedbias %s",
+        hash_files=False,
+    )
+    histogramRadius = traits.Int(desc="histogram radius (voxels)", argstr="-r %d")
     biasEstimateSpacing = traits.Int(
-        desc='bias sample spacing (voxels)', argstr='-s %d')
+        desc="bias sample spacing (voxels)", argstr="-s %d"
+    )
     controlPointSpacing = traits.Int(
-        desc='control point spacing (voxels)', argstr='-c %d')
+        desc="control point spacing (voxels)", argstr="-c %d"
+    )
     splineLambda = traits.Float(
-        desc='spline stiffness weighting parameter', argstr='-w %f')
+        desc="spline stiffness weighting parameter", argstr="-w %f"
+    )
     histogramType = traits.Enum(
-        'ellipse',
-        'block',
-        desc=
-        'Options for type of histogram\nellipse: use ellipsoid for ROI histogram\nblock :use block for ROI histogram',
-        argstr='%s')
+        "ellipse",
+        "block",
+        desc="Options for type of histogram\nellipse: use ellipsoid for ROI histogram\nblock :use block for ROI histogram",
+        argstr="%s",
+    )
     iterativeMode = traits.Bool(
-        desc='iterative mode (overrides -r, -s, -c, -w settings)',
-        argstr='--iterate')
-    correctionScheduleFile = File(
-        desc='list of parameters ', argstr='--schedule %s')
+        desc="iterative mode (overrides -r, -s, -c, -w settings)", argstr="--iterate"
+    )
+    correctionScheduleFile = File(desc="list of parameters ", argstr="--schedule %s")
     biasFieldEstimatesOutputPrefix = traits.Str(
-        desc='save iterative bias field estimates as .n.field.nii.gz',
-        argstr='--biasprefix %s')
+        desc="save iterative bias field estimates as .n.field.nii.gz",
+        argstr="--biasprefix %s",
+    )
     correctedImagesOutputPrefix = traits.Str(
-        desc='save iterative corrected images as .n.bfc.nii.gz',
-        argstr='--prefix %s')
+        desc="save iterative corrected images as .n.bfc.nii.gz",
+        argstr="--prefix %s",
+    )
     correctWholeVolume = traits.Bool(
-        desc='apply correction field to entire volume', argstr='--extrapolate')
+        desc="apply correction field to entire volume", argstr="--extrapolate"
+    )
     minBias = traits.Float(
-        0.5,
-        usedefault=True,
-        desc='minimum allowed bias value',
-        argstr='-L %f')
+        0.5, usedefault=True, desc="minimum allowed bias value", argstr="-L %f"
+    )
     maxBias = traits.Float(
-        1.5,
-        usedefault=True,
-        desc='maximum allowed bias value',
-        argstr='-U %f')
+        1.5, usedefault=True, desc="maximum allowed bias value", argstr="-U %f"
+    )
     biasRange = traits.Enum(
         "low",
         "medium",
         "high",
-        desc=
-        'Preset options for bias_model\n low: small bias model [0.95,1.05]\n'
-        'medium: medium bias model [0.90,1.10]\n high: high bias model [0.80,1.20]',
-        argstr='%s')
+        desc="Preset options for bias_model\n low: small bias model [0.95,1.05]\n"
+        "medium: medium bias model [0.90,1.10]\n high: high bias model [0.80,1.20]",
+        argstr="%s",
+    )
     intermediate_file_type = traits.Enum(
         "analyze",
         "nifti",
         "gzippedAnalyze",
         "gzippedNifti",
-        desc='Options for the format in which intermediate files are generated',
-        argstr='%s')
-    convergenceThreshold = traits.Float(
-        desc='convergence threshold', argstr='--eps %f')
+        desc="Options for the format in which intermediate files are generated",
+        argstr="%s",
+    )
+    convergenceThreshold = traits.Float(desc="convergence threshold", argstr="--eps %f")
     biasEstimateConvergenceThreshold = traits.Float(
-        desc='bias estimate convergence threshold (values > 0.1 disable)',
-        argstr='--beps %f')
-    verbosityLevel = traits.Int(
-        desc='verbosity level (0=silent)', argstr='-v %d')
-    timer = traits.Bool(desc='display timing information', argstr='--timer')
+        desc="bias estimate convergence threshold (values > 0.1 disable)",
+        argstr="--beps %f",
+    )
+    verbosityLevel = traits.Int(desc="verbosity level (0=silent)", argstr="-v %d")
+    timer = traits.Bool(desc="display timing information", argstr="--timer")
 
 
 class BfcOutputSpec(TraitedSpec):
-    outputMRIVolume = File(desc='path/name of output file')
-    outputBiasField = File(desc='path/name of bias field output file')
-    outputMaskedBiasField = File(desc='path/name of masked bias field output')
-    correctionScheduleFile = File(desc='path/name of schedule file')
+    outputMRIVolume = File(desc="path/name of output file")
+    outputBiasField = File(desc="path/name of bias field output file")
+    outputMaskedBiasField = File(desc="path/name of masked bias field output")
+    correctionScheduleFile = File(desc="path/name of schedule file")
@@ -215,38 +227,37 @@ class Bfc(CommandLine):
 
     input_spec = BfcInputSpec
     output_spec = BfcOutputSpec
-    _cmd = 'bfc'
+    _cmd = "bfc"
 
     def _gen_filename(self, name):
         inputs = self.inputs.get()
         if isdefined(inputs[name]):
             return os.path.abspath(inputs[name])
 
-        fileToSuffixMap = {'outputMRIVolume': '.bfc.nii.gz'}
+        fileToSuffixMap = {"outputMRIVolume": ".bfc.nii.gz"}
         if name in fileToSuffixMap:
             return getFileName(self.inputs.inputMRIFile, fileToSuffixMap[name])
 
         return None
 
     def _format_arg(self, name, spec, value):
-        if name == 'histogramType':
-            return spec.argstr % {
-                "ellipse": "--ellipse",
-                "block": "--block"
-            }[value]
-        if name == 'biasRange':
-            return spec.argstr % {
-                "low": "--low",
-                "medium": "--medium",
-                "high": "--high"
-            }[value]
-        if name == 'intermediate_file_type':
-            return spec.argstr % {
-                "analyze": "--analyze",
-                "nifti": "--nifti",
-                "gzippedAnalyze": "--analyzegz",
-                "gzippedNifti": "--niftigz"
-            }[value]
+        if name == "histogramType":
+            return spec.argstr % {"ellipse": "--ellipse", "block": "--block"}[value]
+        if name == "biasRange":
+            return (
+                spec.argstr
+                % {"low": "--low", "medium": "--medium", "high": "--high"}[value]
+            )
+        if name == "intermediate_file_type":
+            return (
+                spec.argstr
+                % {
+                    "analyze": "--analyze",
+                    "nifti": "--nifti",
+                    "gzippedAnalyze": "--analyzegz",
+                    "gzippedNifti": "--niftigz",
+                }[value]
+            )
 
         return super(Bfc, self)._format_arg(name, spec, value)
 
@@ -255,25 +266,27 @@ def _list_outputs(self):
 
 class PvcInputSpec(CommandLineInputSpec):
-    inputMRIFile = File(mandatory=True, desc='MRI file', argstr='-i %s')
-    inputMaskFile = File(desc='brain mask file', argstr='-m %s')
+    inputMRIFile = File(mandatory=True, desc="MRI file", argstr="-i %s")
+    inputMaskFile = File(desc="brain mask file", argstr="-m %s")
     outputLabelFile = File(
-        desc=
-        'output label file. If unspecified, output file name will be auto generated.',
-        argstr='-o %s',
-        genfile=True)
+        desc="output label file. If unspecified, output file name will be auto generated.",
+        argstr="-o %s",
+        genfile=True,
+    )
     outputTissueFractionFile = File(
-        desc='output tissue fraction file', argstr='-f %s', genfile=True)
-    spatialPrior = traits.Float(desc='spatial prior strength', argstr='-l %f')
-    verbosity = traits.Int(desc='verbosity level (0 = silent)', argstr='-v %d')
+        desc="output tissue fraction file", argstr="-f %s", genfile=True
+    )
+    spatialPrior = traits.Float(desc="spatial prior strength", argstr="-l %f")
+    verbosity = traits.Int(desc="verbosity level (0 = silent)", argstr="-v %d")
     threeClassFlag = traits.Bool(
-        desc='use a three-class (CSF=0,GM=1,WM=2) labeling', argstr='-3')
-    timer = traits.Bool(desc='time processing', argstr='--timer')
+        desc="use a three-class (CSF=0,GM=1,WM=2) labeling", argstr="-3"
+    )
+    timer = traits.Bool(desc="time processing", argstr="--timer")
 
 
 class PvcOutputSpec(TraitedSpec):
-    outputLabelFile = File(desc='path/name of label file')
-    outputTissueFractionFile = File(desc='path/name of tissue fraction file')
+    outputLabelFile = File(desc="path/name of label file")
+    outputTissueFractionFile = File(desc="path/name of tissue fraction file")
 
 
 class Pvc(CommandLine):
@@ -298,7 +311,7 @@ class Pvc(CommandLine):
 
     input_spec = PvcInputSpec
     output_spec = PvcOutputSpec
-    _cmd = 'pvc'
+    _cmd = "pvc"
 
     def _gen_filename(self, name):
         inputs = self.inputs.get()
@@ -306,8 +319,8 @@ def _gen_filename(self, name):
             return os.path.abspath(inputs[name])
 
         fileToSuffixMap = {
-            'outputLabelFile': '.pvc.label.nii.gz',
-            'outputTissueFractionFile': '.pvc.frac.nii.gz'
+            "outputLabelFile": ".pvc.label.nii.gz",
+            "outputTissueFractionFile": ".pvc.frac.nii.gz",
         }
         if name in fileToSuffixMap:
             return getFileName(self.inputs.inputMRIFile, fileToSuffixMap[name])
@@ -319,53 +332,53 @@
 
 class CerebroInputSpec(CommandLineInputSpec):
-    inputMRIFile = File(
-        mandatory=True, desc='input 3D MRI volume', argstr='-i %s')
+    inputMRIFile = File(mandatory=True, desc="input 3D MRI volume", argstr="-i %s")
     inputAtlasMRIFile = File(
-        mandatory=True, desc='atlas MRI volume', argstr='--atlas %s')
+        mandatory=True, desc="atlas MRI volume", argstr="--atlas %s"
+    )
     inputAtlasLabelFile = File(
-        mandatory=True, desc='atlas labeling', argstr='--atlaslabels %s')
-    inputBrainMaskFile = File(desc='brain mask file', argstr='-m %s')
+        mandatory=True, desc="atlas labeling", argstr="--atlaslabels %s"
+    )
+    inputBrainMaskFile = File(desc="brain mask file", argstr="-m %s")
     outputCerebrumMaskFile = File(
-        desc=
-        'output cerebrum mask volume. If unspecified, output file name will be auto generated.',
-        argstr='-o %s',
-        genfile=True)
+        desc="output cerebrum mask volume. If unspecified, output file name will be auto generated.",
+        argstr="-o %s",
+        genfile=True,
+    )
     outputLabelVolumeFile = File(
-        desc=
-        'output labeled hemisphere/cerebrum volume. If unspecified, output file name will be auto generated.',
-        argstr='-l %s',
-        genfile=True)
-    costFunction = traits.Int(2, usedefault=True, desc='0,1,2', argstr='-c %d')
+        desc="output labeled hemisphere/cerebrum volume. If unspecified, output file name will be auto generated.",
+        argstr="-l %s",
+        genfile=True,
+    )
+    costFunction = traits.Int(2, usedefault=True, desc="0,1,2", argstr="-c %d")
     useCentroids = traits.Bool(
-        desc='use centroids of data to initialize position',
-        argstr='--centroids')
+        desc="use centroids of data to initialize position", argstr="--centroids"
+    )
     outputAffineTransformFile = File(
-        desc='save affine transform to file.', argstr='--air %s', genfile=True)
+        desc="save affine transform to file.", argstr="--air %s", genfile=True
+    )
     outputWarpTransformFile = File(
-        desc='save warp transform to file.', argstr='--warp %s', genfile=True)
-    verbosity = traits.Int(desc='verbosity level (0=silent)', argstr='-v %d')
-    linearConvergence = traits.Float(
-        desc='linear convergence', argstr='--linconv %f')
-    warpLabel = traits.Int(
-        desc='warp order (2,3,4,5,6,7,8)', argstr='--warplevel %d')
-    warpConvergence = traits.Float(
-        desc='warp convergence', argstr='--warpconv %f')
-    keepTempFiles = traits.Bool(
-        desc="don't remove temporary files", argstr='--keep')
+        desc="save warp transform to file.", argstr="--warp %s", genfile=True
+    )
+    verbosity = traits.Int(desc="verbosity level (0=silent)", argstr="-v %d")
+    linearConvergence = traits.Float(desc="linear convergence", argstr="--linconv %f")
+    warpLabel = traits.Int(desc="warp order (2,3,4,5,6,7,8)", argstr="--warplevel %d")
+    warpConvergence = traits.Float(desc="warp convergence", argstr="--warpconv %f")
+    keepTempFiles = traits.Bool(desc="don't remove temporary files", argstr="--keep")
     tempDirectory = traits.Str(
-        desc='specify directory to use for temporary files',
-        argstr='--tempdir %s')
+        desc="specify directory to use for temporary files", argstr="--tempdir %s"
+    )
     tempDirectoryBase = traits.Str(
-        desc='create a temporary directory within this directory',
-        argstr='--tempdirbase %s')
+        desc="create a temporary directory within this directory",
+        argstr="--tempdirbase %s",
+    )
 
 
 class CerebroOutputSpec(TraitedSpec):
-    outputCerebrumMaskFile = File(desc='path/name of cerebrum mask file')
-    outputLabelVolumeFile = File(desc='path/name of label mask file')
-    outputAffineTransformFile = File(desc='path/name of affine transform file')
-    outputWarpTransformFile = File(desc='path/name of warp transform file')
+    outputCerebrumMaskFile = File(desc="path/name of cerebrum mask file")
+    outputLabelVolumeFile = File(desc="path/name of label mask file")
+    outputAffineTransformFile = File(desc="path/name of affine transform file")
+    outputWarpTransformFile = File(desc="path/name of warp transform file")
 
 
 class Cerebro(CommandLine):
@@ -393,7 +406,7 @@ class Cerebro(CommandLine):
 
     input_spec = CerebroInputSpec
     output_spec = CerebroOutputSpec
-    _cmd = 'cerebro'
+    _cmd = "cerebro"
 
     def _gen_filename(self, name):
         inputs = self.inputs.get()
@@ -401,10 +414,10 @@ def _gen_filename(self, name):
             return os.path.abspath(inputs[name])
 
         fileToSuffixMap = {
-            'outputCerebrumMaskFile': '.cerebrum.mask.nii.gz',
-            'outputLabelVolumeFile': '.hemi.label.nii.gz',
-            'outputWarpTransformFile': '.warp',
-            'outputAffineTransformFile': '.air'
+            "outputCerebrumMaskFile": ".cerebrum.mask.nii.gz",
+            "outputLabelVolumeFile": ".hemi.label.nii.gz",
+            "outputWarpTransformFile": ".warp",
+            "outputAffineTransformFile": ".air",
         }
         if name in fileToSuffixMap:
             return getFileName(self.inputs.inputMRIFile, fileToSuffixMap[name])
@@ -417,36 +430,38 @@ def _list_outputs(self):
 
 class CortexInputSpec(CommandLineInputSpec):
     inputHemisphereLabelFile = File(
-        mandatory=True, desc='hemisphere / lobe label volume', argstr='-h %s')
+        mandatory=True, desc="hemisphere / lobe label volume", argstr="-h %s"
+    )
     outputCerebrumMask = File(
-        desc=
-        'output structure mask. If unspecified, output file name will be auto generated.',
-        argstr='-o %s',
-        genfile=True)
+        desc="output structure mask. If unspecified, output file name will be auto generated.",
+        argstr="-o %s",
+        genfile=True,
+    )
     inputTissueFractionFile = File(
-        mandatory=True,
-        desc='tissue fraction file (32-bit float)',
-        argstr='-f %s')
+        mandatory=True, desc="tissue fraction file (32-bit float)", argstr="-f %s"
+    )
     tissueFractionThreshold = traits.Float(
         50.0,
         usedefault=True,
-        desc='tissue fraction threshold (percentage)',
-        argstr='-p %f')
+        desc="tissue fraction threshold (percentage)",
+        argstr="-p %f",
+    )
     computeWGBoundary = traits.Bool(
-        True, usedefault=True, desc='compute WM/GM boundary', argstr='-w')
-    computeGCBoundary = traits.Bool(
-        desc='compute GM/CSF boundary', argstr='-g')
+        True, usedefault=True, desc="compute WM/GM boundary", argstr="-w"
+    )
+    computeGCBoundary = traits.Bool(desc="compute GM/CSF boundary", argstr="-g")
     includeAllSubcorticalAreas = traits.Bool(
         True,
         usedefault=True,
-        desc='include all subcortical areas in WM mask',
-        argstr='-a')
-    verbosity = traits.Int(desc='verbosity level', argstr='-v %d')
-    timer = traits.Bool(desc='timing function', argstr='--timer')
+        desc="include all subcortical areas in WM mask",
+        argstr="-a",
+    )
+    verbosity = traits.Int(desc="verbosity level", argstr="-v %d")
+    timer = traits.Bool(desc="timing function", argstr="--timer")
 
 
 class CortexOutputSpec(TraitedSpec):
-    outputCerebrumMask = File(desc='path/name of cerebrum mask')
+    outputCerebrumMask = File(desc="path/name of cerebrum mask")
 
 
 class Cortex(CommandLine):
@@ -471,16 +486,17 @@ class Cortex(CommandLine):
 
     input_spec = CortexInputSpec
     output_spec = CortexOutputSpec
-    _cmd = 'cortex'
+    _cmd = "cortex"
 
     def _gen_filename(self, name):
         inputs = self.inputs.get()
         if isdefined(inputs[name]):
             return os.path.abspath(inputs[name])
 
-        if name == 'outputCerebrumMask':
-            return getFileName(self.inputs.inputHemisphereLabelFile,
-                               '.init.cortex.mask.nii.gz')
+        if name == "outputCerebrumMask":
+            return getFileName(
+                self.inputs.inputHemisphereLabelFile, ".init.cortex.mask.nii.gz"
+            )
 
         return None
 
     def _list_outputs(self):
@@ -489,23 +505,26 @@ def _list_outputs(self):
 
 class ScrubmaskInputSpec(CommandLineInputSpec):
     inputMaskFile = File(
-        mandatory=True, desc='input structure mask file', argstr='-i %s')
+        mandatory=True, desc="input structure mask file", argstr="-i %s"
+    )
     outputMaskFile = File(
-        desc=
-        'output structure mask file. If unspecified, output file name will be auto generated.',
-        argstr='-o %s',
-        genfile=True)
+        desc="output structure mask file. If unspecified, output file name will be auto generated.",
+        argstr="-o %s",
+        genfile=True,
+    )
     backgroundFillThreshold = traits.Int(
-        2, usedefault=True, desc='background fill threshold', argstr='-b %d')
+        2, usedefault=True, desc="background fill threshold", argstr="-b %d"
+    )
     foregroundTrimThreshold = traits.Int(
-        0, usedefault=True, desc='foreground trim threshold', argstr='-f %d')
-    numberIterations = traits.Int(desc='number of iterations', argstr='-n %d')
-    verbosity = traits.Int(desc='verbosity (0=silent)', argstr='-v %d')
-    timer = traits.Bool(desc='timing function', argstr='--timer')
+        0, usedefault=True, desc="foreground trim threshold", argstr="-f %d"
+    )
+    numberIterations = traits.Int(desc="number of iterations", argstr="-n %d")
+    verbosity = traits.Int(desc="verbosity (0=silent)", argstr="-v %d")
+    timer = traits.Bool(desc="timing function", argstr="--timer")
 
 
 class ScrubmaskOutputSpec(TraitedSpec):
-    outputMaskFile = File(desc='path/name of mask file')
+    outputMaskFile = File(desc="path/name of mask file")
 
 
 class Scrubmask(CommandLine):
@@ -526,18 +545,20 @@ class Scrubmask(CommandLine):
     >>> results = scrubmask.run() #doctest: +SKIP
 
     """
+
     input_spec = ScrubmaskInputSpec
     output_spec = ScrubmaskOutputSpec
-    _cmd = 'scrubmask'
+    _cmd = "scrubmask"
 
     def _gen_filename(self, name):
         inputs = self.inputs.get()
         if isdefined(inputs[name]):
             return os.path.abspath(inputs[name])
-        if name == 'outputMaskFile':
-            return getFileName(self.inputs.inputMaskFile,
-                               '.cortex.scrubbed.mask.nii.gz')
+        if name == "outputMaskFile":
+            return getFileName(
+                self.inputs.inputMaskFile, ".cortex.scrubbed.mask.nii.gz"
+            )
 
         return None
 
     def _list_outputs(self):
@@ -546,25 +567,25 @@
 
 class TcaInputSpec(CommandLineInputSpec):
-    inputMaskFile = File(
-        mandatory=True, desc='input mask volume', argstr='-i %s')
+    inputMaskFile = File(mandatory=True, desc="input mask volume", argstr="-i %s")
     outputMaskFile = File(
-        desc=
-        'output mask volume. If unspecified, output file name will be auto generated.',
-        argstr='-o %s',
-        genfile=True)
+        desc="output mask volume. If unspecified, output file name will be auto generated.",
+        argstr="-o %s",
+        genfile=True,
+    )
     minCorrectionSize = traits.Int(
-        2500, usedefault=True, desc='maximum correction size', argstr='-m %d')
-    maxCorrectionSize = traits.Int(
-        desc='minimum correction size', argstr='-n %d')
+        2500, usedefault=True, desc="maximum correction size", argstr="-m %d"
+    )
+    maxCorrectionSize = traits.Int(desc="minimum correction size", argstr="-n %d")
     foregroundDelta = traits.Int(
-        20, usedefault=True, desc='foreground delta', argstr='--delta %d')
-    verbosity = traits.Int(desc='verbosity (0 = quiet)', argstr='-v %d')
-    timer = traits.Bool(desc='timing function', argstr='--timer')
+        20, usedefault=True, desc="foreground delta", argstr="--delta %d"
+    )
+    verbosity = traits.Int(desc="verbosity (0 = quiet)", argstr="-v %d")
+    timer = traits.Bool(desc="timing function", argstr="--timer")
 
 
 class TcaOutputSpec(TraitedSpec):
-    outputMaskFile = File(desc='path/name of mask file')
+    outputMaskFile = File(desc="path/name of mask file")
 
 
 class Tca(CommandLine):
@@ -583,18 +604,18 @@ class Tca(CommandLine):
     >>> results = tca.run() #doctest: +SKIP
 
     """
+
     input_spec = TcaInputSpec
     output_spec = TcaOutputSpec
-    _cmd = 'tca'
+    _cmd = "tca"
 
     def _gen_filename(self, name):
         inputs = self.inputs.get()
         if isdefined(inputs[name]):
             return os.path.abspath(inputs[name])
-        if name == 'outputMaskFile':
-            return getFileName(self.inputs.inputMaskFile,
-                               '.cortex.tca.mask.nii.gz')
+        if name == "outputMaskFile":
+            return getFileName(self.inputs.inputMaskFile, ".cortex.tca.mask.nii.gz")
 
         return None
 
     def _list_outputs(self):
@@ -603,21 +624,20 @@
 
 class DewispInputSpec(CommandLineInputSpec):
-    inputMaskFile = File(mandatory=True, desc='input file', argstr='-i %s')
+    inputMaskFile = File(mandatory=True, desc="input file", argstr="-i %s")
     outputMaskFile = File(
-        desc=
-        'output file. If unspecified, output file name will be auto generated.',
-        argstr='-o %s',
-        genfile=True)
-    verbosity = traits.Int(desc='verbosity', argstr='-v %d')
-    sizeThreshold = traits.Int(desc='size threshold', argstr='-t %d')
-    maximumIterations = traits.Int(
-        desc='maximum number of iterations', argstr='-n %d')
-    timer = traits.Bool(desc='time processing', argstr='--timer')
+        desc="output file. If unspecified, output file name will be auto generated.",
+        argstr="-o %s",
+        genfile=True,
+    )
+    verbosity = traits.Int(desc="verbosity", argstr="-v %d")
+    sizeThreshold = traits.Int(desc="size threshold", argstr="-t %d")
+    maximumIterations = traits.Int(desc="maximum number of iterations", argstr="-n %d")
+    timer = traits.Bool(desc="time processing", argstr="--timer")
 
 
 class DewispOutputSpec(TraitedSpec):
-    outputMaskFile = File(desc='path/name of mask file')
+    outputMaskFile = File(desc="path/name of mask file")
 
 
 class Dewisp(CommandLine):
@@ -645,16 +665,15 @@ class Dewisp(CommandLine):
 
     input_spec = DewispInputSpec
     output_spec = DewispOutputSpec
-    _cmd = 'dewisp'
+    _cmd = "dewisp"
 
     def _gen_filename(self, name):
         inputs = self.inputs.get()
         if isdefined(inputs[name]):
             return os.path.abspath(inputs[name])
-        if name == 'outputMaskFile':
-            return getFileName(self.inputs.inputMaskFile,
-                               '.cortex.dewisp.mask.nii.gz')
+        if name == "outputMaskFile":
+            return getFileName(self.inputs.inputMaskFile, ".cortex.dewisp.mask.nii.gz")
 
         return None
 
     def _list_outputs(self):
@@ -663,55 +682,59 @@
 
 class DfsInputSpec(CommandLineInputSpec):
-    inputVolumeFile = File(
-        mandatory=True, desc='input 3D volume', argstr='-i %s')
+    inputVolumeFile = File(mandatory=True, desc="input 3D volume", argstr="-i %s")
     outputSurfaceFile = File(
-        desc=
-        'output surface mesh file. If unspecified, output file name will be auto generated.',
-        argstr='-o %s',
-        genfile=True)
+        desc="output surface mesh file. If unspecified, output file name will be auto generated.",
+        argstr="-o %s",
+        genfile=True,
+    )
     inputShadingVolume = File(
-        desc='shade surface model with data from image volume', argstr='-c %s')
+        desc="shade surface model with data from image volume", argstr="-c %s"
+    )
     smoothingIterations = traits.Int(
-        10,
-        usedefault=True,
-        desc='number of smoothing iterations',
-        argstr='-n %d')
+        10, usedefault=True, desc="number of smoothing iterations", argstr="-n %d"
+    )
     smoothingConstant = traits.Float(
-        0.5, usedefault=True, desc='smoothing constant', argstr='-a %f')
+        0.5, usedefault=True, desc="smoothing constant", argstr="-a %f"
+    )
     curvatureWeighting = traits.Float(
-        5.0, usedefault=True, desc='curvature weighting', argstr='-w %f')
-    scalingPercentile = traits.Float(desc='scaling percentile', argstr='-f %f')
+        5.0, usedefault=True, desc="curvature weighting", argstr="-w %f"
+    )
+    scalingPercentile = traits.Float(desc="scaling percentile", argstr="-f %f")
     nonZeroTessellation = traits.Bool(
-        desc='tessellate non-zero voxels',
-        argstr='-nz',
-        xor=('nonZeroTessellation', 'specialTessellation'))
+        desc="tessellate non-zero voxels",
+        argstr="-nz",
+        xor=("nonZeroTessellation", "specialTessellation"),
+    )
     tessellationThreshold = traits.Float(
-        desc=
-        'To be used with specialTessellation. Set this value first, then set specialTessellation value.\nUsage: tessellate voxels greater_than, less_than, or equal_to ',
-        argstr='%f')
+        desc="To be used with specialTessellation. Set this value first, then set specialTessellation value.\nUsage: tessellate voxels greater_than, less_than, or equal_to ",
+        argstr="%f",
+    )
     specialTessellation = traits.Enum(
-        'greater_than',
-        'less_than',
-        'equal_to',
-        desc=
-        'To avoid throwing a UserWarning, set tessellationThreshold first. Then set this attribute.\nUsage: tessellate voxels greater_than, less_than, or equal_to ',
-        argstr='%s',
-        xor=('nonZeroTessellation', 'specialTessellation'),
-        requires=['tessellationThreshold'],
-        position=-1)
+        "greater_than",
+        "less_than",
+        "equal_to",
+        desc="To avoid throwing a UserWarning, set tessellationThreshold first. Then set this attribute.\nUsage: tessellate voxels greater_than, less_than, or equal_to ",
+        argstr="%s",
+        xor=("nonZeroTessellation", "specialTessellation"),
+        requires=["tessellationThreshold"],
+        position=-1,
+    )
     zeroPadFlag = traits.Bool(
-        desc='zero-pad volume (avoids clipping at edges)', argstr='-z')
+        desc="zero-pad volume (avoids clipping at edges)", argstr="-z"
+    )
     noNormalsFlag = traits.Bool(
-        desc='do not compute vertex normals', argstr='--nonormals')
+        desc="do not compute vertex normals", argstr="--nonormals"
+    )
     postSmoothFlag = traits.Bool(
-        desc='smooth vertices after coloring', argstr='--postsmooth')
-    verbosity = traits.Int(desc='verbosity (0 = quiet)', argstr='-v %d')
-    timer = traits.Bool(desc='timing function', argstr='--timer')
+        desc="smooth vertices after coloring", argstr="--postsmooth"
+    )
+    verbosity = traits.Int(desc="verbosity (0 = quiet)", argstr="-v %d")
+    timer = traits.Bool(desc="timing function", argstr="--timer")
 
 
 class DfsOutputSpec(TraitedSpec):
-    outputSurfaceFile = File(desc='path/name of surface file')
+    outputSurfaceFile = File(desc="path/name of surface file")
 
 
 class Dfs(CommandLine):
@@ -734,18 +757,21 @@ class Dfs(CommandLine):
 
     input_spec = DfsInputSpec
     output_spec = DfsOutputSpec
-    _cmd = 'dfs'
+    _cmd = "dfs"
 
     def _format_arg(self, name, spec, value):
-        if name == 'tessellationThreshold':
-            return ''  # blank argstr
-        if name == 'specialTessellation':
+        if name == "tessellationThreshold":
+            return ""  # blank argstr
+        if name == "specialTessellation":
             threshold = self.inputs.tessellationThreshold
-            return spec.argstr % {
-                "greater_than": ''.join(("-gt %f" % threshold)),
-                "less_than": ''.join(("-lt %f" % threshold)),
-                "equal_to": ''.join(("-eq %f" % threshold))
-            }[value]
+            return (
+                spec.argstr
+                % {
+                    "greater_than": "".join(("-gt %f" % threshold)),
+                    "less_than": "".join(("-lt %f" % threshold)),
+                    "equal_to": "".join(("-eq %f" % threshold)),
+                }[value]
+            )
         return super(Dfs, self)._format_arg(name, spec, value)
 
     def _gen_filename(self, name):
@@ -753,9 +779,8 @@ def _gen_filename(self, name):
         if isdefined(inputs[name]):
             return os.path.abspath(inputs[name])
 
-        if name == 'outputSurfaceFile':
-            return getFileName(self.inputs.inputVolumeFile,
-                               '.inner.cortex.dfs')
+        if name == "outputSurfaceFile":
+            return getFileName(self.inputs.inputVolumeFile, ".inner.cortex.dfs")
 
         return None
 
@@ -764,60 +789,57 @@ def _list_outputs(self):
 
 class PialmeshInputSpec(CommandLineInputSpec):
-    inputSurfaceFile = File(mandatory=True, desc='input file', argstr='-i %s')
+    inputSurfaceFile = File(mandatory=True, desc="input file", argstr="-i %s")
     outputSurfaceFile = File(
-        desc=
-        'output file. If unspecified, output file name will be auto generated.',
-        argstr='-o %s',
-        genfile=True)
-    verbosity = traits.Int(desc='verbosity', argstr='-v %d')
+        desc="output file. If unspecified, output file name will be auto generated.",
+        argstr="-o %s",
+        genfile=True,
+    )
+    verbosity = traits.Int(desc="verbosity", argstr="-v %d")
     inputTissueFractionFile = File(
-        mandatory=True,
-        desc='floating point (32) tissue fraction image',
-        argstr='-f %s')
+        mandatory=True, desc="floating point (32) tissue fraction image", argstr="-f %s"
+    )
     numIterations = traits.Int(
-        100, usedefault=True, desc='number of iterations', argstr='-n %d')
+        100, usedefault=True, desc="number of iterations", argstr="-n %d"
+    )
     searchRadius = traits.Float(
-        1, usedefault=True, desc='search radius', argstr='-r %f')
-    stepSize = traits.Float(
-        0.4, usedefault=True, desc='step size', argstr='-s %f')
+        1, usedefault=True, desc="search radius", argstr="-r %f"
+    )
+    stepSize = traits.Float(0.4, usedefault=True, desc="step size", argstr="-s %f")
     inputMaskFile = File(
-        mandatory=True,
-        desc='restrict growth to mask file region',
-        argstr='-m %s')
+        mandatory=True, desc="restrict growth to mask file region", argstr="-m %s"
+    )
     maxThickness = traits.Float(
-        20,
-        usedefault=True,
-        desc='maximum allowed tissue thickness',
-        argstr='--max %f')
+        20, usedefault=True, desc="maximum allowed tissue thickness", argstr="--max %f"
+    )
     tissueThreshold = traits.Float(
-        1.05, usedefault=True, desc='tissue threshold', argstr='-t %f')
+        1.05, usedefault=True, desc="tissue threshold", argstr="-t %f"
+    )
     # output interval is not an output -- it specifies how frequently the
     # output surfaces are generated
     outputInterval = traits.Int(
-        10, usedefault=True, desc='output interval', argstr='--interval %d')
+        10, usedefault=True, desc="output interval", argstr="--interval %d"
+    )
     exportPrefix = traits.Str(
-        desc='prefix for exporting surfaces if interval is set',
-        argstr='--prefix %s')
+        desc="prefix for exporting surfaces if interval is set", argstr="--prefix %s"
+    )
     laplacianSmoothing = traits.Float(
-        0.025,
-        usedefault=True,
-        desc='apply Laplacian smoothing',
-        argstr='--smooth %f')
-    timer = traits.Bool(desc='show timing', argstr='--timer')
+        0.025, usedefault=True, desc="apply Laplacian smoothing", argstr="--smooth %f"
+    )
+    timer = traits.Bool(desc="show timing", argstr="--timer")
     recomputeNormals = traits.Bool(
-        desc='recompute normals at each iteration', argstr='--norm')
+        desc="recompute normals at each iteration", argstr="--norm"
+    )
     normalSmoother = traits.Float(
-        0.2,
-        usedefault=True,
-        desc='strength of normal smoother.',
-        argstr='--nc %f')
+        0.2, usedefault=True, desc="strength of normal smoother.", argstr="--nc %f"
+    )
     tangentSmoother = traits.Float(
-        desc='strength of tangential smoother.', argstr='--tc %f')
+        desc="strength of tangential smoother.", argstr="--tc %f"
+    )
 
 
 class PialmeshOutputSpec(TraitedSpec):
-    outputSurfaceFile = File(desc='path/name of surface file')
+    outputSurfaceFile = File(desc="path/name of surface file")
 
 
 class Pialmesh(CommandLine):
@@ -842,16 +864,15 @@ class Pialmesh(CommandLine):
 
     input_spec = PialmeshInputSpec
     output_spec = PialmeshOutputSpec
-    _cmd = 'pialmesh'
+    _cmd = "pialmesh"
 
     def _gen_filename(self, name):
         inputs = self.inputs.get()
         if isdefined(inputs[name]):
             return os.path.abspath(inputs[name])
 
-        if name == 'outputSurfaceFile':
-            return getFileName(self.inputs.inputSurfaceFile,
-                               '.pial.cortex.dfs')
+        if name == "outputSurfaceFile":
+            return getFileName(self.inputs.inputSurfaceFile, ".pial.cortex.dfs")
 
         return None
 
@@ -860,42 +881,43 @@ def _list_outputs(self):
 
 class HemisplitInputSpec(CommandLineInputSpec):
-    inputSurfaceFile = File(
-        mandatory=True, desc='input surface', argstr='-i %s')
+    inputSurfaceFile = File(mandatory=True, desc="input surface", argstr="-i %s")
     inputHemisphereLabelFile = File(
-        mandatory=True, desc='input hemisphere label volume', argstr='-l %s')
+        mandatory=True, desc="input hemisphere label volume", argstr="-l %s"
+    )
     outputLeftHemisphere = File(
-        desc=
-        'output surface file, left hemisphere. If unspecified, output file name will be auto generated.',
-        argstr='--left %s',
-        genfile=True)
+        desc="output surface file, left hemisphere. If unspecified, output file name will be auto generated.",
+        argstr="--left %s",
+        genfile=True,
+    )
     outputRightHemisphere = File(
-        desc=
-        'output surface file, right hemisphere. If unspecified, output file name will be auto generated.',
-        argstr='--right %s',
-        genfile=True)
+        desc="output surface file, right hemisphere. If unspecified, output file name will be auto generated.",
+        argstr="--right %s",
+        genfile=True,
+    )
     pialSurfaceFile = File(
-        desc='pial surface file -- must have same geometry as input surface',
-        argstr='-p %s')
+        desc="pial surface file -- must have same geometry as input surface",
+        argstr="-p %s",
+    )
     outputLeftPialHemisphere = File(
-        desc=
-        'output pial surface file, left hemisphere. If unspecified, output file name will be auto generated.',
-        argstr='-pl %s',
-        genfile=True)
+        desc="output pial surface file, left hemisphere. If unspecified, output file name will be auto generated.",
+        argstr="-pl %s",
+        genfile=True,
+    )
     outputRightPialHemisphere = File(
-        desc=
-        'output pial surface file, right hemisphere. If unspecified, output file name will be auto generated.',
-        argstr='-pr %s',
-        genfile=True)
-    verbosity = traits.Int(desc='verbosity (0 = silent)', argstr='-v %d')
-    timer = traits.Bool(desc='timing function', argstr='--timer')
+        desc="output pial surface file, right hemisphere. If unspecified, output file name will be auto generated.",
+        argstr="-pr %s",
+        genfile=True,
+    )
+    verbosity = traits.Int(desc="verbosity (0 = silent)", argstr="-v %d")
+    timer = traits.Bool(desc="timing function", argstr="--timer")
 
 
 class HemisplitOutputSpec(TraitedSpec):
-    outputLeftHemisphere = File(desc='path/name of left hemisphere')
-    outputRightHemisphere = File(desc='path/name of right hemisphere')
-    outputLeftPialHemisphere = File(desc='path/name of left pial hemisphere')
-    outputRightPialHemisphere = File(desc='path/name of right pial hemisphere')
+    outputLeftHemisphere = File(desc="path/name of left hemisphere")
+    outputRightHemisphere = File(desc="path/name of right hemisphere")
+    outputLeftPialHemisphere = File(desc="path/name of left pial hemisphere")
+    outputRightPialHemisphere = File(desc="path/name of right pial hemisphere")
 
 
 class Hemisplit(CommandLine):
@@ -920,7 +942,7 @@ class Hemisplit(CommandLine):
 
     input_spec = HemisplitInputSpec
     output_spec = HemisplitOutputSpec
-    _cmd = 'hemisplit'
+    _cmd = "hemisplit"
 
     def _gen_filename(self, name):
         inputs = self.inputs.get()
@@ -928,14 +950,13 @@ def _gen_filename(self, name):
             return os.path.abspath(inputs[name])
 
         fileToSuffixMap = {
-            'outputLeftHemisphere': '.left.inner.cortex.dfs',
-            'outputLeftPialHemisphere': '.left.pial.cortex.dfs',
-            'outputRightHemisphere': '.right.inner.cortex.dfs',
-            'outputRightPialHemisphere': '.right.pial.cortex.dfs'
+            "outputLeftHemisphere": ".left.inner.cortex.dfs",
+            "outputLeftPialHemisphere": ".left.pial.cortex.dfs",
+            "outputRightHemisphere": ".right.inner.cortex.dfs",
+            "outputRightPialHemisphere": ".right.pial.cortex.dfs",
         }
         if name in fileToSuffixMap:
-            return getFileName(self.inputs.inputSurfaceFile,
-                               fileToSuffixMap[name])
+            return getFileName(self.inputs.inputSurfaceFile, fileToSuffixMap[name])
 
         return None
 
@@ -944,42 +965,48 @@ def _list_outputs(self):
 
 class SkullfinderInputSpec(CommandLineInputSpec):
-    inputMRIFile = File(mandatory=True, desc='input file', argstr='-i %s')
+    inputMRIFile = File(mandatory=True, desc="input file", argstr="-i %s")
     inputMaskFile = File(
         mandatory=True,
-        desc='A brain mask file, 8-bit image (0=non-brain, 255=brain)',
-        argstr='-m %s')
+        desc="A brain mask file, 8-bit image (0=non-brain, 255=brain)",
+        argstr="-m %s",
+    )
     outputLabelFile = File(
-        desc=
-        'output multi-colored label volume segmenting brain, scalp, inner skull & outer skull '
-        'If unspecified, output file name will be auto generated.',
-        argstr='-o %s',
-        genfile=True)
-    verbosity = traits.Int(desc='verbosity', argstr='-v %d')
-    lowerThreshold = traits.Int(
-        desc='Lower threshold for segmentation', argstr='-l %d')
-    upperThreshold = traits.Int(
-        desc='Upper threshold for segmentation', argstr='-u %d')
+        desc="output multi-colored label volume segmenting brain, scalp, inner skull & outer skull "
+        "If unspecified, output file name will be auto generated.",
+        argstr="-o %s",
+        genfile=True,
+    )
+    verbosity = traits.Int(desc="verbosity", argstr="-v %d")
+    lowerThreshold = traits.Int(desc="Lower threshold for segmentation", argstr="-l %d")
+    upperThreshold = traits.Int(desc="Upper threshold for segmentation", argstr="-u %d")
     surfaceFilePrefix = traits.Str(
-        desc='if specified, generate surface files for brain, skull, and scalp',
-        argstr='-s %s')
+        desc="if specified, generate surface files for brain, skull, and scalp",
+        argstr="-s %s",
+    )
     bgLabelValue = traits.Int(
-        desc='background label value (0-255)', argstr='--bglabel %d')
+        desc="background label value (0-255)", argstr="--bglabel %d"
+    )
     scalpLabelValue = traits.Int(
-        desc='scalp label value (0-255)', argstr='--scalplabel %d')
+        desc="scalp label value (0-255)", argstr="--scalplabel %d"
+    )
     skullLabelValue = traits.Int(
-        desc='skull label value (0-255)', argstr='--skulllabel %d')
+        desc="skull label value (0-255)", argstr="--skulllabel %d"
+    )
     spaceLabelValue = traits.Int(
-        desc='space label value (0-255)', argstr='--spacelabel %d')
+        desc="space label value (0-255)", argstr="--spacelabel %d"
+    )
     brainLabelValue = traits.Int(
-        desc='brain label value (0-255)', argstr='--brainlabel %d')
+        desc="brain label value (0-255)", argstr="--brainlabel %d"
+    )
     performFinalOpening = traits.Bool(
-        desc='perform a final opening operation on the scalp mask',
-        argstr='--finalOpening')
+        desc="perform a final opening operation on the scalp mask",
+        argstr="--finalOpening",
+    )
 
 
 class SkullfinderOutputSpec(TraitedSpec):
-    outputLabelFile = File(desc='path/name of label file')
+    outputLabelFile = File(desc="path/name of label file")
 
 
 class Skullfinder(CommandLine):
@@ -997,18 +1024,18 @@ class Skullfinder(CommandLine):
     >>> results = skullfinder.run() #doctest: +SKIP
 
     """
+
     input_spec = SkullfinderInputSpec
     output_spec = SkullfinderOutputSpec
-    _cmd = 'skullfinder'
+    _cmd = "skullfinder"
 
     def _gen_filename(self, name):
         inputs = self.inputs.get()
         if isdefined(inputs[name]):
             return os.path.abspath(inputs[name])
-        if name == 'outputLabelFile':
-            return getFileName(self.inputs.inputMRIFile,
-                               '.skullfinder.label.nii.gz')
+        if name == "outputLabelFile":
+            return getFileName(self.inputs.inputMRIFile, ".skullfinder.label.nii.gz")
 
         return None
 
@@ -1018,116 +1045,117 @@ def _list_outputs(self):
 
 class SVRegInputSpec(CommandLineInputSpec):
     subjectFilePrefix = traits.Str(
-        argstr='\'%s\'',
+        argstr="'%s'",
         mandatory=True,
         position=0,
-        desc=
-        'Absolute path and filename prefix of the subjects output from BrainSuite '
-        'Cortical Surface Extraction Sequence')
+        desc="Absolute path and filename prefix of the subjects output from BrainSuite "
+        "Cortical Surface Extraction Sequence",
+    )
     dataSinkDelay = traits.List(
         traits.Str,
-        argstr='%s',
-        desc=
-        'Connect datasink out_file to dataSinkDelay to delay execution of SVReg '
-        'until dataSink has finished sinking CSE outputs.'
-        'For use with parallel processing workflows including Brainsuites Cortical '
-        'Surface Extraction sequence (SVReg requires certain files from Brainsuite '
-        'CSE, which must all be in the pathway specified by subjectFilePrefix. see '
-        'http://brainsuite.org/processing/svreg/usage/ for list of required inputs '
+        argstr="%s",
+        desc="Connect datasink out_file to dataSinkDelay to delay execution of SVReg "
+        "until dataSink has finished sinking CSE outputs."
+        "For use with parallel processing workflows including Brainsuites Cortical "
+        "Surface Extraction sequence (SVReg requires certain files from Brainsuite "
+        "CSE, which must all be in the pathway specified by subjectFilePrefix. see "
+        "http://brainsuite.org/processing/svreg/usage/ for list of required inputs ",
     )
     atlasFilePrefix = traits.Str(
         position=1,
-        argstr='\'%s\'',
-        desc=
-        'Optional: Absolute Path and filename prefix of atlas files and labels to which '
-        'the subject will be registered. If unspecified, SVReg'
-        'will use its own included atlas files')
+        argstr="'%s'",
+        desc="Optional: Absolute Path and filename prefix of atlas files and labels to which "
If unspecified, SVReg" + "will use its own included atlas files", + ) iterations = traits.Int( - argstr='\'-H %d\'', - desc='Assigns a number of iterations in the intensity registration step.' - 'if unspecified, performs 100 iterations') + argstr="'-H %d'", + desc="Assigns a number of iterations in the intensity registration step." + "if unspecified, performs 100 iterations", + ) refineOutputs = traits.Bool( - argstr='\'-r\'', - desc='Refine outputs at the expense of more processing time.') + argstr="'-r'", desc="Refine outputs at the expense of more processing time." + ) skipToVolumeReg = traits.Bool( - argstr='\'-s\'', - desc= - 'If surface registration was already performed at an earlier time and the ' - 'user would not like to redo this step, then this flag may be used to skip ' - 'ahead to the volumetric registration. Necessary input files will need to ' - 'be present in the input directory called by the command.') + argstr="'-s'", + desc="If surface registration was already performed at an earlier time and the " + "user would not like to redo this step, then this flag may be used to skip " + "ahead to the volumetric registration. Necessary input files will need to " + "be present in the input directory called by the command.", + ) skipToIntensityReg = traits.Bool( - argstr='\'-p\'', - desc= - 'If the p-harmonic volumetric registration was already performed at an ' - 'earlier time and the user would not like to redo this step, then this ' - 'flag may be used to skip ahead to the intensity registration and ' - 'label transfer step.') + argstr="'-p'", + desc="If the p-harmonic volumetric registration was already performed at an " + "earlier time and the user would not like to redo this step, then this " + "flag may be used to skip ahead to the intensity registration and " + "label transfer step.", + ) useManualMaskFile = traits.Bool( - argstr='\'-cbm\'', - desc= - 'Can call a manually edited cerebrum mask to limit boundaries. Will ' - 'use file: subbasename.cerebrum.mask.nii.gz Make sure to correctly ' - 'replace your manually edited mask file in your input folder with the ' - 'correct subbasename.') + argstr="'-cbm'", + desc="Can call a manually edited cerebrum mask to limit boundaries. Will " + "use file: subbasename.cerebrum.mask.nii.gz Make sure to correctly " + "replace your manually edited mask file in your input folder with the " + "correct subbasename.", + ) curveMatchingInstructions = traits.Str( - argstr='\'-cur %s\'', - desc= - 'Used to take control of the curve matching process between the atlas ' - 'and subject. One can specify the name of the .dfc file and ' - 'the sulcal numbers <#sul> to be used as constraints. ' - 'example: curveMatchingInstructions = "subbasename.right.dfc 1 2 20"') + argstr="'-cur %s'", + desc="Used to take control of the curve matching process between the atlas " + "and subject. One can specify the name of the .dfc file and " + "the sulcal numbers <#sul> to be used as constraints. " + 'example: curveMatchingInstructions = "subbasename.right.dfc 1 2 20"', + ) useCerebrumMask = traits.Bool( - argstr='\'-C\'', - desc= - 'The cerebrum mask will be used for ' - 'masking the final labels instead of the default pial surface mask. ' - 'Every voxel will be labeled within the cerebrum mask regardless of ' - 'the boundaries of the pial surface.') + argstr="'-C'", + desc="The cerebrum mask will be used for " + "masking the final labels instead of the default pial surface mask. 
" + "Every voxel will be labeled within the cerebrum mask regardless of " + "the boundaries of the pial surface.", + ) pialSurfaceMaskDilation = traits.Int( - argstr='\'-D %d\'', - desc= - 'Cortical volume labels found in file output subbasename.svreg.label.nii.gz ' - 'find its boundaries by using the pial surface then dilating by 1 voxel. ' - 'Use this flag in order to control the number of pial surface mask dilation. ' - '(ie. -D 0 will assign no voxel dilation)') + argstr="'-D %d'", + desc="Cortical volume labels found in file output subbasename.svreg.label.nii.gz " + "find its boundaries by using the pial surface then dilating by 1 voxel. " + "Use this flag in order to control the number of pial surface mask dilation. " + "(ie. -D 0 will assign no voxel dilation)", + ) keepIntermediates = traits.Bool( - argstr='\'-k\'', - desc='Keep the intermediate files after the svreg sequence is complete.' + argstr="'-k'", + desc="Keep the intermediate files after the svreg sequence is complete.", ) - _XOR_verbosity = ('verbosity0', 'verbosity1', 'verbosity2') + _XOR_verbosity = ("verbosity0", "verbosity1", "verbosity2") verbosity0 = traits.Bool( - argstr='\'-v0\'', - xor=_XOR_verbosity, - desc='no messages will be reported') + argstr="'-v0'", xor=_XOR_verbosity, desc="no messages will be reported" + ) verbosity1 = traits.Bool( - argstr='\'-v1\'', + argstr="'-v1'", xor=_XOR_verbosity, - desc= - 'messages will be reported but not the iteration-wise detailed messages' + desc="messages will be reported but not the iteration-wise detailed messages", ) verbosity2 = traits.Bool( - argstr='\'v2\'', + argstr="'v2'", xor=_XOR_verbosity, - desc='all the messages, including per-iteration, will be displayed') + desc="all the messages, including per-iteration, will be displayed", + ) shortMessages = traits.Bool( - argstr='\'-gui\'', desc='Short messages instead of detailed messages') + argstr="'-gui'", desc="Short messages instead of detailed messages" + ) displayModuleName = traits.Bool( - argstr='\'-m\'', desc='Module name will be displayed in the messages') + argstr="'-m'", desc="Module name will be displayed in the messages" + ) displayTimestamps = traits.Bool( - argstr='\'-t\'', desc='Timestamps will be displayed in the messages') + argstr="'-t'", desc="Timestamps will be displayed in the messages" + ) skipVolumetricProcessing = traits.Bool( - argstr='\'-S\'', - desc= - 'Only surface registration and labeling will be performed. Volumetric ' - 'processing will be skipped.') + argstr="'-S'", + desc="Only surface registration and labeling will be performed. 
Volumetric " + "processing will be skipped.", + ) useMultiThreading = traits.Bool( - argstr='\'-P\'', - desc= - 'If multiple CPUs are present on the system, the code will try to use ' - 'multithreading to make the execution fast.') - useSingleThreading = traits.Bool( - argstr='\'-U\'', desc='Use single threaded mode.') + argstr="'-P'", + desc="If multiple CPUs are present on the system, the code will try to use " + "multithreading to make the execution fast.", + ) + useSingleThreading = traits.Bool(argstr="'-U'", desc="Use single threaded mode.") class SVReg(CommandLine): @@ -1157,64 +1185,66 @@ class SVReg(CommandLine): """ input_spec = SVRegInputSpec - _cmd = 'svreg.sh' + _cmd = "svreg.sh" def _format_arg(self, name, spec, value): - if name == 'subjectFilePrefix' or name == 'atlasFilePrefix' or name == 'curveMatchingInstructions': + if ( + name == "subjectFilePrefix" + or name == "atlasFilePrefix" + or name == "curveMatchingInstructions" + ): return spec.argstr % os.path.expanduser(value) - if name == 'dataSinkDelay': - return spec.argstr % '' + if name == "dataSinkDelay": + return spec.argstr % "" return super(SVReg, self)._format_arg(name, spec, value) class BDPInputSpec(CommandLineInputSpec): bfcFile = File( - argstr='%s', + argstr="%s", mandatory=True, position=0, - xor=['noStructuralRegistration'], - desc= - 'Specify absolute path to file produced by bfc. By default, bfc produces the file in ' - 'the format: prefix.bfc.nii.gz') + xor=["noStructuralRegistration"], + desc="Specify absolute path to file produced by bfc. By default, bfc produces the file in " + "the format: prefix.bfc.nii.gz", + ) noStructuralRegistration = traits.Bool( - argstr='--no-structural-registration', + argstr="--no-structural-registration", mandatory=True, position=0, - xor=['bfcFile'], - desc= - 'Allows BDP to work without any structural input. This can useful when ' - 'one is only interested in diffusion modelling part of BDP. With this ' - 'flag only fieldmap-based distortion correction is supported. ' - 'outPrefix can be used to specify fileprefix of the output ' - 'filenames. Change dwiMask to define region of interest ' - 'for diffusion modelling.') + xor=["bfcFile"], + desc="Allows BDP to work without any structural input. This can useful when " + "one is only interested in diffusion modelling part of BDP. With this " + "flag only fieldmap-based distortion correction is supported. " + "outPrefix can be used to specify fileprefix of the output " + "filenames. Change dwiMask to define region of interest " + "for diffusion modelling.", + ) inputDiffusionData = File( - argstr='--nii %s', + argstr="--nii %s", mandatory=True, position=-2, - desc= - 'Specifies the absolute path and filename of the input diffusion data in 4D NIfTI-1 ' - 'format. The flag must be followed by the filename. Only NIfTI-1 files ' - 'with extension .nii or .nii.gz are supported. Furthermore, either ' - 'bMatrixFile, or a combination of both bValueFile and diffusionGradientFile ' - 'must be used to provide the necessary b-matrices/b-values and gradient vectors. ' + desc="Specifies the absolute path and filename of the input diffusion data in 4D NIfTI-1 " + "format. The flag must be followed by the filename. Only NIfTI-1 files " + "with extension .nii or .nii.gz are supported. Furthermore, either " + "bMatrixFile, or a combination of both bValueFile and diffusionGradientFile " + "must be used to provide the necessary b-matrices/b-values and gradient vectors. 
", ) bMatrixFile = File( - argstr='--bmat %s', + argstr="--bmat %s", mandatory=True, - xor=['BVecBValPair'], + xor=["BVecBValPair"], position=-1, - desc= - 'Specifies the absolute path and filename of the file containing b-matrices for ' - 'diffusion-weighted scans. The flag must be followed by the filename. ' - 'This file must be a plain text file containing 3x3 matrices for each ' - 'diffusion encoding direction. It should contain zero matrices ' + desc="Specifies the absolute path and filename of the file containing b-matrices for " + "diffusion-weighted scans. The flag must be followed by the filename. " + "This file must be a plain text file containing 3x3 matrices for each " + "diffusion encoding direction. It should contain zero matrices " 'corresponding to b=0 images. This file usually has ".bmat" as its ' - 'extension, and can be used to provide BDP with the more-accurate ' - 'b-matrices as saved by some proprietary scanners. The b-matrices ' - 'specified by the file must be in the voxel coordinates of the input ' - 'diffusion weighted image (NIfTI file). In case b-matrices are not known/calculated, ' - 'bvec and .bval files can be used instead (see diffusionGradientFile and bValueFile). ' + "extension, and can be used to provide BDP with the more-accurate " + "b-matrices as saved by some proprietary scanners. The b-matrices " + "specified by the file must be in the voxel coordinates of the input " + "diffusion weighted image (NIfTI file). In case b-matrices are not known/calculated, " + "bvec and .bval files can be used instead (see diffusionGradientFile and bValueFile). ", ) BVecBValPair = traits.List( traits.Str, @@ -1222,452 +1252,465 @@ class BDPInputSpec(CommandLineInputSpec): maxlen=2, mandatory=True, position=-1, - xor=['bMatrixFile'], - argstr='--bvec %s --bval %s', - desc= - 'Must input a list containing first the BVector file, then the BValue file (both must be absolute paths)\n' - 'Example: bdp.inputs.BVecBValPair = [\'/directory/subdir/prefix.dwi.bvec\', \'/directory/subdir/prefix.dwi.bval\'] ' - 'The first item in the list specifies the filename of the file containing b-values for the ' - 'diffusion scan. The b-value file must be a plain-text file and usually has an ' - 'extension of .bval\n' - 'The second item in the list specifies the filename of the file containing the diffusion gradient ' - 'directions (specified in the voxel coordinates of the input ' - 'diffusion-weighted image)The b-vectors file must be a plain text file and ' - 'usually has an extension of .bvec ') + xor=["bMatrixFile"], + argstr="--bvec %s --bval %s", + desc="Must input a list containing first the BVector file, then the BValue file (both must be absolute paths)\n" + "Example: bdp.inputs.BVecBValPair = ['/directory/subdir/prefix.dwi.bvec', '/directory/subdir/prefix.dwi.bval'] " + "The first item in the list specifies the filename of the file containing b-values for the " + "diffusion scan. The b-value file must be a plain-text file and usually has an " + "extension of .bval\n" + "The second item in the list specifies the filename of the file containing the diffusion gradient " + "directions (specified in the voxel coordinates of the input " + "diffusion-weighted image)The b-vectors file must be a plain text file and " + "usually has an extension of .bvec ", + ) dataSinkDelay = traits.List( traits.Str, - argstr='%s', - desc= - 'For use in parallel processing workflows including Brainsuite Cortical ' - 'Surface Extraction sequence. 
     dataSinkDelay = traits.List(
         traits.Str,
-        argstr='%s',
-        desc=
-        'For use in parallel processing workflows including Brainsuite Cortical '
-        'Surface Extraction sequence. Connect datasink out_file to dataSinkDelay '
-        'to delay execution of BDP until dataSink has finished sinking outputs. '
-        'In particular, BDP may be run after BFC has finished. For more information '
-        'see http://brainsuite.org/processing/diffusion/pipeline/')
+        argstr="%s",
+        desc="For use in parallel processing workflows including Brainsuite Cortical "
+        "Surface Extraction sequence. Connect datasink out_file to dataSinkDelay "
+        "to delay execution of BDP until dataSink has finished sinking outputs. "
+        "In particular, BDP may be run after BFC has finished. For more information "
+        "see http://brainsuite.org/processing/diffusion/pipeline/",
+    )
     phaseEncodingDirection = traits.Enum(
-        'x',
-        'x-',
-        'y',
-        'y-',
-        'z',
-        'z-',
-        argstr='--dir=%s',
-        desc=
-        'Specifies the phase-encoding direction of the EPI (diffusion) images. '
-        'It is same as the dominant direction of distortion in the images. This '
-        'information is used to constrain the distortion correction along the '
-        'specified direction. Directions are represented by any one of x, x-, y, '
+        "x",
+        "x-",
+        "y",
+        "y-",
+        "z",
+        "z-",
+        argstr="--dir=%s",
+        desc="Specifies the phase-encoding direction of the EPI (diffusion) images. "
+        "It is the same as the dominant direction of distortion in the images. This "
+        "information is used to constrain the distortion correction along the "
+        "specified direction. Directions are represented by any one of x, x-, y, "
         'y-, z or z-. "x" direction increases towards the right side of the '
         'subject, while "x-" increases towards the left side of the subject. '
         'Similarly, "y" and "y-" are along the anterior-posterior direction of '
         'the subject, and "z" & "z-" are along the inferior-superior direction. '
         'When this flag is not used, BDP uses "y" as the default phase-encoding '
-        'direction. ')
+        "direction. ",
+    )
     echoSpacing = traits.Float(
-        argstr='--echo-spacing=%f',
-        desc=
-        'Sets the echo spacing to t seconds, which is used for fieldmap-based '
-        'distortion correction. This flag is required when using fieldmapCorrection'
+        argstr="--echo-spacing=%f",
+        desc="Sets the echo spacing to t seconds, which is used for fieldmap-based "
+        "distortion correction. This flag is required when using fieldmapCorrection",
     )
     bValRatioThreshold = traits.Float(
-        argstr='--bval-ratio-threshold %f',
-        desc=
-        'Sets a threshold which is used to determine b=0 images. When there are '
-        'no diffusion weighted image with b-value of zero, then BDP tries to use '
-        'diffusion weighted images with a low b-value in place of b=0 image. The '
-        'diffusion images with minimum b-value is used as b=0 image only if the '
-        'ratio of the maximum and minimum b-value is more than the specified '
-        'threshold. A lower value of threshold will allow diffusion images with '
-        'higher b-value to be used as b=0 image. The default value of this '
-        'threshold is set to 45, if this trait is not set. ')
+        argstr="--bval-ratio-threshold %f",
+        desc="Sets a threshold which is used to determine b=0 images. When there are "
+        "no diffusion-weighted images with a b-value of zero, BDP tries to use "
+        "diffusion-weighted images with a low b-value in place of the b=0 image. The "
+        "diffusion image with the minimum b-value is used as the b=0 image only if the "
+        "ratio of the maximum and minimum b-value is more than the specified "
+        "threshold. A lower value of threshold will allow diffusion images with "
+        "higher b-value to be used as b=0 image. The default value of this "
+        "threshold is set to 45, if this trait is not set. 
", + ) estimateTensors = traits.Bool( - argstr='--tensors', - desc= - 'Estimates diffusion tensors using a weighted log-linear estimation and ' - 'saves derived diffusion tensor parameters (FA, MD, axial, radial, L2, ' - 'L3). This is the default behavior if no diffusion modeling flags are ' - 'specified. The estimated diffusion tensors can be visualized by loading ' - 'the saved *.eig.nii.gz file in BrainSuite. BDP reports diffusivity (MD, ' - 'axial, radial, L2 and L3) in a unit which is reciprocal inverse of the ' - 'unit of input b-value. ') + argstr="--tensors", + desc="Estimates diffusion tensors using a weighted log-linear estimation and " + "saves derived diffusion tensor parameters (FA, MD, axial, radial, L2, " + "L3). This is the default behavior if no diffusion modeling flags are " + "specified. The estimated diffusion tensors can be visualized by loading " + "the saved *.eig.nii.gz file in BrainSuite. BDP reports diffusivity (MD, " + "axial, radial, L2 and L3) in a unit which is reciprocal inverse of the " + "unit of input b-value. ", + ) estimateODF_FRACT = traits.Bool( - argstr='--FRACT', - desc= - 'Estimates ODFs using the Funk-Radon and Cosine Transformation (FRACT). ' + argstr="--FRACT", + desc="Estimates ODFs using the Funk-Radon and Cosine Transformation (FRACT). " 'The outputs are saved in a separate directory with name "FRACT" and the ' - 'ODFs can be visualized by loading the saved ".odf" file in BrainSuite. ' + 'ODFs can be visualized by loading the saved ".odf" file in BrainSuite. ', ) estimateODF_FRT = traits.Bool( - argstr='--FRT', - desc= - 'Estimates ODFs using Funk-Radon Transformation (FRT). The coefficient ' + argstr="--FRT", + desc="Estimates ODFs using Funk-Radon Transformation (FRT). The coefficient " 'maps for ODFs are saved in a separate directory with name "FRT" and the ' 'ODFs can be visualized by loading the saved ".odf" file in BrainSuite. ' - 'The derived generalized-FA (GFA) maps are also saved in the output ' - 'directory. ') + "The derived generalized-FA (GFA) maps are also saved in the output " + "directory. ", + ) estimateODF_3DShore = traits.Float( - argstr='--3dshore --diffusion_time_ms %f', - desc='Estimates ODFs using 3Dshore. Pass in diffusion time, in ms') + argstr="--3dshore --diffusion_time_ms %f", + desc="Estimates ODFs using 3Dshore. Pass in diffusion time, in ms", + ) odfLambta = traits.Bool( - argstr='--odf-lambda ', - desc= - 'Sets the regularization parameter, lambda, of the Laplace-Beltrami ' - 'operator while estimating ODFs. The default value is set to 0.006 . This ' - 'can be used to set the appropriate regularization for the input ' - 'diffusion data. ') + argstr="--odf-lambda ", + desc="Sets the regularization parameter, lambda, of the Laplace-Beltrami " + "operator while estimating ODFs. The default value is set to 0.006 . This " + "can be used to set the appropriate regularization for the input " + "diffusion data. ", + ) t1Mask = File( - argstr='--t1-mask %s', - desc= - 'Specifies the filename of the brain-mask file for input T1-weighted ' - 'image. This mask can be same as the brain mask generated during ' - 'BrainSuite extraction sequence. For best results, the mask should not ' - 'include any extra-meningial tissues from T1-weighted image. The mask ' - 'must be in the same coordinates as input T1-weighted image (i.e. should ' - 'overlay correctly with input .bfc.nii.gz file in ' - 'BrainSuite). This mask is used for co-registration and defining brain ' - 'boundary for statistics computation. 
The mask can be generated and/or ' - 'edited in BrainSuite. In case outputDiffusionCoordinates is also ' - 'used, this mask is first transformed to diffusion coordinate and the ' - 'transformed mask is used for defining brain boundary in diffusion ' - 'coordinates. When t1Mask is not set, BDP will try to use ' - 'fileprefix>.mask.nii.gz as brain-mask. If .mask.nii.gz is ' - 'not found, then BDP will use the input .bfc.nii.gz itself as ' - 'mask (i.e. all non-zero voxels in .bfc.nii.gz is assumed to ' - 'constitute brain mask). ') + argstr="--t1-mask %s", + desc="Specifies the filename of the brain-mask file for input T1-weighted " + "image. This mask can be same as the brain mask generated during " + "BrainSuite extraction sequence. For best results, the mask should not " + "include any extra-meningial tissues from T1-weighted image. The mask " + "must be in the same coordinates as input T1-weighted image (i.e. should " + "overlay correctly with input .bfc.nii.gz file in " + "BrainSuite). This mask is used for co-registration and defining brain " + "boundary for statistics computation. The mask can be generated and/or " + "edited in BrainSuite. In case outputDiffusionCoordinates is also " + "used, this mask is first transformed to diffusion coordinate and the " + "transformed mask is used for defining brain boundary in diffusion " + "coordinates. When t1Mask is not set, BDP will try to use " + "fileprefix>.mask.nii.gz as brain-mask. If .mask.nii.gz is " + "not found, then BDP will use the input .bfc.nii.gz itself as " + "mask (i.e. all non-zero voxels in .bfc.nii.gz is assumed to " + "constitute brain mask). ", + ) dwiMask = File( - argstr='--dwi-mask %s', - desc= - 'Specifies the filename of the brain-mask file for diffusion data. This ' - 'mask is used only for co-registration purposes and can affect overall ' - 'quality of co-registration (see t1Mask for definition of brain mask ' - 'for statistics computation). The mask must be a 3D volume and should be ' - 'in the same coordinates as input Diffusion file/data (i.e. should ' - 'overlay correctly with input diffusion data in BrainSuite). For best ' - 'results, the mask should include only brain voxels (CSF voxels around ' - 'brain is also acceptable). When this flag is not used, BDP will generate ' - 'a pseudo mask using first b=0 image volume and would save it as ' - 'fileprefix>.dwi.RSA.mask.nii.gz. In case co-registration is not ' - 'accurate with automatically generated pseudo mask, BDP should be re-run ' - 'with a refined diffusion mask. The mask can be generated and/or edited ' - 'in BrainSuite. ') + argstr="--dwi-mask %s", + desc="Specifies the filename of the brain-mask file for diffusion data. This " + "mask is used only for co-registration purposes and can affect overall " + "quality of co-registration (see t1Mask for definition of brain mask " + "for statistics computation). The mask must be a 3D volume and should be " + "in the same coordinates as input Diffusion file/data (i.e. should " + "overlay correctly with input diffusion data in BrainSuite). For best " + "results, the mask should include only brain voxels (CSF voxels around " + "brain is also acceptable). When this flag is not used, BDP will generate " + "a pseudo mask using first b=0 image volume and would save it as " + "fileprefix>.dwi.RSA.mask.nii.gz. In case co-registration is not " + "accurate with automatically generated pseudo mask, BDP should be re-run " + "with a refined diffusion mask. The mask can be generated and/or edited " + "in BrainSuite. 
", + ) rigidRegMeasure = traits.Enum( - 'MI', - 'INVERSION', - 'BDP', - argstr='--rigid-reg-measure %s', - desc='Defines the similarity measure to be used for rigid registration. ' + "MI", + "INVERSION", + "BDP", + argstr="--rigid-reg-measure %s", + desc="Defines the similarity measure to be used for rigid registration. " 'Possible measures are "MI", "INVERSION" and "BDP". MI measure uses ' - 'normalized mutual information based cost function. INVERSION measure ' - 'uses simpler cost function based on sum of squared difference by ' - 'exploiting the approximate inverse-contrast relationship in T1- and ' - 'T2-weighted images. BDP measure combines MI and INVERSION. It starts ' - 'with INVERSION measure and refines the result with MI measure. BDP is ' - 'the default measure when this trait is not set. ') + "normalized mutual information based cost function. INVERSION measure " + "uses simpler cost function based on sum of squared difference by " + "exploiting the approximate inverse-contrast relationship in T1- and " + "T2-weighted images. BDP measure combines MI and INVERSION. It starts " + "with INVERSION measure and refines the result with MI measure. BDP is " + "the default measure when this trait is not set. ", + ) dcorrRegMeasure = traits.Enum( - 'MI', - 'INVERSION-EPI', - 'INVERSION-T1', - 'INVERSION-BOTH', - 'BDP', - argstr='--dcorr-reg-method %s', - desc='Defines the method for registration-based distortion correction. ' + "MI", + "INVERSION-EPI", + "INVERSION-T1", + "INVERSION-BOTH", + "BDP", + argstr="--dcorr-reg-method %s", + desc="Defines the method for registration-based distortion correction. " 'Possible methods are "MI", "INVERSION-EPI", "INVERSION-T1", ' 'INVERSION-BOTH", and "BDP". MI method uses normalized mutual ' - 'information based cost-function while estimating the distortion field. ' - 'INVERSION-based method uses simpler cost function based on sum of ' - 'squared difference by exploiting the known approximate contrast ' - 'relationship in T1- and T2-weighted images. T2-weighted EPI is inverted ' - 'when INVERSION-EPI is used; T1-image is inverted when INVERSION-T1 is ' - 'used; and both are inverted when INVERSION-BOTH is used. BDP method add ' - 'the MI-based refinement after the correction using INVERSION-BOTH ' - 'method. BDP is the default method when this trait is not set. ') + "information based cost-function while estimating the distortion field. " + "INVERSION-based method uses simpler cost function based on sum of " + "squared difference by exploiting the known approximate contrast " + "relationship in T1- and T2-weighted images. T2-weighted EPI is inverted " + "when INVERSION-EPI is used; T1-image is inverted when INVERSION-T1 is " + "used; and both are inverted when INVERSION-BOTH is used. BDP method add " + "the MI-based refinement after the correction using INVERSION-BOTH " + "method. BDP is the default method when this trait is not set. ", + ) dcorrWeight = traits.Float( - argstr='--dcorr-regularization-wt %f', - desc= - 'Sets the (scalar) weighting parameter for regularization penalty in ' - 'registration-based distortion correction. Set this trait to a single, non-negative ' - 'number which specifies the weight. A large regularization weight encourages ' - 'smoother distortion field at the cost of low measure of image similarity ' - 'after distortion correction. On the other hand, a smaller regularization ' - 'weight can result into higher measure of image similarity but with ' - 'unrealistic and unsmooth distortion field. 
A weight of 0.5 would reduce ' - 'the penalty to half of the default regularization penalty (By default, this weight ' - 'is set to 1.0). Similarly, a weight of 2.0 ' - 'would increase the penalty to twice of the default penalty. ') + argstr="--dcorr-regularization-wt %f", + desc="Sets the (scalar) weighting parameter for regularization penalty in " + "registration-based distortion correction. Set this trait to a single, non-negative " + "number which specifies the weight. A large regularization weight encourages " + "smoother distortion field at the cost of low measure of image similarity " + "after distortion correction. On the other hand, a smaller regularization " + "weight can result into higher measure of image similarity but with " + "unrealistic and unsmooth distortion field. A weight of 0.5 would reduce " + "the penalty to half of the default regularization penalty (By default, this weight " + "is set to 1.0). Similarly, a weight of 2.0 " + "would increase the penalty to twice of the default penalty. ", + ) skipDistortionCorr = traits.Bool( - argstr='--no-distortion-correction', - desc='Skips distortion correction completely and performs only a rigid ' - 'registration of diffusion and T1-weighted image. This can be useful when ' - 'the input diffusion images do not have any distortion or they have been ' - 'corrected for distortion. ') + argstr="--no-distortion-correction", + desc="Skips distortion correction completely and performs only a rigid " + "registration of diffusion and T1-weighted image. This can be useful when " + "the input diffusion images do not have any distortion or they have been " + "corrected for distortion. ", + ) skipNonuniformityCorr = traits.Bool( - argstr='--no-nonuniformity-correction', - desc='Skips intensity non-uniformity correction in b=0 image for ' - 'registration-based distortion correction. The intensity non-uniformity ' - 'correction does not affect any diffusion modeling. ') + argstr="--no-nonuniformity-correction", + desc="Skips intensity non-uniformity correction in b=0 image for " + "registration-based distortion correction. The intensity non-uniformity " + "correction does not affect any diffusion modeling. ", + ) skipIntensityCorr = traits.Bool( - argstr='--no-intensity-correction', - xor=['fieldmapCorrectionMethod'], - desc= - 'Disables intensity correction when performing distortion correction. ' - 'Intensity correction can change the noise distribution in the corrected ' - 'image, but it does not affect estimated diffusion parameters like FA, ' - 'etc. ') + argstr="--no-intensity-correction", + xor=["fieldmapCorrectionMethod"], + desc="Disables intensity correction when performing distortion correction. " + "Intensity correction can change the noise distribution in the corrected " + "image, but it does not affect estimated diffusion parameters like FA, " + "etc. ", + ) fieldmapCorrection = File( - argstr='--fieldmap-correction %s', - requires=['echoSpacing'], - desc= - 'Use an acquired fieldmap for distortion correction. The fieldmap must ' - 'have units of radians/second. Specify the filename of the fieldmap file. ' - 'The field of view (FOV) of the fieldmap scan must cover the FOV of the diffusion ' - 'scan. BDP will try to check the overlap of the FOV of the two scans and ' + argstr="--fieldmap-correction %s", + requires=["echoSpacing"], + desc="Use an acquired fieldmap for distortion correction. The fieldmap must " + "have units of radians/second. Specify the filename of the fieldmap file. 
" + "The field of view (FOV) of the fieldmap scan must cover the FOV of the diffusion " + "scan. BDP will try to check the overlap of the FOV of the two scans and " 'will issue a warning/error if the diffusion scan"s FOV is not fully ' 'covered by the fieldmap"s FOV. BDP uses all of the information saved in ' - 'the NIfTI header to compute the FOV. If you get this error and think ' - 'that it is incorrect, then it can be suppressed using the flag ' - 'ignore-fieldmap-FOV. Neither the image matrix size nor the imaging ' - 'grid resolution of the fieldmap needs to be the same as that of the ' - 'diffusion scan, but the fieldmap must be pre-registred to the diffusion ' - 'scan. BDP does NOT align the fieldmap to the diffusion scan, nor does it ' - 'check the alignment of the fieldmap and diffusion scans. Only NIfTI ' - 'files with extension of .nii or .nii.gz are supported. Fieldmap-based ' - 'distortion correction also requires the echoSpacing. Also ' - 'fieldmapCorrectionMethod allows you to define method for ' - 'distortion correction. least squares is the default method. ') + "the NIfTI header to compute the FOV. If you get this error and think " + "that it is incorrect, then it can be suppressed using the flag " + "ignore-fieldmap-FOV. Neither the image matrix size nor the imaging " + "grid resolution of the fieldmap needs to be the same as that of the " + "diffusion scan, but the fieldmap must be pre-registred to the diffusion " + "scan. BDP does NOT align the fieldmap to the diffusion scan, nor does it " + "check the alignment of the fieldmap and diffusion scans. Only NIfTI " + "files with extension of .nii or .nii.gz are supported. Fieldmap-based " + "distortion correction also requires the echoSpacing. Also " + "fieldmapCorrectionMethod allows you to define method for " + "distortion correction. least squares is the default method. ", + ) fieldmapCorrectionMethod = traits.Enum( - 'pixelshift', - 'leastsq', - xor=['skipIntensityCorr'], - argstr='--fieldmap-correction-method %s', - desc='Defines the distortion correction method while using fieldmap. ' + "pixelshift", + "leastsq", + xor=["skipIntensityCorr"], + argstr="--fieldmap-correction-method %s", + desc="Defines the distortion correction method while using fieldmap. " 'Possible methods are "pixelshift" and "leastsq". leastsq is the default ' - 'method when this flag is not used. Pixel-shift (pixelshift) method uses ' - 'image interpolation to un-distort the distorted diffusion images. Least ' - 'squares (leastsq) method uses a physical model of distortion which is ' - 'more accurate (and more computationally expensive) than pixel-shift ' - 'method.') + "method when this flag is not used. Pixel-shift (pixelshift) method uses " + "image interpolation to un-distort the distorted diffusion images. Least " + "squares (leastsq) method uses a physical model of distortion which is " + "more accurate (and more computationally expensive) than pixel-shift " + "method.", + ) ignoreFieldmapFOV = traits.Bool( - argstr='--ignore-fieldmap-fov', - desc= - 'Supresses the error generated by an insufficient field of view of the ' - 'input fieldmap and continues with the processing. It is useful only when ' - 'used with fieldmap-based distortion correction. See ' - 'fieldmap-correction for a detailed explanation. ') + argstr="--ignore-fieldmap-fov", + desc="Supresses the error generated by an insufficient field of view of the " + "input fieldmap and continues with the processing. 
     fieldmapSmooth = traits.Float(
-        argstr='--fieldmap-smooth3=%f',
-        desc='Applies 3D Gaussian smoothing with a standard deviation of S '
-        'millimeters (mm) to the input fieldmap before applying distortion '
-        'correction. This trait is only useful with '
-        'fieldmapCorrection. Skip this trait for no smoothing. ')
+        argstr="--fieldmap-smooth3=%f",
+        desc="Applies 3D Gaussian smoothing with a standard deviation of S "
+        "millimeters (mm) to the input fieldmap before applying distortion "
+        "correction. This trait is only useful with "
+        "fieldmapCorrection. Skip this trait for no smoothing. ",
+    )
     transformDiffusionVolume = File(
-        argstr='--transform-diffusion-volume %s',
-        desc='This flag allows to define custom volumes in diffusion coordinate '
-        'which would be transformed into T1 coordinate in a rigid fashion. The '
-        'flag must be followed by the name of either a NIfTI file or of a folder '
-        'that contains one or more NIfTI files. All of the files must be in '
-        'diffusion coordinate, i.e. the files should overlay correctly with the '
-        'diffusion scan in BrainSuite. Only NIfTI files with an extension of .nii '
-        'or .nii.gz are supported. The transformed files are written to the '
+        argstr="--transform-diffusion-volume %s",
+        desc="This flag allows one to define custom volumes in diffusion coordinate "
+        "which would be transformed into T1 coordinate in a rigid fashion. The "
+        "flag must be followed by the name of either a NIfTI file or of a folder "
+        "that contains one or more NIfTI files. All of the files must be in "
+        "diffusion coordinate, i.e. the files should overlay correctly with the "
+        "diffusion scan in BrainSuite. Only NIfTI files with an extension of .nii "
+        "or .nii.gz are supported. The transformed files are written to the "
         'output directory with suffix ".T1_coord" in the filename and will not be '
-        'corrected for distortion, if any. The trait transformInterpolation can '
-        'be used to define the type of interpolation that would be used (default '
-        'is set to linear). If you are attempting to transform a label file or '
+        "corrected for distortion, if any. The trait transformInterpolation can "
+        "be used to define the type of interpolation that would be used (default "
+        "is set to linear). If you are attempting to transform a label file or "
         'mask file, use "nearest" interpolation method with transformInterpolation. '
-        'See also transformT1Volume and transformInterpolation')
+        "See also transformT1Volume and transformInterpolation",
+    )
     transformT1Volume = File(
-        argstr='--transform-t1-volume %s',
-        desc='Same as transformDiffusionVolume except that files specified must '
-        'be in T1 coordinate, i.e. the files should overlay correctly with the '
-        'input .bfc.nii.gz files in BrainSuite. BDP transforms these '
-        'data/images from T1 coordinate to diffusion coordinate. The transformed '
+        argstr="--transform-t1-volume %s",
+        desc="Same as transformDiffusionVolume except that files specified must "
+        "be in T1 coordinate, i.e. the files should overlay correctly with the "
+        "input .bfc.nii.gz files in BrainSuite. BDP transforms these "
+        "data/images from T1 coordinate to diffusion coordinate. The transformed "
         'files are written to the output directory with suffix ".D_coord" in the '
-        'filename. See also transformDiffusionVolume and transformInterpolation. '
+        "filename. 
See also transformDiffusionVolume and transformInterpolation. ", ) transformInterpolation = traits.Enum( - 'linear', - 'nearest', - 'cubic', - 'spline', - argstr='--transform-interpolation %s', - desc= - 'Defines the type of interpolation method which would be used while ' - 'transforming volumes defined by transformT1Volume and ' + "linear", + "nearest", + "cubic", + "spline", + argstr="--transform-interpolation %s", + desc="Defines the type of interpolation method which would be used while " + "transforming volumes defined by transformT1Volume and " 'transformDiffusionVolume. Possible methods are "linear", "nearest", ' - '"cubic" and "spline". By default, "linear" interpolation is used. ') + '"cubic" and "spline". By default, "linear" interpolation is used. ', + ) transformT1Surface = File( - argstr='--transform-t1-surface %s', - desc='Similar to transformT1Volume, except that this flag allows ' - 'transforming surfaces (instead of volumes) in T1 coordinate into ' - 'diffusion coordinate in a rigid fashion. The flag must be followed by ' - 'the name of either a .dfs file or of a folder that contains one or more ' - 'dfs files. All of the files must be in T1 coordinate, i.e. the files ' - 'should overlay correctly with the T1-weighted scan in BrainSuite. The ' - 'transformed files are written to the output directory with suffix ' - 'D_coord" in the filename. ') + argstr="--transform-t1-surface %s", + desc="Similar to transformT1Volume, except that this flag allows " + "transforming surfaces (instead of volumes) in T1 coordinate into " + "diffusion coordinate in a rigid fashion. The flag must be followed by " + "the name of either a .dfs file or of a folder that contains one or more " + "dfs files. All of the files must be in T1 coordinate, i.e. the files " + "should overlay correctly with the T1-weighted scan in BrainSuite. The " + "transformed files are written to the output directory with suffix " + 'D_coord" in the filename. ', + ) transformDiffusionSurface = File( - argstr='--transform-diffusion-surface %s', - desc='Same as transformT1Volume, except that the .dfs files specified ' - 'must be in diffusion coordinate, i.e. the surface files should overlay ' - 'correctly with the diffusion scan in BrainSuite. The transformed files ' + argstr="--transform-diffusion-surface %s", + desc="Same as transformT1Volume, except that the .dfs files specified " + "must be in diffusion coordinate, i.e. the surface files should overlay " + "correctly with the diffusion scan in BrainSuite. The transformed files " 'are written to the output directory with suffix ".T1_coord" in the ' - 'filename. See also transformT1Volume. ') + "filename. See also transformT1Volume. ", + ) transformDataOnly = traits.Bool( - argstr='--transform-data-only', - desc= - 'Skip all of the processing (co-registration, distortion correction and ' - 'tensor/ODF estimation) and directly start transformation of defined ' - 'custom volumes, mask and labels (using transformT1Volume, ' - 'transformDiffusionVolume, transformT1Surface, ' - 'transformDiffusionSurface, customDiffusionLabel, ' - 'customT1Label). This flag is useful when BDP was previously run on a ' - 'subject (or ) and some more data (volumes, mask or labels) ' - 'need to be transformed across the T1-diffusion coordinate spaces. This ' - 'assumes that all the necessary files were generated earlier and all of ' - 'the other flags MUST be used in the same way as they were in the initial ' - 'BDP run that processed the data. 
') + argstr="--transform-data-only", + desc="Skip all of the processing (co-registration, distortion correction and " + "tensor/ODF estimation) and directly start transformation of defined " + "custom volumes, mask and labels (using transformT1Volume, " + "transformDiffusionVolume, transformT1Surface, " + "transformDiffusionSurface, customDiffusionLabel, " + "customT1Label). This flag is useful when BDP was previously run on a " + "subject (or ) and some more data (volumes, mask or labels) " + "need to be transformed across the T1-diffusion coordinate spaces. This " + "assumes that all the necessary files were generated earlier and all of " + "the other flags MUST be used in the same way as they were in the initial " + "BDP run that processed the data. ", + ) generateStats = traits.Bool( - argstr='--generate-stats', - desc= - 'Generate ROI-wise statistics of estimated diffusion tensor parameters. ' - 'Units of the reported statistics are same as that of the estimated ' - 'tensor parameters (see estimateTensors). Mean, variance, and voxel counts of ' - 'white matter(WM), grey matter(GM), and both WM and GM combined are ' - 'written for each estimated parameter in a separate comma-seperated value ' - 'csv) file. BDP uses the ROI labels generated by Surface-Volume ' - 'Registration (SVReg) in the BrainSuite extraction sequence. ' - 'Specifically, it looks for labels saved in either ' - 'fileprefix>.svreg.corr.label.nii.gz or .svreg.label.nii.gz. ' - 'In case both files are present, only the first file is used. Also see ' - 'customDiffusionLabel and customT1Label for specifying your own ' - 'ROIs. It is also possible to forgo computing the SVReg ROI-wise ' - 'statistics and only compute stats with custom labels if SVReg label is ' - 'missing. BDP also transfers (and saves) the label/mask files to ' - 'appropriate coordinates before computing statistics. Also see ' - 'outputDiffusionCoordinates for outputs in diffusion coordinate and ' - 'forcePartialROIStats for an important note about field of view of ' - 'diffusion and T1-weighted scans. ') + argstr="--generate-stats", + desc="Generate ROI-wise statistics of estimated diffusion tensor parameters. " + "Units of the reported statistics are same as that of the estimated " + "tensor parameters (see estimateTensors). Mean, variance, and voxel counts of " + "white matter(WM), grey matter(GM), and both WM and GM combined are " + "written for each estimated parameter in a separate comma-seperated value " + "csv) file. BDP uses the ROI labels generated by Surface-Volume " + "Registration (SVReg) in the BrainSuite extraction sequence. " + "Specifically, it looks for labels saved in either " + "fileprefix>.svreg.corr.label.nii.gz or .svreg.label.nii.gz. " + "In case both files are present, only the first file is used. Also see " + "customDiffusionLabel and customT1Label for specifying your own " + "ROIs. It is also possible to forgo computing the SVReg ROI-wise " + "statistics and only compute stats with custom labels if SVReg label is " + "missing. BDP also transfers (and saves) the label/mask files to " + "appropriate coordinates before computing statistics. Also see " + "outputDiffusionCoordinates for outputs in diffusion coordinate and " + "forcePartialROIStats for an important note about field of view of " + "diffusion and T1-weighted scans. 
", + ) onlyStats = traits.Bool( - argstr='--generate-only-stats', - desc= - 'Skip all of the processing (co-registration, distortion correction and ' - 'tensor/ODF estimation) and directly start computation of statistics. ' - 'This flag is useful when BDP was previously run on a subject (or ' - 'fileprefix>) and statistics need to be (re-)computed later. This ' - 'assumes that all the necessary files were generated earlier. All of the ' - 'other flags MUST be used in the same way as they were in the initial BDP ' - 'run that processed the data. ') + argstr="--generate-only-stats", + desc="Skip all of the processing (co-registration, distortion correction and " + "tensor/ODF estimation) and directly start computation of statistics. " + "This flag is useful when BDP was previously run on a subject (or " + "fileprefix>) and statistics need to be (re-)computed later. This " + "assumes that all the necessary files were generated earlier. All of the " + "other flags MUST be used in the same way as they were in the initial BDP " + "run that processed the data. ", + ) forcePartialROIStats = traits.Bool( - argstr='--force-partial-roi-stats', - desc= - 'The field of view (FOV) of the diffusion and T1-weighted scans may ' - 'differ significantly in some situations. This may result in partial ' - 'acquisitions of some ROIs in the diffusion scan. By default, BDP does ' - 'not compute statistics for partially acquired ROIs and shows warnings. ' - 'This flag forces computation of statistics for all ROIs, including those ' - 'which are partially acquired. When this flag is used, number of missing ' - 'voxels are also reported for each ROI in statistics files. Number of ' - 'missing voxels are reported in the same coordinate system as the ' - 'statistics file. ') + argstr="--force-partial-roi-stats", + desc="The field of view (FOV) of the diffusion and T1-weighted scans may " + "differ significantly in some situations. This may result in partial " + "acquisitions of some ROIs in the diffusion scan. By default, BDP does " + "not compute statistics for partially acquired ROIs and shows warnings. " + "This flag forces computation of statistics for all ROIs, including those " + "which are partially acquired. When this flag is used, number of missing " + "voxels are also reported for each ROI in statistics files. Number of " + "missing voxels are reported in the same coordinate system as the " + "statistics file. ", + ) customDiffusionLabel = File( - argstr='--custom-diffusion-label %s', - desc= - 'BDP supports custom ROIs in addition to those generated by BrainSuite ' - 'SVReg) for ROI-wise statistics calculation. The flag must be followed ' - 'by the name of either a file (custom ROI file) or of a folder that ' - 'contains one or more ROI files. All of the files must be in diffusion ' - 'coordinate, i.e. the label files should overlay correctly with the ' - 'diffusion scan in BrainSuite. These input label files are also ' - 'transferred (and saved) to T1 coordinate for statistics in T1 ' - 'coordinate. BDP uses nearest-neighborhood interpolation for this ' - 'transformation. Only NIfTI files, with an extension of .nii or .nii.gz ' - 'are supported. In order to avoid confusion with other ROI IDs in the ' - 'statistic files, a 5-digit ROI ID is generated for each custom label ' - 'found and the mapping of ID to label file is saved in the file ' - 'fileprefix>.BDP_ROI_MAP.xml. Custom label files can also be generated ' - 'by using the label painter tool in BrainSuite. 
+        argstr="--custom-diffusion-label %s",
+        desc="BDP supports custom ROIs in addition to those generated by BrainSuite "
+        "(SVReg) for ROI-wise statistics calculation. The flag must be followed "
+        "by the name of either a file (custom ROI file) or of a folder that "
+        "contains one or more ROI files. All of the files must be in diffusion "
+        "coordinate, i.e. the label files should overlay correctly with the "
+        "diffusion scan in BrainSuite. These input label files are also "
+        "transferred (and saved) to T1 coordinate for statistics in T1 "
+        "coordinate. BDP uses nearest-neighborhood interpolation for this "
+        "transformation. Only NIfTI files, with an extension of .nii or .nii.gz "
+        "are supported. In order to avoid confusion with other ROI IDs in the "
+        "statistic files, a 5-digit ROI ID is generated for each custom label "
+        "found and the mapping of ID to label file is saved in the file "
+        "<fileprefix>.BDP_ROI_MAP.xml. Custom label files can also be generated "
+        "by using the label painter tool in BrainSuite. See also "
+        "customLabelXML",
+    )
     customT1Label = File(
-        argstr='--custom-t1-label %s',
-        desc='Same as customDiffusionLabelexcept that the label files specified '
-        'must be in T1 coordinate, i.e. the label files should overlay correctly '
-        'with the T1-weighted scan in BrainSuite. If the trait '
-        'outputDiffusionCoordinates is also used then these input label files '
-        'are also transferred (and saved) to diffusion coordinate for statistics '
-        'in diffusion coordinate. BDP uses nearest-neighborhood interpolation for '
-        'this transformation. See also customLabelXML. ')
+        argstr="--custom-t1-label %s",
+        desc="Same as customDiffusionLabel except that the label files specified "
+        "must be in T1 coordinate, i.e. the label files should overlay correctly "
+        "with the T1-weighted scan in BrainSuite. If the trait "
+        "outputDiffusionCoordinates is also used then these input label files "
+        "are also transferred (and saved) to diffusion coordinate for statistics "
+        "in diffusion coordinate. BDP uses nearest-neighborhood interpolation for "
+        "this transformation. See also customLabelXML. ",
+    )
     customLabelXML = File(
-        argstr='--custom-label-xml %s',
-        desc=
-        'BrainSuite saves a descriptions of the SVReg labels (ROI name, ID, '
-        'color, and description) in an .xml file '
+        argstr="--custom-label-xml %s",
+        desc="BrainSuite saves a description of the SVReg labels (ROI name, ID, "
+        "color, and description) in an .xml file "
         'brainsuite_labeldescription.xml). BDP uses the ROI ID"s from this xml '
-        'file to report statistics. This flag allows for the use of a custom '
-        'label description xml file. The flag must be followed by an xml '
-        'filename. This can be useful when you want to limit the ROIs for which '
-        'you compute statistics. You can also use custom xml files to name your '
+        "file to report statistics. This flag allows for the use of a custom "
+        "label description xml file. The flag must be followed by an xml "
+        "filename. This can be useful when you want to limit the ROIs for which "
+        "you compute statistics. You can also use custom xml files to name your "
         'own ROIs (assign ID"s) for custom labels. BrainSuite can save a label '
-        'description in .xml format after using the label painter tool to create '
+        "description in .xml format after using the label painter tool to create "
         'a ROI label. 
The xml file MUST be in the same format as BrainSuite"s ' - 'label description file (see brainsuite_labeldescription.xml for an ' - 'example). When this flag is used, NO 5-digit ROI ID is generated for ' - 'custom label files and NO Statistics will be calculated for ROIs not ' - 'identified in the custom xml file. See also customDiffusionLabel and ' - 'customT1Label.') + "label description file (see brainsuite_labeldescription.xml for an " + "example). When this flag is used, NO 5-digit ROI ID is generated for " + "custom label files and NO Statistics will be calculated for ROIs not " + "identified in the custom xml file. See also customDiffusionLabel and " + "customT1Label.", + ) outputSubdir = traits.Str( - argstr='--output-subdir %s', - desc= - 'By default, BDP writes out all the output (and intermediate) files in ' - 'the same directory (or folder) as the BFC file. This flag allows to ' - 'specify a sub-directory name in which output (and intermediate) files ' - 'would be written. BDP will create the sub-directory in the same ' - 'directory as BFC file. should be the name of the ' - 'sub-directory without any path. This can be useful to organize all ' - 'outputs generated by BDP in a separate sub-directory. ') + argstr="--output-subdir %s", + desc="By default, BDP writes out all the output (and intermediate) files in " + "the same directory (or folder) as the BFC file. This flag allows to " + "specify a sub-directory name in which output (and intermediate) files " + "would be written. BDP will create the sub-directory in the same " + "directory as BFC file. should be the name of the " + "sub-directory without any path. This can be useful to organize all " + "outputs generated by BDP in a separate sub-directory. ", + ) outputDiffusionCoordinates = traits.Bool( - argstr='--output-diffusion-coordinate', - desc= - 'Enables estimation of diffusion tensors and/or ODFs (and statistics if ' - 'applicable) in the native diffusion coordinate in addition to the ' - 'default T1-coordinate. All native diffusion coordinate files are saved ' + argstr="--output-diffusion-coordinate", + desc="Enables estimation of diffusion tensors and/or ODFs (and statistics if " + "applicable) in the native diffusion coordinate in addition to the " + "default T1-coordinate. All native diffusion coordinate files are saved " 'in a seperate folder named "diffusion_coord_outputs". In case statistics ' - 'computation is required, it will also transform/save all label/mask ' - 'files required to diffusion coordinate (see generateStats for ' - 'details). ') + "computation is required, it will also transform/save all label/mask " + "files required to diffusion coordinate (see generateStats for " + "details). ", + ) flagConfigFile = File( - argstr='--flag-conf-file %s', - desc= - 'Uses the defined file to specify BDP flags which can be useful for ' - 'batch processing. A flag configuration file is a plain text file which ' + argstr="--flag-conf-file %s", + desc="Uses the defined file to specify BDP flags which can be useful for " + "batch processing. A flag configuration file is a plain text file which " 'can contain any number of BDP"s optional flags (and their parameters) ' - 'separated by whitespace. Everything coming after # until end-of-line is ' - 'treated as comment and is ignored. If a flag is defined in configuration ' - 'file and is also specified in the command used to run BDP, then the ' - 'later get preference and overrides the definition in configuration ' - 'file. ') + "separated by whitespace. 
Everything coming after # until end-of-line is "
+        "treated as comment and is ignored. If a flag is defined in configuration "
+        "file and is also specified in the command used to run BDP, then the "
+        "latter gets preference and overrides the definition in configuration "
+        "file. ",
+    )
     outPrefix = traits.Str(
-        argstr='--output-fileprefix %s',
-        desc='Specifies output fileprefix when noStructuralRegistration is '
-        'used. The fileprefix can not start with a dash (-) and should be a '
-        'simple string reflecting the absolute path to desired location, along with outPrefix. When this flag is '
-        'not specified (and noStructuralRegistration is used) then the output '
-        'files have same file-base as the input diffusion file. This trait is '
-        'ignored when noStructuralRegistration is not used. ')
+        argstr="--output-fileprefix %s",
+        desc="Specifies output fileprefix when noStructuralRegistration is "
+        "used. The fileprefix cannot start with a dash (-) and should be a "
+        "simple string reflecting the absolute path to the desired location, along with outPrefix. When this flag is "
+        "not specified (and noStructuralRegistration is used) then the output "
+        "files have the same file-base as the input diffusion file. This trait is "
+        "ignored when noStructuralRegistration is not used. ",
+    )
     threads = traits.Int(
-        argstr='--threads=%d',
-        desc='Sets the number of parallel process threads which can be used for '
-        'computations to N, where N must be an integer. Default value of N is '
-        ' ')
+        argstr="--threads=%d",
+        desc="Sets the number of parallel process threads which can be used for "
+        "computations to N, where N must be an integer. Default value of N is "
+        " ",
+    )
     lowMemory = traits.Bool(
-        argstr='--low-memory',
-        desc='Activates low-memory mode. This will run the registration-based '
-        'distortion correction at a lower resolution, which could result in a '
-        'less-accurate correction. This should only be used when no other '
-        'alternative is available. ')
+        argstr="--low-memory",
+        desc="Activates low-memory mode. This will run the registration-based "
+        "distortion correction at a lower resolution, which could result in a "
+        "less-accurate correction. This should only be used when no other "
+        "alternative is available. ",
+    )
     ignoreMemory = traits.Bool(
-        argstr='--ignore-memory',
-        desc='Deactivates the inbuilt memory checks and forces BDP to run '
-        'registration-based distortion correction at its default resolution even '
-        'on machines with a low amount of memory. This may result in an '
-        'out-of-memory error when BDP cannot allocate sufficient memory. ')
+        argstr="--ignore-memory",
+        desc="Deactivates the inbuilt memory checks and forces BDP to run "
+        "registration-based distortion correction at its default resolution even "
+        "on machines with a low amount of memory. This may result in an "
+        "out-of-memory error when BDP cannot allocate sufficient memory. 
", + ) class BDP(CommandLine): @@ -1696,21 +1739,22 @@ class BDP(CommandLine): """ input_spec = BDPInputSpec - _cmd = 'bdp.sh' + _cmd = "bdp.sh" def _format_arg(self, name, spec, value): - if name == 'BVecBValPair': + if name == "BVecBValPair": return spec.argstr % (value[0], value[1]) - if name == 'dataSinkDelay': - return spec.argstr % '' + if name == "dataSinkDelay": + return spec.argstr % "" return super(BDP, self)._format_arg(name, spec, value) class ThicknessPVCInputSpec(CommandLineInputSpec): subjectFilePrefix = traits.Str( - argstr='%s', + argstr="%s", mandatory=True, - desc='Absolute path and filename prefix of the subject data') + desc="Absolute path and filename prefix of the subject data", + ) class ThicknessPVC(CommandLine): @@ -1737,7 +1781,7 @@ class ThicknessPVC(CommandLine): """ input_spec = ThicknessPVCInputSpec - _cmd = 'thicknessPVC.sh' + _cmd = "thicknessPVC.sh" # used to generate file names for outputs @@ -1748,7 +1792,7 @@ def getFileName(inputName, suffix): dotRegex = regex.compile("[^.]+") # extract between last slash and first period inputNoExtension = dotRegex.findall(fullInput)[0] - return os.path.abspath(''.join((inputNoExtension, suffix))) + return os.path.abspath("".join((inputNoExtension, suffix))) def l_outputs(self): diff --git a/nipype/interfaces/brainsuite/tests/test_auto_BDP.py b/nipype/interfaces/brainsuite/tests/test_auto_BDP.py index ae25cb0598..94f95c5c2a 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_BDP.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_BDP.py @@ -5,123 +5,90 @@ def test_BDP_inputs(): input_map = dict( BVecBValPair=dict( - argstr='--bvec %s --bval %s', + argstr="--bvec %s --bval %s", mandatory=True, position=-1, - xor=['bMatrixFile'], + xor=["bMatrixFile"], ), - args=dict(argstr='%s', ), + args=dict(argstr="%s",), bMatrixFile=dict( - argstr='--bmat %s', + argstr="--bmat %s", extensions=None, mandatory=True, position=-1, - xor=['BVecBValPair'], + xor=["BVecBValPair"], ), - bValRatioThreshold=dict(argstr='--bval-ratio-threshold %f', ), + bValRatioThreshold=dict(argstr="--bval-ratio-threshold %f",), bfcFile=dict( - argstr='%s', + argstr="%s", extensions=None, mandatory=True, position=0, - xor=['noStructuralRegistration'], + xor=["noStructuralRegistration"], ), customDiffusionLabel=dict( - argstr='--custom-diffusion-label %s', - extensions=None, - ), - customLabelXML=dict( - argstr='--custom-label-xml %s', - extensions=None, - ), - customT1Label=dict( - argstr='--custom-t1-label %s', - extensions=None, - ), - dataSinkDelay=dict(argstr='%s', ), - dcorrRegMeasure=dict(argstr='--dcorr-reg-method %s', ), - dcorrWeight=dict(argstr='--dcorr-regularization-wt %f', ), - dwiMask=dict( - argstr='--dwi-mask %s', - extensions=None, - ), - echoSpacing=dict(argstr='--echo-spacing=%f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - estimateODF_3DShore=dict(argstr='--3dshore --diffusion_time_ms %f', ), - estimateODF_FRACT=dict(argstr='--FRACT', ), - estimateODF_FRT=dict(argstr='--FRT', ), - estimateTensors=dict(argstr='--tensors', ), + argstr="--custom-diffusion-label %s", extensions=None, + ), + customLabelXML=dict(argstr="--custom-label-xml %s", extensions=None,), + customT1Label=dict(argstr="--custom-t1-label %s", extensions=None,), + dataSinkDelay=dict(argstr="%s",), + dcorrRegMeasure=dict(argstr="--dcorr-reg-method %s",), + dcorrWeight=dict(argstr="--dcorr-regularization-wt %f",), + dwiMask=dict(argstr="--dwi-mask %s", extensions=None,), + echoSpacing=dict(argstr="--echo-spacing=%f",), + environ=dict(nohash=True, 
usedefault=True,), + estimateODF_3DShore=dict(argstr="--3dshore --diffusion_time_ms %f",), + estimateODF_FRACT=dict(argstr="--FRACT",), + estimateODF_FRT=dict(argstr="--FRT",), + estimateTensors=dict(argstr="--tensors",), fieldmapCorrection=dict( - argstr='--fieldmap-correction %s', + argstr="--fieldmap-correction %s", extensions=None, - requires=['echoSpacing'], + requires=["echoSpacing"], ), fieldmapCorrectionMethod=dict( - argstr='--fieldmap-correction-method %s', - xor=['skipIntensityCorr'], - ), - fieldmapSmooth=dict(argstr='--fieldmap-smooth3=%f', ), - flagConfigFile=dict( - argstr='--flag-conf-file %s', - extensions=None, - ), - forcePartialROIStats=dict(argstr='--force-partial-roi-stats', ), - generateStats=dict(argstr='--generate-stats', ), - ignoreFieldmapFOV=dict(argstr='--ignore-fieldmap-fov', ), - ignoreMemory=dict(argstr='--ignore-memory', ), + argstr="--fieldmap-correction-method %s", xor=["skipIntensityCorr"], + ), + fieldmapSmooth=dict(argstr="--fieldmap-smooth3=%f",), + flagConfigFile=dict(argstr="--flag-conf-file %s", extensions=None,), + forcePartialROIStats=dict(argstr="--force-partial-roi-stats",), + generateStats=dict(argstr="--generate-stats",), + ignoreFieldmapFOV=dict(argstr="--ignore-fieldmap-fov",), + ignoreMemory=dict(argstr="--ignore-memory",), inputDiffusionData=dict( - argstr='--nii %s', - extensions=None, - mandatory=True, - position=-2, + argstr="--nii %s", extensions=None, mandatory=True, position=-2, ), - lowMemory=dict(argstr='--low-memory', ), + lowMemory=dict(argstr="--low-memory",), noStructuralRegistration=dict( - argstr='--no-structural-registration', + argstr="--no-structural-registration", mandatory=True, position=0, - xor=['bfcFile'], - ), - odfLambta=dict(argstr='--odf-lambda ', ), - onlyStats=dict(argstr='--generate-only-stats', ), - outPrefix=dict(argstr='--output-fileprefix %s', ), - outputDiffusionCoordinates=dict( - argstr='--output-diffusion-coordinate', ), - outputSubdir=dict(argstr='--output-subdir %s', ), - phaseEncodingDirection=dict(argstr='--dir=%s', ), - rigidRegMeasure=dict(argstr='--rigid-reg-measure %s', ), - skipDistortionCorr=dict(argstr='--no-distortion-correction', ), + xor=["bfcFile"], + ), + odfLambta=dict(argstr="--odf-lambda ",), + onlyStats=dict(argstr="--generate-only-stats",), + outPrefix=dict(argstr="--output-fileprefix %s",), + outputDiffusionCoordinates=dict(argstr="--output-diffusion-coordinate",), + outputSubdir=dict(argstr="--output-subdir %s",), + phaseEncodingDirection=dict(argstr="--dir=%s",), + rigidRegMeasure=dict(argstr="--rigid-reg-measure %s",), + skipDistortionCorr=dict(argstr="--no-distortion-correction",), skipIntensityCorr=dict( - argstr='--no-intensity-correction', - xor=['fieldmapCorrectionMethod'], + argstr="--no-intensity-correction", xor=["fieldmapCorrectionMethod"], ), - skipNonuniformityCorr=dict(argstr='--no-nonuniformity-correction', ), - t1Mask=dict( - argstr='--t1-mask %s', - extensions=None, - ), - threads=dict(argstr='--threads=%d', ), - transformDataOnly=dict(argstr='--transform-data-only', ), + skipNonuniformityCorr=dict(argstr="--no-nonuniformity-correction",), + t1Mask=dict(argstr="--t1-mask %s", extensions=None,), + threads=dict(argstr="--threads=%d",), + transformDataOnly=dict(argstr="--transform-data-only",), transformDiffusionSurface=dict( - argstr='--transform-diffusion-surface %s', - extensions=None, + argstr="--transform-diffusion-surface %s", extensions=None, ), transformDiffusionVolume=dict( - argstr='--transform-diffusion-volume %s', - extensions=None, - ), - 
transformInterpolation=dict(argstr='--transform-interpolation %s', ), - transformT1Surface=dict( - argstr='--transform-t1-surface %s', - extensions=None, - ), - transformT1Volume=dict( - argstr='--transform-t1-volume %s', - extensions=None, + argstr="--transform-diffusion-volume %s", extensions=None, ), + transformInterpolation=dict(argstr="--transform-interpolation %s",), + transformT1Surface=dict(argstr="--transform-t1-surface %s", extensions=None,), + transformT1Volume=dict(argstr="--transform-t1-volume %s", extensions=None,), ) inputs = BDP.input_spec() diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Bfc.py b/nipype/interfaces/brainsuite/tests/test_auto_Bfc.py index 3f41f15518..5e2588fd74 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Bfc.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Bfc.py @@ -4,76 +4,49 @@ def test_Bfc_inputs(): input_map = dict( - args=dict(argstr='%s', ), - biasEstimateConvergenceThreshold=dict(argstr='--beps %f', ), - biasEstimateSpacing=dict(argstr='-s %d', ), - biasFieldEstimatesOutputPrefix=dict(argstr='--biasprefix %s', ), - biasRange=dict(argstr='%s', ), - controlPointSpacing=dict(argstr='-c %d', ), - convergenceThreshold=dict(argstr='--eps %f', ), - correctWholeVolume=dict(argstr='--extrapolate', ), - correctedImagesOutputPrefix=dict(argstr='--prefix %s', ), - correctionScheduleFile=dict( - argstr='--schedule %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - histogramRadius=dict(argstr='-r %d', ), - histogramType=dict(argstr='%s', ), - inputMRIFile=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - ), - inputMaskFile=dict( - argstr='-m %s', - extensions=None, - hash_files=False, - ), - intermediate_file_type=dict(argstr='%s', ), - iterativeMode=dict(argstr='--iterate', ), - maxBias=dict( - argstr='-U %f', - usedefault=True, - ), - minBias=dict( - argstr='-L %f', - usedefault=True, - ), - outputBiasField=dict( - argstr='--bias %s', - extensions=None, - hash_files=False, - ), + args=dict(argstr="%s",), + biasEstimateConvergenceThreshold=dict(argstr="--beps %f",), + biasEstimateSpacing=dict(argstr="-s %d",), + biasFieldEstimatesOutputPrefix=dict(argstr="--biasprefix %s",), + biasRange=dict(argstr="%s",), + controlPointSpacing=dict(argstr="-c %d",), + convergenceThreshold=dict(argstr="--eps %f",), + correctWholeVolume=dict(argstr="--extrapolate",), + correctedImagesOutputPrefix=dict(argstr="--prefix %s",), + correctionScheduleFile=dict(argstr="--schedule %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), + histogramRadius=dict(argstr="-r %d",), + histogramType=dict(argstr="%s",), + inputMRIFile=dict(argstr="-i %s", extensions=None, mandatory=True,), + inputMaskFile=dict(argstr="-m %s", extensions=None, hash_files=False,), + intermediate_file_type=dict(argstr="%s",), + iterativeMode=dict(argstr="--iterate",), + maxBias=dict(argstr="-U %f", usedefault=True,), + minBias=dict(argstr="-L %f", usedefault=True,), + outputBiasField=dict(argstr="--bias %s", extensions=None, hash_files=False,), outputMRIVolume=dict( - argstr='-o %s', - extensions=None, - genfile=True, - hash_files=False, + argstr="-o %s", extensions=None, genfile=True, hash_files=False, ), outputMaskedBiasField=dict( - argstr='--maskedbias %s', - extensions=None, - hash_files=False, + argstr="--maskedbias %s", extensions=None, hash_files=False, ), - splineLambda=dict(argstr='-w %f', ), - timer=dict(argstr='--timer', ), - verbosityLevel=dict(argstr='-v %d', ), + splineLambda=dict(argstr="-w %f",), + 
timer=dict(argstr="--timer",), + verbosityLevel=dict(argstr="-v %d",), ) inputs = Bfc.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Bfc_outputs(): output_map = dict( - correctionScheduleFile=dict(extensions=None, ), - outputBiasField=dict(extensions=None, ), - outputMRIVolume=dict(extensions=None, ), - outputMaskedBiasField=dict(extensions=None, ), + correctionScheduleFile=dict(extensions=None,), + outputBiasField=dict(extensions=None,), + outputMRIVolume=dict(extensions=None,), + outputMaskedBiasField=dict(extensions=None,), ) outputs = Bfc.output_spec() diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Bse.py b/nipype/interfaces/brainsuite/tests/test_auto_Bse.py index 3476926f0e..a980010ef0 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Bse.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Bse.py @@ -4,92 +4,48 @@ def test_Bse_inputs(): input_map = dict( - args=dict(argstr='%s', ), - diffusionConstant=dict( - argstr='-d %f', - usedefault=True, - ), - diffusionIterations=dict( - argstr='-n %d', - usedefault=True, - ), - dilateFinalMask=dict( - argstr='-p', - usedefault=True, - ), - edgeDetectionConstant=dict( - argstr='-s %f', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputMRIFile=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - ), - noRotate=dict(argstr='--norotate', ), - outputCortexFile=dict( - argstr='--cortex %s', - extensions=None, - hash_files=False, - ), + args=dict(argstr="%s",), + diffusionConstant=dict(argstr="-d %f", usedefault=True,), + diffusionIterations=dict(argstr="-n %d", usedefault=True,), + dilateFinalMask=dict(argstr="-p", usedefault=True,), + edgeDetectionConstant=dict(argstr="-s %f", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + inputMRIFile=dict(argstr="-i %s", extensions=None, mandatory=True,), + noRotate=dict(argstr="--norotate",), + outputCortexFile=dict(argstr="--cortex %s", extensions=None, hash_files=False,), outputDetailedBrainMask=dict( - argstr='--hires %s', - extensions=None, - hash_files=False, + argstr="--hires %s", extensions=None, hash_files=False, ), outputDiffusionFilter=dict( - argstr='--adf %s', - extensions=None, - hash_files=False, - ), - outputEdgeMap=dict( - argstr='--edge %s', - extensions=None, - hash_files=False, + argstr="--adf %s", extensions=None, hash_files=False, ), + outputEdgeMap=dict(argstr="--edge %s", extensions=None, hash_files=False,), outputMRIVolume=dict( - argstr='-o %s', - extensions=None, - genfile=True, - hash_files=False, + argstr="-o %s", extensions=None, genfile=True, hash_files=False, ), outputMaskFile=dict( - argstr='--mask %s', - extensions=None, - genfile=True, - hash_files=False, - ), - radius=dict( - argstr='-r %f', - usedefault=True, - ), - timer=dict(argstr='--timer', ), - trim=dict( - argstr='--trim', - usedefault=True, - ), - verbosityLevel=dict( - argstr='-v %f', - usedefault=True, + argstr="--mask %s", extensions=None, genfile=True, hash_files=False, ), + radius=dict(argstr="-r %f", usedefault=True,), + timer=dict(argstr="--timer",), + trim=dict(argstr="--trim", usedefault=True,), + verbosityLevel=dict(argstr="-v %f", usedefault=True,), ) inputs = Bse.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Bse_outputs(): output_map = dict( - 
outputCortexFile=dict(extensions=None, ), - outputDetailedBrainMask=dict(extensions=None, ), - outputDiffusionFilter=dict(extensions=None, ), - outputEdgeMap=dict(extensions=None, ), - outputMRIVolume=dict(extensions=None, ), - outputMaskFile=dict(extensions=None, ), + outputCortexFile=dict(extensions=None,), + outputDetailedBrainMask=dict(extensions=None,), + outputDiffusionFilter=dict(extensions=None,), + outputEdgeMap=dict(extensions=None,), + outputMRIVolume=dict(extensions=None,), + outputMaskFile=dict(extensions=None,), ) outputs = Bse.output_spec() diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Cerebro.py b/nipype/interfaces/brainsuite/tests/test_auto_Cerebro.py index 91b610bbf2..e2ff64c071 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Cerebro.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Cerebro.py @@ -4,74 +4,45 @@ def test_Cerebro_inputs(): input_map = dict( - args=dict(argstr='%s', ), - costFunction=dict( - argstr='-c %d', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + costFunction=dict(argstr="-c %d", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), inputAtlasLabelFile=dict( - argstr='--atlaslabels %s', - extensions=None, - mandatory=True, - ), - inputAtlasMRIFile=dict( - argstr='--atlas %s', - extensions=None, - mandatory=True, - ), - inputBrainMaskFile=dict( - argstr='-m %s', - extensions=None, - ), - inputMRIFile=dict( - argstr='-i %s', - extensions=None, - mandatory=True, + argstr="--atlaslabels %s", extensions=None, mandatory=True, ), - keepTempFiles=dict(argstr='--keep', ), - linearConvergence=dict(argstr='--linconv %f', ), + inputAtlasMRIFile=dict(argstr="--atlas %s", extensions=None, mandatory=True,), + inputBrainMaskFile=dict(argstr="-m %s", extensions=None,), + inputMRIFile=dict(argstr="-i %s", extensions=None, mandatory=True,), + keepTempFiles=dict(argstr="--keep",), + linearConvergence=dict(argstr="--linconv %f",), outputAffineTransformFile=dict( - argstr='--air %s', - extensions=None, - genfile=True, - ), - outputCerebrumMaskFile=dict( - argstr='-o %s', - extensions=None, - genfile=True, - ), - outputLabelVolumeFile=dict( - argstr='-l %s', - extensions=None, - genfile=True, + argstr="--air %s", extensions=None, genfile=True, ), + outputCerebrumMaskFile=dict(argstr="-o %s", extensions=None, genfile=True,), + outputLabelVolumeFile=dict(argstr="-l %s", extensions=None, genfile=True,), outputWarpTransformFile=dict( - argstr='--warp %s', - extensions=None, - genfile=True, - ), - tempDirectory=dict(argstr='--tempdir %s', ), - tempDirectoryBase=dict(argstr='--tempdirbase %s', ), - useCentroids=dict(argstr='--centroids', ), - verbosity=dict(argstr='-v %d', ), - warpConvergence=dict(argstr='--warpconv %f', ), - warpLabel=dict(argstr='--warplevel %d', ), + argstr="--warp %s", extensions=None, genfile=True, + ), + tempDirectory=dict(argstr="--tempdir %s",), + tempDirectoryBase=dict(argstr="--tempdirbase %s",), + useCentroids=dict(argstr="--centroids",), + verbosity=dict(argstr="-v %d",), + warpConvergence=dict(argstr="--warpconv %f",), + warpLabel=dict(argstr="--warplevel %d",), ) inputs = Cerebro.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Cerebro_outputs(): output_map = dict( - outputAffineTransformFile=dict(extensions=None, ), - outputCerebrumMaskFile=dict(extensions=None, ), - outputLabelVolumeFile=dict(extensions=None, ), - 
outputWarpTransformFile=dict(extensions=None, ), + outputAffineTransformFile=dict(extensions=None,), + outputCerebrumMaskFile=dict(extensions=None,), + outputLabelVolumeFile=dict(extensions=None,), + outputWarpTransformFile=dict(extensions=None,), ) outputs = Cerebro.output_spec() diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Cortex.py b/nipype/interfaces/brainsuite/tests/test_auto_Cortex.py index 607d98eb14..75015d79ab 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Cortex.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Cortex.py @@ -4,49 +4,27 @@ def test_Cortex_inputs(): input_map = dict( - args=dict(argstr='%s', ), - computeGCBoundary=dict(argstr='-g', ), - computeWGBoundary=dict( - argstr='-w', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - includeAllSubcorticalAreas=dict( - argstr='-a', - usedefault=True, - ), - inputHemisphereLabelFile=dict( - argstr='-h %s', - extensions=None, - mandatory=True, - ), - inputTissueFractionFile=dict( - argstr='-f %s', - extensions=None, - mandatory=True, - ), - outputCerebrumMask=dict( - argstr='-o %s', - extensions=None, - genfile=True, - ), - timer=dict(argstr='--timer', ), - tissueFractionThreshold=dict( - argstr='-p %f', - usedefault=True, - ), - verbosity=dict(argstr='-v %d', ), + args=dict(argstr="%s",), + computeGCBoundary=dict(argstr="-g",), + computeWGBoundary=dict(argstr="-w", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + includeAllSubcorticalAreas=dict(argstr="-a", usedefault=True,), + inputHemisphereLabelFile=dict(argstr="-h %s", extensions=None, mandatory=True,), + inputTissueFractionFile=dict(argstr="-f %s", extensions=None, mandatory=True,), + outputCerebrumMask=dict(argstr="-o %s", extensions=None, genfile=True,), + timer=dict(argstr="--timer",), + tissueFractionThreshold=dict(argstr="-p %f", usedefault=True,), + verbosity=dict(argstr="-v %d",), ) inputs = Cortex.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Cortex_outputs(): - output_map = dict(outputCerebrumMask=dict(extensions=None, ), ) + output_map = dict(outputCerebrumMask=dict(extensions=None,),) outputs = Cortex.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Dewisp.py b/nipype/interfaces/brainsuite/tests/test_auto_Dewisp.py index d6eb0c6414..ed3b4c32f6 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Dewisp.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Dewisp.py @@ -4,33 +4,24 @@ def test_Dewisp_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputMaskFile=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - ), - maximumIterations=dict(argstr='-n %d', ), - outputMaskFile=dict( - argstr='-o %s', - extensions=None, - genfile=True, - ), - sizeThreshold=dict(argstr='-t %d', ), - timer=dict(argstr='--timer', ), - verbosity=dict(argstr='-v %d', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputMaskFile=dict(argstr="-i %s", extensions=None, mandatory=True,), + maximumIterations=dict(argstr="-n %d",), + outputMaskFile=dict(argstr="-o %s", extensions=None, genfile=True,), + sizeThreshold=dict(argstr="-t %d",), + timer=dict(argstr="--timer",), + verbosity=dict(argstr="-v %d",), ) inputs = Dewisp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Dewisp_outputs(): - output_map = dict(outputMaskFile=dict(extensions=None, ), ) + output_map = dict(outputMaskFile=dict(extensions=None,),) outputs = Dewisp.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Dfs.py b/nipype/interfaces/brainsuite/tests/test_auto_Dfs.py index f79d309262..00a35e8c82 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Dfs.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Dfs.py @@ -4,62 +4,40 @@ def test_Dfs_inputs(): input_map = dict( - args=dict(argstr='%s', ), - curvatureWeighting=dict( - argstr='-w %f', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputShadingVolume=dict( - argstr='-c %s', - extensions=None, - ), - inputVolumeFile=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - ), - noNormalsFlag=dict(argstr='--nonormals', ), + args=dict(argstr="%s",), + curvatureWeighting=dict(argstr="-w %f", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + inputShadingVolume=dict(argstr="-c %s", extensions=None,), + inputVolumeFile=dict(argstr="-i %s", extensions=None, mandatory=True,), + noNormalsFlag=dict(argstr="--nonormals",), nonZeroTessellation=dict( - argstr='-nz', - xor=('nonZeroTessellation', 'specialTessellation'), - ), - outputSurfaceFile=dict( - argstr='-o %s', - extensions=None, - genfile=True, - ), - postSmoothFlag=dict(argstr='--postsmooth', ), - scalingPercentile=dict(argstr='-f %f', ), - smoothingConstant=dict( - argstr='-a %f', - usedefault=True, - ), - smoothingIterations=dict( - argstr='-n %d', - usedefault=True, + argstr="-nz", xor=("nonZeroTessellation", "specialTessellation"), ), + outputSurfaceFile=dict(argstr="-o %s", extensions=None, genfile=True,), + postSmoothFlag=dict(argstr="--postsmooth",), + scalingPercentile=dict(argstr="-f %f",), + smoothingConstant=dict(argstr="-a %f", usedefault=True,), + smoothingIterations=dict(argstr="-n %d", usedefault=True,), specialTessellation=dict( - argstr='%s', + argstr="%s", position=-1, - requires=['tessellationThreshold'], - xor=('nonZeroTessellation', 'specialTessellation'), + requires=["tessellationThreshold"], + xor=("nonZeroTessellation", "specialTessellation"), ), - tessellationThreshold=dict(argstr='%f', ), - timer=dict(argstr='--timer', ), - verbosity=dict(argstr='-v %d', ), - zeroPadFlag=dict(argstr='-z', ), + tessellationThreshold=dict(argstr="%f",), + timer=dict(argstr="--timer",), + verbosity=dict(argstr="-v %d",), + zeroPadFlag=dict(argstr="-z",), ) inputs = Dfs.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Dfs_outputs(): - output_map = dict(outputSurfaceFile=dict(extensions=None, ), ) + output_map = dict(outputSurfaceFile=dict(extensions=None,),) outputs = Dfs.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Hemisplit.py b/nipype/interfaces/brainsuite/tests/test_auto_Hemisplit.py index cfc74eb69a..761d049672 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Hemisplit.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Hemisplit.py @@ -4,59 +4,31 @@ def test_Hemisplit_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputHemisphereLabelFile=dict( - argstr='-l %s', - extensions=None, - 
mandatory=True, - ), - inputSurfaceFile=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - ), - outputLeftHemisphere=dict( - argstr='--left %s', - extensions=None, - genfile=True, - ), - outputLeftPialHemisphere=dict( - argstr='-pl %s', - extensions=None, - genfile=True, - ), - outputRightHemisphere=dict( - argstr='--right %s', - extensions=None, - genfile=True, - ), - outputRightPialHemisphere=dict( - argstr='-pr %s', - extensions=None, - genfile=True, - ), - pialSurfaceFile=dict( - argstr='-p %s', - extensions=None, - ), - timer=dict(argstr='--timer', ), - verbosity=dict(argstr='-v %d', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputHemisphereLabelFile=dict(argstr="-l %s", extensions=None, mandatory=True,), + inputSurfaceFile=dict(argstr="-i %s", extensions=None, mandatory=True,), + outputLeftHemisphere=dict(argstr="--left %s", extensions=None, genfile=True,), + outputLeftPialHemisphere=dict(argstr="-pl %s", extensions=None, genfile=True,), + outputRightHemisphere=dict(argstr="--right %s", extensions=None, genfile=True,), + outputRightPialHemisphere=dict(argstr="-pr %s", extensions=None, genfile=True,), + pialSurfaceFile=dict(argstr="-p %s", extensions=None,), + timer=dict(argstr="--timer",), + verbosity=dict(argstr="-v %d",), ) inputs = Hemisplit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Hemisplit_outputs(): output_map = dict( - outputLeftHemisphere=dict(extensions=None, ), - outputLeftPialHemisphere=dict(extensions=None, ), - outputRightHemisphere=dict(extensions=None, ), - outputRightPialHemisphere=dict(extensions=None, ), + outputLeftHemisphere=dict(extensions=None,), + outputLeftPialHemisphere=dict(extensions=None,), + outputRightHemisphere=dict(extensions=None,), + outputRightPialHemisphere=dict(extensions=None,), ) outputs = Hemisplit.output_spec() diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Pialmesh.py b/nipype/interfaces/brainsuite/tests/test_auto_Pialmesh.py index 561f6ef1d8..9da07862f8 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Pialmesh.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Pialmesh.py @@ -4,76 +4,35 @@ def test_Pialmesh_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - exportPrefix=dict(argstr='--prefix %s', ), - inputMaskFile=dict( - argstr='-m %s', - extensions=None, - mandatory=True, - ), - inputSurfaceFile=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - ), - inputTissueFractionFile=dict( - argstr='-f %s', - extensions=None, - mandatory=True, - ), - laplacianSmoothing=dict( - argstr='--smooth %f', - usedefault=True, - ), - maxThickness=dict( - argstr='--max %f', - usedefault=True, - ), - normalSmoother=dict( - argstr='--nc %f', - usedefault=True, - ), - numIterations=dict( - argstr='-n %d', - usedefault=True, - ), - outputInterval=dict( - argstr='--interval %d', - usedefault=True, - ), - outputSurfaceFile=dict( - argstr='-o %s', - extensions=None, - genfile=True, - ), - recomputeNormals=dict(argstr='--norm', ), - searchRadius=dict( - argstr='-r %f', - usedefault=True, - ), - stepSize=dict( - argstr='-s %f', - usedefault=True, - ), - tangentSmoother=dict(argstr='--tc %f', ), - timer=dict(argstr='--timer', ), - tissueThreshold=dict( - argstr='-t %f', - usedefault=True, - ), - verbosity=dict(argstr='-v %d', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, 
usedefault=True,), + exportPrefix=dict(argstr="--prefix %s",), + inputMaskFile=dict(argstr="-m %s", extensions=None, mandatory=True,), + inputSurfaceFile=dict(argstr="-i %s", extensions=None, mandatory=True,), + inputTissueFractionFile=dict(argstr="-f %s", extensions=None, mandatory=True,), + laplacianSmoothing=dict(argstr="--smooth %f", usedefault=True,), + maxThickness=dict(argstr="--max %f", usedefault=True,), + normalSmoother=dict(argstr="--nc %f", usedefault=True,), + numIterations=dict(argstr="-n %d", usedefault=True,), + outputInterval=dict(argstr="--interval %d", usedefault=True,), + outputSurfaceFile=dict(argstr="-o %s", extensions=None, genfile=True,), + recomputeNormals=dict(argstr="--norm",), + searchRadius=dict(argstr="-r %f", usedefault=True,), + stepSize=dict(argstr="-s %f", usedefault=True,), + tangentSmoother=dict(argstr="--tc %f",), + timer=dict(argstr="--timer",), + tissueThreshold=dict(argstr="-t %f", usedefault=True,), + verbosity=dict(argstr="-v %d",), ) inputs = Pialmesh.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Pialmesh_outputs(): - output_map = dict(outputSurfaceFile=dict(extensions=None, ), ) + output_map = dict(outputSurfaceFile=dict(extensions=None,),) outputs = Pialmesh.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Pvc.py b/nipype/interfaces/brainsuite/tests/test_auto_Pvc.py index 5b93215b70..fa1c8bc8b7 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Pvc.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Pvc.py @@ -4,44 +4,28 @@ def test_Pvc_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputMRIFile=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - ), - inputMaskFile=dict( - argstr='-m %s', - extensions=None, - ), - outputLabelFile=dict( - argstr='-o %s', - extensions=None, - genfile=True, - ), - outputTissueFractionFile=dict( - argstr='-f %s', - extensions=None, - genfile=True, - ), - spatialPrior=dict(argstr='-l %f', ), - threeClassFlag=dict(argstr='-3', ), - timer=dict(argstr='--timer', ), - verbosity=dict(argstr='-v %d', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputMRIFile=dict(argstr="-i %s", extensions=None, mandatory=True,), + inputMaskFile=dict(argstr="-m %s", extensions=None,), + outputLabelFile=dict(argstr="-o %s", extensions=None, genfile=True,), + outputTissueFractionFile=dict(argstr="-f %s", extensions=None, genfile=True,), + spatialPrior=dict(argstr="-l %f",), + threeClassFlag=dict(argstr="-3",), + timer=dict(argstr="--timer",), + verbosity=dict(argstr="-v %d",), ) inputs = Pvc.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Pvc_outputs(): output_map = dict( - outputLabelFile=dict(extensions=None, ), - outputTissueFractionFile=dict(extensions=None, ), + outputLabelFile=dict(extensions=None,), + outputTissueFractionFile=dict(extensions=None,), ) outputs = Pvc.output_spec() diff --git a/nipype/interfaces/brainsuite/tests/test_auto_SVReg.py b/nipype/interfaces/brainsuite/tests/test_auto_SVReg.py index 3d861a0677..2c19934e7c 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_SVReg.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_SVReg.py @@ -4,48 +4,33 @@ def test_SVReg_inputs(): 
input_map = dict( - args=dict(argstr='%s', ), - atlasFilePrefix=dict( - argstr="'%s'", - position=1, - ), - curveMatchingInstructions=dict(argstr="'-cur %s'", ), - dataSinkDelay=dict(argstr='%s', ), - displayModuleName=dict(argstr="'-m'", ), - displayTimestamps=dict(argstr="'-t'", ), - environ=dict( - nohash=True, - usedefault=True, - ), - iterations=dict(argstr="'-H %d'", ), - keepIntermediates=dict(argstr="'-k'", ), - pialSurfaceMaskDilation=dict(argstr="'-D %d'", ), - refineOutputs=dict(argstr="'-r'", ), - shortMessages=dict(argstr="'-gui'", ), - skipToIntensityReg=dict(argstr="'-p'", ), - skipToVolumeReg=dict(argstr="'-s'", ), - skipVolumetricProcessing=dict(argstr="'-S'", ), - subjectFilePrefix=dict( - argstr="'%s'", - mandatory=True, - position=0, - ), - useCerebrumMask=dict(argstr="'-C'", ), - useManualMaskFile=dict(argstr="'-cbm'", ), - useMultiThreading=dict(argstr="'-P'", ), - useSingleThreading=dict(argstr="'-U'", ), + args=dict(argstr="%s",), + atlasFilePrefix=dict(argstr="'%s'", position=1,), + curveMatchingInstructions=dict(argstr="'-cur %s'",), + dataSinkDelay=dict(argstr="%s",), + displayModuleName=dict(argstr="'-m'",), + displayTimestamps=dict(argstr="'-t'",), + environ=dict(nohash=True, usedefault=True,), + iterations=dict(argstr="'-H %d'",), + keepIntermediates=dict(argstr="'-k'",), + pialSurfaceMaskDilation=dict(argstr="'-D %d'",), + refineOutputs=dict(argstr="'-r'",), + shortMessages=dict(argstr="'-gui'",), + skipToIntensityReg=dict(argstr="'-p'",), + skipToVolumeReg=dict(argstr="'-s'",), + skipVolumetricProcessing=dict(argstr="'-S'",), + subjectFilePrefix=dict(argstr="'%s'", mandatory=True, position=0,), + useCerebrumMask=dict(argstr="'-C'",), + useManualMaskFile=dict(argstr="'-cbm'",), + useMultiThreading=dict(argstr="'-P'",), + useSingleThreading=dict(argstr="'-U'",), verbosity0=dict( - argstr="'-v0'", - xor=('verbosity0', 'verbosity1', 'verbosity2'), + argstr="'-v0'", xor=("verbosity0", "verbosity1", "verbosity2"), ), verbosity1=dict( - argstr="'-v1'", - xor=('verbosity0', 'verbosity1', 'verbosity2'), - ), - verbosity2=dict( - argstr="'v2'", - xor=('verbosity0', 'verbosity1', 'verbosity2'), + argstr="'-v1'", xor=("verbosity0", "verbosity1", "verbosity2"), ), + verbosity2=dict(argstr="'v2'", xor=("verbosity0", "verbosity1", "verbosity2"),), ) inputs = SVReg.input_spec() diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Scrubmask.py b/nipype/interfaces/brainsuite/tests/test_auto_Scrubmask.py index c04b16d5d2..b356b0335e 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Scrubmask.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Scrubmask.py @@ -4,40 +4,25 @@ def test_Scrubmask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - backgroundFillThreshold=dict( - argstr='-b %d', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - foregroundTrimThreshold=dict( - argstr='-f %d', - usedefault=True, - ), - inputMaskFile=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - ), - numberIterations=dict(argstr='-n %d', ), - outputMaskFile=dict( - argstr='-o %s', - extensions=None, - genfile=True, - ), - timer=dict(argstr='--timer', ), - verbosity=dict(argstr='-v %d', ), + args=dict(argstr="%s",), + backgroundFillThreshold=dict(argstr="-b %d", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + foregroundTrimThreshold=dict(argstr="-f %d", usedefault=True,), + inputMaskFile=dict(argstr="-i %s", extensions=None, mandatory=True,), + numberIterations=dict(argstr="-n %d",), + outputMaskFile=dict(argstr="-o 
%s", extensions=None, genfile=True,), + timer=dict(argstr="--timer",), + verbosity=dict(argstr="-v %d",), ) inputs = Scrubmask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Scrubmask_outputs(): - output_map = dict(outputMaskFile=dict(extensions=None, ), ) + output_map = dict(outputMaskFile=dict(extensions=None,),) outputs = Scrubmask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Skullfinder.py b/nipype/interfaces/brainsuite/tests/test_auto_Skullfinder.py index 11707befa3..06480f30e8 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Skullfinder.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Skullfinder.py @@ -4,44 +4,31 @@ def test_Skullfinder_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bgLabelValue=dict(argstr='--bglabel %d', ), - brainLabelValue=dict(argstr='--brainlabel %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputMRIFile=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - ), - inputMaskFile=dict( - argstr='-m %s', - extensions=None, - mandatory=True, - ), - lowerThreshold=dict(argstr='-l %d', ), - outputLabelFile=dict( - argstr='-o %s', - extensions=None, - genfile=True, - ), - performFinalOpening=dict(argstr='--finalOpening', ), - scalpLabelValue=dict(argstr='--scalplabel %d', ), - skullLabelValue=dict(argstr='--skulllabel %d', ), - spaceLabelValue=dict(argstr='--spacelabel %d', ), - surfaceFilePrefix=dict(argstr='-s %s', ), - upperThreshold=dict(argstr='-u %d', ), - verbosity=dict(argstr='-v %d', ), + args=dict(argstr="%s",), + bgLabelValue=dict(argstr="--bglabel %d",), + brainLabelValue=dict(argstr="--brainlabel %d",), + environ=dict(nohash=True, usedefault=True,), + inputMRIFile=dict(argstr="-i %s", extensions=None, mandatory=True,), + inputMaskFile=dict(argstr="-m %s", extensions=None, mandatory=True,), + lowerThreshold=dict(argstr="-l %d",), + outputLabelFile=dict(argstr="-o %s", extensions=None, genfile=True,), + performFinalOpening=dict(argstr="--finalOpening",), + scalpLabelValue=dict(argstr="--scalplabel %d",), + skullLabelValue=dict(argstr="--skulllabel %d",), + spaceLabelValue=dict(argstr="--spacelabel %d",), + surfaceFilePrefix=dict(argstr="-s %s",), + upperThreshold=dict(argstr="-u %d",), + verbosity=dict(argstr="-v %d",), ) inputs = Skullfinder.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Skullfinder_outputs(): - output_map = dict(outputLabelFile=dict(extensions=None, ), ) + output_map = dict(outputLabelFile=dict(extensions=None,),) outputs = Skullfinder.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Tca.py b/nipype/interfaces/brainsuite/tests/test_auto_Tca.py index b7767a1b01..66cb70ac8f 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Tca.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Tca.py @@ -4,40 +4,25 @@ def test_Tca_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - foregroundDelta=dict( - argstr='--delta %d', - usedefault=True, - ), - inputMaskFile=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - ), - maxCorrectionSize=dict(argstr='-n %d', ), - minCorrectionSize=dict( - argstr='-m %d', - usedefault=True, - ), - 
outputMaskFile=dict(
-            argstr='-o %s',
-            extensions=None,
-            genfile=True,
-        ),
-        timer=dict(argstr='--timer', ),
-        verbosity=dict(argstr='-v %d', ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        foregroundDelta=dict(argstr="--delta %d", usedefault=True,),
+        inputMaskFile=dict(argstr="-i %s", extensions=None, mandatory=True,),
+        maxCorrectionSize=dict(argstr="-n %d",),
+        minCorrectionSize=dict(argstr="-m %d", usedefault=True,),
+        outputMaskFile=dict(argstr="-o %s", extensions=None, genfile=True,),
+        timer=dict(argstr="--timer",),
+        verbosity=dict(argstr="-v %d",),
     )
     inputs = Tca.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Tca_outputs():
-    output_map = dict(outputMaskFile=dict(extensions=None, ), )
+    output_map = dict(outputMaskFile=dict(extensions=None,),)
     outputs = Tca.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/brainsuite/tests/test_auto_ThicknessPVC.py b/nipype/interfaces/brainsuite/tests/test_auto_ThicknessPVC.py
index cf2174690e..c0265e7e6c 100644
--- a/nipype/interfaces/brainsuite/tests/test_auto_ThicknessPVC.py
+++ b/nipype/interfaces/brainsuite/tests/test_auto_ThicknessPVC.py
@@ -4,15 +4,9 @@
 def test_ThicknessPVC_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        subjectFilePrefix=dict(
-            argstr='%s',
-            mandatory=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        subjectFilePrefix=dict(argstr="%s", mandatory=True,),
     )
     inputs = ThicknessPVC.input_spec()
diff --git a/nipype/interfaces/bru2nii.py b/nipype/interfaces/bru2nii.py
index 1b4574f00c..7ed9c77651 100644
--- a/nipype/interfaces/bru2nii.py
+++ b/nipype/interfaces/bru2nii.py
@@ -3,32 +3,38 @@
 """
 import os

-from .base import (CommandLine, CommandLineInputSpec, traits, TraitedSpec,
-                   isdefined, File, Directory)
+from .base import (
+    CommandLine,
+    CommandLineInputSpec,
+    traits,
+    TraitedSpec,
+    isdefined,
+    File,
+    Directory,
+)


 class Bru2InputSpec(CommandLineInputSpec):
     input_dir = Directory(
-        desc="Input Directory",
-        exists=True,
-        mandatory=True,
-        position=-1,
-        argstr="%s")
+        desc="Input Directory", exists=True, mandatory=True, position=-1, argstr="%s"
+    )
     actual_size = traits.Bool(
-        argstr='-a',
-        desc="Keep actual size - otherwise x10 scale so animals match human.")
+        argstr="-a",
+        desc="Keep actual size - otherwise x10 scale so animals match human.",
+    )
     force_conversion = traits.Bool(
-        argstr='-f',
-        desc="Force conversion of localizers images (multiple slice "
-        "orientations).")
-    compress = traits.Bool(
-        argstr='-z', desc='gz compress images (".nii.gz").')
+        argstr="-f",
+        desc="Force conversion of localizer images (multiple slice " "orientations).",
+    )
+    compress = traits.Bool(argstr="-z", desc='gz compress images (".nii.gz").')
     append_protocol_name = traits.Bool(
-        argstr='-p', desc="Append protocol name to output filename.")
+        argstr="-p", desc="Append protocol name to output filename."
+ ) output_filename = traits.Str( argstr="-o %s", desc='Output filename (".nii" will be appended, or ".nii.gz" if the "-z" compress option is selected)', - genfile=True) + genfile=True, + ) class Bru2OutputSpec(TraitedSpec): @@ -47,6 +53,7 @@ class Bru2(CommandLine): >>> converter.cmdline # doctest: +ELLIPSIS 'Bru2 -o .../data/brukerdir brukerdir' """ + input_spec = Bru2InputSpec output_spec = Bru2OutputSpec _cmd = "Bru2" @@ -56,7 +63,7 @@ def _list_outputs(self): if isdefined(self.inputs.output_filename): output_filename1 = os.path.abspath(self.inputs.output_filename) else: - output_filename1 = self._gen_filename('output_filename') + output_filename1 = self._gen_filename("output_filename") if self.inputs.compress: outputs["nii_file"] = output_filename1 + ".nii.gz" else: @@ -64,8 +71,8 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name == 'output_filename': + if name == "output_filename": outfile = os.path.join( - os.getcwd(), - os.path.basename(os.path.normpath(self.inputs.input_dir))) + os.getcwd(), os.path.basename(os.path.normpath(self.inputs.input_dir)) + ) return outfile diff --git a/nipype/interfaces/c3.py b/nipype/interfaces/c3.py index db81fce55f..4eadb98207 100644 --- a/nipype/interfaces/c3.py +++ b/nipype/interfaces/c3.py @@ -5,9 +5,17 @@ import os from glob import glob -from .base import (CommandLineInputSpec, traits, TraitedSpec, File, - SEMLikeCommandLine, InputMultiPath, OutputMultiPath, - CommandLine, isdefined) +from .base import ( + CommandLineInputSpec, + traits, + TraitedSpec, + File, + SEMLikeCommandLine, + InputMultiPath, + OutputMultiPath, + CommandLine, + isdefined, +) from ..utils.filemanip import split_filename from .. import logging @@ -16,16 +24,17 @@ class C3dAffineToolInputSpec(CommandLineInputSpec): reference_file = File(exists=True, argstr="-ref %s", position=1) - source_file = File(exists=True, argstr='-src %s', position=2) - transform_file = File(exists=True, argstr='%s', position=3) + source_file = File(exists=True, argstr="-src %s", position=2) + transform_file = File(exists=True, argstr="%s", position=3) itk_transform = traits.Either( traits.Bool, File(), hash_files=False, desc="Export ITK transform.", argstr="-oitk %s", - position=5) - fsl2ras = traits.Bool(argstr='-fsl2ras', position=4) + position=5, + ) + fsl2ras = traits.Bool(argstr="-fsl2ras", position=4) class C3dAffineToolOutputSpec(TraitedSpec): @@ -46,11 +55,12 @@ class C3dAffineTool(SEMLikeCommandLine): >>> c3.cmdline 'c3d_affine_tool -src cmatrix.mat -fsl2ras -oitk affine.txt' """ + input_spec = C3dAffineToolInputSpec output_spec = C3dAffineToolOutputSpec - _cmd = 'c3d_affine_tool' - _outputs_filenames = {'itk_transform': 'affine.txt'} + _cmd = "c3d_affine_tool" + _outputs_filenames = {"itk_transform": "affine.txt"} class C3dInputSpec(CommandLineInputSpec): @@ -59,62 +69,97 @@ class C3dInputSpec(CommandLineInputSpec): position=1, argstr="%s", mandatory=True, - desc="Input file (wildcard and multiple are supported).") + desc="Input file (wildcard and multiple are supported).", + ) out_file = File( exists=False, argstr="-o %s", position=-1, xor=["out_files"], - desc="Output file of last image on the stack.") + desc="Output file of last image on the stack.", + ) out_files = InputMultiPath( File(), argstr="-oo %s", xor=["out_file"], position=-1, - desc=("Write all images on the convert3d stack as multiple files." 
-              " Supports both list of output files or a pattern for the output"
-              " filenames (using %d substituion)."))
+        desc=(
+            "Write all images on the convert3d stack as multiple files."
+            " Supports both list of output files or a pattern for the output"
+            " filenames (using %d substitution)."
+        ),
+    )
     pix_type = traits.Enum(
-        "float", "char", "uchar", "short", "ushort", "int", "uint", "double",
+        "float",
+        "char",
+        "uchar",
+        "short",
+        "ushort",
+        "int",
+        "uint",
+        "double",
         argstr="-type %s",
-        desc=("Specifies the pixel type for the output image. By default,"
-              " images are written in floating point (float) format"))
+        desc=(
+            "Specifies the pixel type for the output image. By default,"
+            " images are written in floating point (float) format"
+        ),
+    )
     scale = traits.Either(
-        traits.Int(), traits.Float(),
+        traits.Int(),
+        traits.Float(),
         argstr="-scale %s",
-        desc=("Multiplies the intensity of each voxel in the last image on the"
-              " stack by the given factor."))
+        desc=(
+            "Multiplies the intensity of each voxel in the last image on the"
+            " stack by the given factor."
+        ),
+    )
     shift = traits.Either(
-        traits.Int(), traits.Float(),
+        traits.Int(),
+        traits.Float(),
         argstr="-shift %s",
-        desc='Adds the given constant to every voxel.')
+        desc="Adds the given constant to every voxel.",
+    )
     interp = traits.Enum(
-        "Linear", "NearestNeighbor", "Cubic", "Sinc", "Gaussian",
+        "Linear",
+        "NearestNeighbor",
+        "Cubic",
+        "Sinc",
+        "Gaussian",
         argstr="-interpolation %s",
-        desc=("Specifies the interpolation used with -resample and other"
-              " commands. Default is Linear."))
+        desc=(
+            "Specifies the interpolation used with -resample and other"
+            " commands. Default is Linear."
+        ),
+    )
     resample = traits.Str(
         argstr="-resample %s",
-        desc=("Resamples the image, keeping the bounding box the same, but"
-              " changing the number of voxels in the image. The dimensions can be"
-              " specified as a percentage, for example to double the number of voxels"
-              " in each direction. The -interpolation flag affects how sampling is"
-              " performed."))
+        desc=(
+            "Resamples the image, keeping the bounding box the same, but"
+            " changing the number of voxels in the image. The dimensions can be"
+            " specified as a percentage, for example to double the number of voxels"
+            " in each direction. The -interpolation flag affects how sampling is"
+            " performed."
+        ),
+    )
     smooth = traits.Str(
         argstr="-smooth %s",
-        desc=("Applies Gaussian smoothing to the image. The parameter vector"
-              " specifies the standard deviation of the Gaussian kernel."))
+        desc=(
+            "Applies Gaussian smoothing to the image. The parameter vector"
+            " specifies the standard deviation of the Gaussian kernel."
+ ), + ) multicomp_split = traits.Bool( False, usedefault=True, argstr="-mcr", position=0, - desc="Enable reading of multi-component images.") + desc="Enable reading of multi-component images.", + ) is_4d = traits.Bool( False, usedefault=True, - desc=("Changes command to support 4D file operations (default is" - " false).")) + desc=("Changes command to support 4D file operations (default is" " false)."), + ) class C3dOutputSpec(TraitedSpec): @@ -147,6 +192,7 @@ class C3d(CommandLine): >>> c3.cmdline 'c4d epi.nii -type short -o epi.img' """ + input_spec = C3dInputSpec output_spec = C3dOutputSpec @@ -163,8 +209,7 @@ def _is_4d(self): def _run_interface(self, runtime): cmd = self._cmd - if (not isdefined(self.inputs.out_file) - and not isdefined(self.inputs.out_files)): + if not isdefined(self.inputs.out_file) and not isdefined(self.inputs.out_files): # Convert3d does not want to override file, by default # so we define a new output file self._gen_outfile() @@ -175,8 +220,9 @@ def _run_interface(self, runtime): def _gen_outfile(self): # if many infiles, raise exception if (len(self.inputs.in_file) > 1) or ("*" in self.inputs.in_file[0]): - raise AttributeError("Multiple in_files found - specify either" - " `out_file` or `out_files`.") + raise AttributeError( + "Multiple in_files found - specify either" " `out_file` or `out_files`." + ) _, fn, ext = split_filename(self.inputs.in_file[0]) self.inputs.out_file = fn + "_generated" + ext # if generated file will overwrite, raise error @@ -192,9 +238,11 @@ def _list_outputs(self): if len(self.inputs.out_files) == 1: _out_files = glob(os.path.abspath(self.inputs.out_files[0])) else: - _out_files = [os.path.abspath(f) for f in self.inputs.out_files - if os.path.exists(os.path.abspath(f))] + _out_files = [ + os.path.abspath(f) + for f in self.inputs.out_files + if os.path.exists(os.path.abspath(f)) + ] outputs["out_files"] = _out_files return outputs - diff --git a/nipype/interfaces/camino/__init__.py b/nipype/interfaces/camino/__init__.py index 0120732ef6..e90cc6f375 100644 --- a/nipype/interfaces/camino/__init__.py +++ b/nipype/interfaces/camino/__init__.py @@ -5,14 +5,36 @@ """ from .connectivity import Conmat -from .convert import (Image2Voxel, FSL2Scheme, VtkStreamlines, ProcStreamlines, - TractShredder, DT2NIfTI, NIfTIDT2Camino, AnalyzeHeader, - Shredder) -from .dti import (DTIFit, ModelFit, DTLUTGen, PicoPDFs, Track, TrackPICo, - TrackBayesDirac, TrackDT, TrackBallStick, TrackBootstrap, - TrackBedpostxDeter, TrackBedpostxProba, - ComputeFractionalAnisotropy, ComputeMeanDiffusivity, - ComputeTensorTrace, ComputeEigensystem, DTMetric) -from .calib import (SFPICOCalibData, SFLUTGen) -from .odf import (QBallMX, LinRecon, SFPeaks, MESD) +from .convert import ( + Image2Voxel, + FSL2Scheme, + VtkStreamlines, + ProcStreamlines, + TractShredder, + DT2NIfTI, + NIfTIDT2Camino, + AnalyzeHeader, + Shredder, +) +from .dti import ( + DTIFit, + ModelFit, + DTLUTGen, + PicoPDFs, + Track, + TrackPICo, + TrackBayesDirac, + TrackDT, + TrackBallStick, + TrackBootstrap, + TrackBedpostxDeter, + TrackBedpostxProba, + ComputeFractionalAnisotropy, + ComputeMeanDiffusivity, + ComputeTensorTrace, + ComputeEigensystem, + DTMetric, +) +from .calib import SFPICOCalibData, SFLUTGen +from .odf import QBallMX, LinRecon, SFPeaks, MESD from .utils import ImageStats diff --git a/nipype/interfaces/camino/calib.py b/nipype/interfaces/camino/calib.py index 24eb993bf6..a16dbd9149 100644 --- a/nipype/interfaces/camino/calib.py +++ b/nipype/interfaces/camino/calib.py @@ -3,95 +3,122 @@ 
import os from ...utils.filemanip import split_filename -from ..base import (traits, TraitedSpec, File, StdOutCommandLine, - StdOutCommandLineInputSpec) +from ..base import ( + traits, + TraitedSpec, + File, + StdOutCommandLine, + StdOutCommandLineInputSpec, +) class SFPICOCalibDataInputSpec(StdOutCommandLineInputSpec): snr = traits.Float( - argstr='-snr %f', - units='NA', - desc=('Specifies the signal-to-noise ratio of the ' - 'non-diffusion-weighted measurements to use in simulations.')) + argstr="-snr %f", + units="NA", + desc=( + "Specifies the signal-to-noise ratio of the " + "non-diffusion-weighted measurements to use in simulations." + ), + ) scheme_file = File( exists=True, - argstr='-schemefile %s', + argstr="-schemefile %s", mandatory=True, - desc='Specifies the scheme file for the diffusion MRI data') + desc="Specifies the scheme file for the diffusion MRI data", + ) info_file = File( - desc='The name to be given to the information output filename.', - argstr='-infooutputfile %s', + desc="The name to be given to the information output filename.", + argstr="-infooutputfile %s", mandatory=True, genfile=True, - hash_files=False) # Genfile and hash_files? + hash_files=False, + ) # Genfile and hash_files? trace = traits.Float( - argstr='-trace %f', - units='NA', - desc='Trace of the diffusion tensor(s) used in the test function.') + argstr="-trace %f", + units="NA", + desc="Trace of the diffusion tensor(s) used in the test function.", + ) onedtfarange = traits.List( traits.Float, - argstr='-onedtfarange %s', + argstr="-onedtfarange %s", minlen=2, maxlen=2, - units='NA', - desc=('Minimum and maximum FA for the single tensor ' - 'synthetic data.')) + units="NA", + desc=("Minimum and maximum FA for the single tensor " "synthetic data."), + ) onedtfastep = traits.Float( - argstr='-onedtfastep %f', - units='NA', - desc=('FA step size controlling how many steps there are ' - 'between the minimum and maximum FA settings.')) + argstr="-onedtfastep %f", + units="NA", + desc=( + "FA step size controlling how many steps there are " + "between the minimum and maximum FA settings." + ), + ) twodtfarange = traits.List( traits.Float, - argstr='-twodtfarange %s', + argstr="-twodtfarange %s", minlen=2, maxlen=2, - units='NA', - desc=('Minimum and maximum FA for the two tensor ' - 'synthetic data. FA is varied for both tensors ' - 'to give all the different permutations.')) + units="NA", + desc=( + "Minimum and maximum FA for the two tensor " + "synthetic data. FA is varied for both tensors " + "to give all the different permutations." + ), + ) twodtfastep = traits.Float( - argstr='-twodtfastep %f', - units='NA', - desc=('FA step size controlling how many steps there are ' - 'between the minimum and maximum FA settings ' - 'for the two tensor cases.')) + argstr="-twodtfastep %f", + units="NA", + desc=( + "FA step size controlling how many steps there are " + "between the minimum and maximum FA settings " + "for the two tensor cases." 
+ ), + ) twodtanglerange = traits.List( traits.Float, - argstr='-twodtanglerange %s', + argstr="-twodtanglerange %s", minlen=2, maxlen=2, - units='NA', - desc=('Minimum and maximum crossing angles ' - 'between the two fibres.')) + units="NA", + desc=("Minimum and maximum crossing angles " "between the two fibres."), + ) twodtanglestep = traits.Float( - argstr='-twodtanglestep %f', - units='NA', - desc=('Angle step size controlling how many steps there are ' - 'between the minimum and maximum crossing angles for ' - 'the two tensor cases.')) + argstr="-twodtanglestep %f", + units="NA", + desc=( + "Angle step size controlling how many steps there are " + "between the minimum and maximum crossing angles for " + "the two tensor cases." + ), + ) twodtmixmax = traits.Float( - argstr='-twodtmixmax %f', - units='NA', - desc= - ('Mixing parameter controlling the proportion of one fibre population ' - 'to the other. The minimum mixing parameter is (1 - twodtmixmax).')) + argstr="-twodtmixmax %f", + units="NA", + desc=( + "Mixing parameter controlling the proportion of one fibre population " + "to the other. The minimum mixing parameter is (1 - twodtmixmax)." + ), + ) twodtmixstep = traits.Float( - argstr='-twodtmixstep %f', - units='NA', - desc=('Mixing parameter step size for the two tensor cases. ' - 'Specify how many mixing parameter increments to use.')) + argstr="-twodtmixstep %f", + units="NA", + desc=( + "Mixing parameter step size for the two tensor cases. " + "Specify how many mixing parameter increments to use." + ), + ) seed = traits.Float( - argstr='-seed %f', - units='NA', - desc= - 'Specifies the random seed to use for noise generation in simulation trials.' + argstr="-seed %f", + units="NA", + desc="Specifies the random seed to use for noise generation in simulation trials.", ) class SFPICOCalibDataOutputSpec(TraitedSpec): - PICOCalib = File(exists=True, desc='Calibration dataset') - calib_info = File(exists=True, desc='Calibration dataset') + PICOCalib = File(exists=True, desc="Calibration dataset") + calib_info = File(exists=True, desc="Calibration dataset") class SFPICOCalibData(StdOutCommandLine): @@ -146,88 +173,103 @@ class SFPICOCalibData(StdOutCommandLine): two fibre cases. However, care should be taken to ensure that enough data is generated for calculating the LUT. # doctest: +SKIP """ - _cmd = 'sfpicocalibdata' + + _cmd = "sfpicocalibdata" input_spec = SFPICOCalibDataInputSpec output_spec = SFPICOCalibDataOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['PICOCalib'] = os.path.abspath(self._gen_outfilename()) - outputs['calib_info'] = os.path.abspath(self.inputs.info_file) + outputs["PICOCalib"] = os.path.abspath(self._gen_outfilename()) + outputs["calib_info"] = os.path.abspath(self.inputs.info_file) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.scheme_file) - return name + '_PICOCalib.Bfloat' + return name + "_PICOCalib.Bfloat" class SFLUTGenInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='-inputfile %s', + argstr="-inputfile %s", mandatory=True, - desc='Voxel-order data of the spherical functions peaks.') + desc="Voxel-order data of the spherical functions peaks.", + ) info_file = File( - argstr='-infofile %s', + argstr="-infofile %s", mandatory=True, - desc=('The Info file that corresponds to the calibration ' - 'datafile used in the reconstruction.')) + desc=( + "The Info file that corresponds to the calibration " + "datafile used in the reconstruction." 
+        ),
+    )
     outputstem = traits.Str(
-        'LUT',
-        argstr='-outputstem %s',
-        desc=
-        ('Define the name of the generated luts. The form of the filenames will be '
-         '[outputstem]_oneFibreSurfaceCoeffs.Bdouble and '
-         '[outputstem]_twoFibreSurfaceCoeffs.Bdouble'),
-        usedefault=True)
+        "LUT",
+        argstr="-outputstem %s",
+        desc=(
+            "Define the name of the generated luts. The form of the filenames will be "
+            "[outputstem]_oneFibreSurfaceCoeffs.Bdouble and "
+            "[outputstem]_twoFibreSurfaceCoeffs.Bdouble"
+        ),
+        usedefault=True,
+    )
     pdf = traits.Enum(
-        'bingham',
-        'watson',
-        argstr='-pdf %s',
-        desc=
-        ('Sets the distribution to use for the calibration. The default is the Bingham '
-         'distribution, which allows elliptical probability density contours. '
-         'Currently supported options are: '
-         '  bingham - The Bingham distribution, which allows elliptical probability '
-         '  density contours. '
-         '  watson - The Watson distribution. This distribution is rotationally symmetric.'
-         ),
-        usedefault=True)
+        "bingham",
+        "watson",
+        argstr="-pdf %s",
+        desc=(
+            "Sets the distribution to use for the calibration. The default is the Bingham "
+            "distribution, which allows elliptical probability density contours. "
+            "Currently supported options are: "
+            "  bingham - The Bingham distribution, which allows elliptical probability "
+            "  density contours. "
+            "  watson - The Watson distribution. This distribution is rotationally symmetric."
+        ),
+        usedefault=True,
+    )
     binincsize = traits.Int(
-        argstr='-binincsize %d',
-        units='NA',
-        desc=
-        ('Sets the size of the bins. In the case of 2D histograms such as the '
-         'Bingham, the bins are always square. Default is 1.'))
+        argstr="-binincsize %d",
+        units="NA",
+        desc=(
+            "Sets the size of the bins. In the case of 2D histograms such as the "
+            "Bingham, the bins are always square. Default is 1."
+        ),
+    )
     minvectsperbin = traits.Int(
-        argstr='-minvectsperbin %d',
-        units='NA',
-        desc=
-        ('Specifies the minimum number of fibre-orientation estimates a bin '
-         'must contain before it is used in the lut line/surface generation. '
-         'Default is 50. If you get the error "no fibre-orientation estimates '
-         'in histogram!", the calibration data set is too small to get enough '
-         'samples in any of the histogram bins. You can decrease the minimum '
-         'number per bin to get things running in quick tests, but the sta- '
-         'tistics will not be reliable and for serious applications, you need '
-         'to increase the size of the calibration data set until the error goes.'
-         ))
+        argstr="-minvectsperbin %d",
+        units="NA",
+        desc=(
+            "Specifies the minimum number of fibre-orientation estimates a bin "
+            "must contain before it is used in the lut line/surface generation. "
+            'Default is 50. If you get the error "no fibre-orientation estimates '
+            'in histogram!", the calibration data set is too small to get enough '
+            "samples in any of the histogram bins. You can decrease the minimum "
+            "number per bin to get things running in quick tests, but the "
+            "statistics will not be reliable and for serious applications, you need "
+            "to increase the size of the calibration data set until the error goes."
+        ),
+    )
     directmap = traits.Bool(
-        argstr='-directmap',
-        desc=
-        ('Use direct mapping between the eigenvalues and the distribution parameters '
-         'instead of the log of the eigenvalues.'))
+        argstr="-directmap",
+        desc=(
+            "Use direct mapping between the eigenvalues and the distribution parameters "
+            "instead of the log of the eigenvalues."
+ ), + ) order = traits.Int( - argstr='-order %d', - units='NA', - desc= - ('The order of the polynomial fitting the surface. Order 1 is linear. ' - 'Order 2 (default) is quadratic.')) + argstr="-order %d", + units="NA", + desc=( + "The order of the polynomial fitting the surface. Order 1 is linear. " + "Order 2 (default) is quadratic." + ), + ) class SFLUTGenOutputSpec(TraitedSpec): - lut_one_fibre = File(exists=True, desc='PICo lut for one-fibre model') - lut_two_fibres = File(exists=True, desc='PICo lut for two-fibre model') + lut_one_fibre = File(exists=True, desc="PICo lut for one-fibre model") + lut_two_fibres = File(exists=True, desc="PICo lut for two-fibre model") class SFLUTGen(StdOutCommandLine): @@ -278,17 +320,20 @@ class SFLUTGen(StdOutCommandLine): >>> lutgen.inputs.info_file = 'PICO_calib.info' >>> lutgen.run() # doctest: +SKIP """ - _cmd = 'sflutgen' + + _cmd = "sflutgen" input_spec = SFLUTGenInputSpec output_spec = SFLUTGenOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs[ - 'lut_one_fibre'] = self.inputs.outputstem + '_oneFibreSurfaceCoeffs.Bdouble' - outputs[ - 'lut_two_fibres'] = self.inputs.outputstem + '_twoFibreSurfaceCoeffs.Bdouble' + outputs["lut_one_fibre"] = ( + self.inputs.outputstem + "_oneFibreSurfaceCoeffs.Bdouble" + ) + outputs["lut_two_fibres"] = ( + self.inputs.outputstem + "_twoFibreSurfaceCoeffs.Bdouble" + ) return outputs def _gen_outfilename(self): - return '/dev/null' + return "/dev/null" diff --git a/nipype/interfaces/camino/connectivity.py b/nipype/interfaces/camino/connectivity.py index 78172db9cc..5ec7fe8c63 100644 --- a/nipype/interfaces/camino/connectivity.py +++ b/nipype/interfaces/camino/connectivity.py @@ -2,43 +2,55 @@ import os from ...utils.filemanip import split_filename -from ..base import (traits, TraitedSpec, File, CommandLine, - CommandLineInputSpec, isdefined) +from ..base import ( + traits, + TraitedSpec, + File, + CommandLine, + CommandLineInputSpec, + isdefined, +) class ConmatInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='-inputfile %s', + argstr="-inputfile %s", mandatory=True, - desc='Streamlines as generated by the Track interface') + desc="Streamlines as generated by the Track interface", + ) target_file = File( exists=True, - argstr='-targetfile %s', + argstr="-targetfile %s", mandatory=True, - desc= - 'An image containing targets, as used in ProcStreamlines interface.') + desc="An image containing targets, as used in ProcStreamlines interface.", + ) scalar_file = File( exists=True, - argstr='-scalarfile %s', - desc=('Optional scalar file for computing tract-based statistics. ' - 'Must be in the same space as the target file.'), - requires=['tract_stat']) + argstr="-scalarfile %s", + desc=( + "Optional scalar file for computing tract-based statistics. " + "Must be in the same space as the target file." + ), + requires=["tract_stat"], + ) targetname_file = File( exists=True, - argstr='-targetnamefile %s', - desc= - ('Optional names of targets. This file should contain one entry per line, ' - 'with the target intensity followed by the name, separated by white space. ' - 'For example: ' - ' 1 some_brain_region ' - ' 2 some_other_region ' - 'These names will be used in the output. The names themselves should not ' - 'contain spaces or commas. The labels may be in any order but the output ' - 'matrices will be ordered by label intensity.')) + argstr="-targetnamefile %s", + desc=( + "Optional names of targets. 
This file should contain one entry per line, " + "with the target intensity followed by the name, separated by white space. " + "For example: " + " 1 some_brain_region " + " 2 some_other_region " + "These names will be used in the output. The names themselves should not " + "contain spaces or commas. The labels may be in any order but the output " + "matrices will be ordered by label intensity." + ), + ) tract_stat = traits.Enum( "mean", @@ -47,31 +59,38 @@ class ConmatInputSpec(CommandLineInputSpec): "sum", "median", "var", - argstr='-tractstat %s', - units='NA', + argstr="-tractstat %s", + units="NA", desc=("Tract statistic to use. See TractStats for other options."), - requires=['scalar_file'], - xor=['tract_prop']) + requires=["scalar_file"], + xor=["tract_prop"], + ) tract_prop = traits.Enum( "length", "endpointsep", - argstr='-tractstat %s', - units='NA', - xor=['tract_stat'], - desc=('Tract property average to compute in the connectivity matrix. ' - 'See TractStats for details.')) + argstr="-tractstat %s", + units="NA", + xor=["tract_stat"], + desc=( + "Tract property average to compute in the connectivity matrix. " + "See TractStats for details." + ), + ) output_root = File( - argstr='-outputroot %s', + argstr="-outputroot %s", genfile=True, - desc=('filename root prepended onto the names of the output files. ' - 'The extension will be determined from the input.')) + desc=( + "filename root prepended onto the names of the output files. " + "The extension will be determined from the input." + ), + ) class ConmatOutputSpec(TraitedSpec): - conmat_sc = File(exists=True, desc='Connectivity matrix in CSV file.') - conmat_ts = File(desc='Tract statistics in CSV file.') + conmat_sc = File(exists=True, desc="Connectivity matrix in CSV file.") + conmat_ts = File(desc="Tract statistics in CSV file.") class Conmat(CommandLine): @@ -149,15 +168,16 @@ class Conmat(CommandLine): >>> conmat.tract_stat = 'mean' >>> conmat.run() # doctest: +SKIP """ - _cmd = 'conmat' + + _cmd = "conmat" input_spec = ConmatInputSpec output_spec = ConmatOutputSpec def _list_outputs(self): outputs = self.output_spec().get() output_root = self._gen_outputroot() - outputs['conmat_sc'] = os.path.abspath(output_root + "sc.csv") - outputs['conmat_ts'] = os.path.abspath(output_root + "ts.csv") + outputs["conmat_sc"] = os.path.abspath(output_root + "sc.csv") + outputs["conmat_ts"] = os.path.abspath(output_root + "ts.csv") return outputs def _gen_outfilename(self): @@ -166,11 +186,11 @@ def _gen_outfilename(self): def _gen_outputroot(self): output_root = self.inputs.output_root if not isdefined(output_root): - output_root = self._gen_filename('output_root') + output_root = self._gen_filename("output_root") return output_root def _gen_filename(self, name): - if name == 'output_root': + if name == "output_root": _, filename, _ = split_filename(self.inputs.in_file) filename = filename + "_" return filename diff --git a/nipype/interfaces/camino/convert.py b/nipype/interfaces/camino/convert.py index 2ac96befaa..a5b4b70fb3 100644 --- a/nipype/interfaces/camino/convert.py +++ b/nipype/interfaces/camino/convert.py @@ -4,18 +4,27 @@ import glob from ...utils.filemanip import split_filename -from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec, - File, StdOutCommandLine, OutputMultiPath, - StdOutCommandLineInputSpec, isdefined) +from ..base import ( + CommandLineInputSpec, + CommandLine, + traits, + TraitedSpec, + File, + StdOutCommandLine, + OutputMultiPath, + StdOutCommandLineInputSpec, + isdefined, +) class 
Image2VoxelInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='-4dimage %s', + argstr="-4dimage %s", mandatory=True, position=1, - desc='4d image file') + desc="4d image file", + ) # TODO convert list of files on the fly # imagelist = File(exists=True, argstr='-imagelist %s', # mandatory=True, position=1, @@ -31,16 +40,15 @@ class Image2VoxelInputSpec(StdOutCommandLineInputSpec): "int", "long", "double", - argstr='-outputdatatype %s', + argstr="-outputdatatype %s", position=2, - desc= - '"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"', - usedefault=True) + desc='"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"', + usedefault=True, + ) class Image2VoxelOutputSpec(TraitedSpec): - voxel_order = File( - exists=True, desc='path/name of 4D volume in voxel order') + voxel_order = File(exists=True, desc="path/name of 4D volume in voxel order") class Image2Voxel(StdOutCommandLine): @@ -59,71 +67,76 @@ class Image2Voxel(StdOutCommandLine): >>> img2vox.inputs.in_file = '4d_dwi.nii' >>> img2vox.run() # doctest: +SKIP """ - _cmd = 'image2voxel' + + _cmd = "image2voxel" input_spec = Image2VoxelInputSpec output_spec = Image2VoxelOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['voxel_order'] = os.path.abspath(self._gen_outfilename()) + outputs["voxel_order"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '.B' + self.inputs.out_type + return name + ".B" + self.inputs.out_type class FSL2SchemeInputSpec(StdOutCommandLineInputSpec): bvec_file = File( exists=True, - argstr='-bvecfile %s', + argstr="-bvecfile %s", mandatory=True, position=1, - desc='b vector file') + desc="b vector file", + ) bval_file = File( exists=True, - argstr='-bvalfile %s', + argstr="-bvalfile %s", mandatory=True, position=2, - desc='b value file') + desc="b value file", + ) numscans = traits.Int( - argstr='-numscans %d', - units='NA', - desc= - "Output all measurements numerous (n) times, used when combining multiple scans from the same imaging session." + argstr="-numscans %d", + units="NA", + desc="Output all measurements numerous (n) times, used when combining multiple scans from the same imaging session.", ) interleave = traits.Bool( - argstr='-interleave', - desc="Interleave repeated scans. Only used with -numscans.") + argstr="-interleave", + desc="Interleave repeated scans. Only used with -numscans.", + ) bscale = traits.Float( - argstr='-bscale %d', - units='NA', - desc= - "Scaling factor to convert the b-values into different units. Default is 10^6." + argstr="-bscale %d", + units="NA", + desc="Scaling factor to convert the b-values into different units. Default is 10^6.", ) diffusiontime = traits.Float( - argstr='-diffusiontime %f', units='NA', desc="Diffusion time") + argstr="-diffusiontime %f", units="NA", desc="Diffusion time" + ) flipx = traits.Bool( - argstr='-flipx', desc="Negate the x component of all the vectors.") + argstr="-flipx", desc="Negate the x component of all the vectors." + ) flipy = traits.Bool( - argstr='-flipy', desc="Negate the y component of all the vectors.") + argstr="-flipy", desc="Negate the y component of all the vectors." + ) flipz = traits.Bool( - argstr='-flipz', desc="Negate the z component of all the vectors.") + argstr="-flipz", desc="Negate the z component of all the vectors." + ) usegradmod = traits.Bool( - argstr='-usegradmod', - desc= - "Use the gradient magnitude to scale b. 
This option has no effect if your gradient directions have unit magnitude." + argstr="-usegradmod", + desc="Use the gradient magnitude to scale b. This option has no effect if your gradient directions have unit magnitude.", ) class FSL2SchemeOutputSpec(TraitedSpec): - scheme = File(exists=True, desc='Scheme file') + scheme = File(exists=True, desc="Scheme file") class FSL2Scheme(StdOutCommandLine): @@ -140,80 +153,81 @@ class FSL2Scheme(StdOutCommandLine): >>> makescheme.run() # doctest: +SKIP """ - _cmd = 'fsl2scheme' + + _cmd = "fsl2scheme" input_spec = FSL2SchemeInputSpec output_spec = FSL2SchemeOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['scheme'] = os.path.abspath(self._gen_outfilename()) + outputs["scheme"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.bvec_file) - return name + '.scheme' + return name + ".scheme" class VtkStreamlinesInputSpec(StdOutCommandLineInputSpec): inputmodel = traits.Enum( - 'raw', - 'voxels', - argstr='-inputmodel %s', - desc='input model type (raw or voxels)', - usedefault=True) + "raw", + "voxels", + argstr="-inputmodel %s", + desc="input model type (raw or voxels)", + usedefault=True, + ) in_file = File( - exists=True, - argstr=' < %s', - mandatory=True, - position=-2, - desc='data file') + exists=True, argstr=" < %s", mandatory=True, position=-2, desc="data file" + ) voxeldims = traits.List( traits.Int, - desc='voxel dimensions in mm', - argstr='-voxeldims %s', + desc="voxel dimensions in mm", + argstr="-voxeldims %s", minlen=3, maxlen=3, position=4, - units='mm') + units="mm", + ) seed_file = File( exists=False, - argstr='-seedfile %s', + argstr="-seedfile %s", position=1, - desc='image containing seed points') + desc="image containing seed points", + ) target_file = File( exists=False, - argstr='-targetfile %s', + argstr="-targetfile %s", position=2, - desc='image containing integer-valued target regions') + desc="image containing integer-valued target regions", + ) scalar_file = File( exists=False, - argstr='-scalarfile %s', + argstr="-scalarfile %s", position=3, - desc='image that is in the same physical space as the tracts') + desc="image that is in the same physical space as the tracts", + ) colourorient = traits.Bool( - argstr='-colourorient', - desc= - "Each point on the streamline is coloured by the local orientation.") + argstr="-colourorient", + desc="Each point on the streamline is coloured by the local orientation.", + ) interpolatescalars = traits.Bool( - argstr='-interpolatescalars', - desc= - "the scalar value at each point on the streamline is calculated by trilinear interpolation" + argstr="-interpolatescalars", + desc="the scalar value at each point on the streamline is calculated by trilinear interpolation", ) interpolate = traits.Bool( - argstr='-interpolate', - desc= - "the scalar value at each point on the streamline is calculated by trilinear interpolation" + argstr="-interpolate", + desc="the scalar value at each point on the streamline is calculated by trilinear interpolation", ) class VtkStreamlinesOutputSpec(TraitedSpec): - vtk = File(exists=True, desc='Streamlines in VTK format') + vtk = File(exists=True, desc="Streamlines in VTK format") class VtkStreamlines(StdOutCommandLine): @@ -229,189 +243,183 @@ class VtkStreamlines(StdOutCommandLine): >>> vtk.inputs.voxeldims = [1,1,1] >>> vtk.run() # doctest: +SKIP """ - _cmd = 'vtkstreamlines' + + _cmd = "vtkstreamlines" input_spec = VtkStreamlinesInputSpec 
output_spec = VtkStreamlinesOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['vtk'] = os.path.abspath(self._gen_outfilename()) + outputs["vtk"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '.vtk' + return name + ".vtk" class ProcStreamlinesInputSpec(StdOutCommandLineInputSpec): inputmodel = traits.Enum( - 'raw', - 'voxels', - argstr='-inputmodel %s', - desc='input model type (raw or voxels)', - usedefault=True) + "raw", + "voxels", + argstr="-inputmodel %s", + desc="input model type (raw or voxels)", + usedefault=True, + ) in_file = File( exists=True, - argstr='-inputfile %s', + argstr="-inputfile %s", mandatory=True, position=1, - desc='data file') + desc="data file", + ) maxtractpoints = traits.Int( - argstr='-maxtractpoints %d', - units='NA', - desc="maximum number of tract points") + argstr="-maxtractpoints %d", units="NA", desc="maximum number of tract points" + ) mintractpoints = traits.Int( - argstr='-mintractpoints %d', - units='NA', - desc="minimum number of tract points") + argstr="-mintractpoints %d", units="NA", desc="minimum number of tract points" + ) maxtractlength = traits.Int( - argstr='-maxtractlength %d', - units='mm', - desc="maximum length of tracts") + argstr="-maxtractlength %d", units="mm", desc="maximum length of tracts" + ) mintractlength = traits.Int( - argstr='-mintractlength %d', - units='mm', - desc="minimum length of tracts") + argstr="-mintractlength %d", units="mm", desc="minimum length of tracts" + ) datadims = traits.List( traits.Int, - desc='data dimensions in voxels', - argstr='-datadims %s', + desc="data dimensions in voxels", + argstr="-datadims %s", minlen=3, maxlen=3, - units='voxels') + units="voxels", + ) voxeldims = traits.List( traits.Int, - desc='voxel dimensions in mm', - argstr='-voxeldims %s', + desc="voxel dimensions in mm", + argstr="-voxeldims %s", minlen=3, maxlen=3, - units='mm') + units="mm", + ) seedpointmm = traits.List( traits.Int, - desc='The coordinates of a single seed point for tractography in mm', - argstr='-seedpointmm %s', + desc="The coordinates of a single seed point for tractography in mm", + argstr="-seedpointmm %s", minlen=3, maxlen=3, - units='mm') + units="mm", + ) seedpointvox = traits.List( traits.Int, - desc= - 'The coordinates of a single seed point for tractography in voxels', - argstr='-seedpointvox %s', + desc="The coordinates of a single seed point for tractography in voxels", + argstr="-seedpointvox %s", minlen=3, maxlen=3, - units='voxels') + units="voxels", + ) seedfile = File( - exists=False, - argstr='-seedfile %s', - desc='Image Containing Seed Points') + exists=False, argstr="-seedfile %s", desc="Image Containing Seed Points" + ) regionindex = traits.Int( - argstr='-regionindex %d', - units='mm', - desc="index of specific region to process") + argstr="-regionindex %d", units="mm", desc="index of specific region to process" + ) iterations = traits.Float( - argstr='-iterations %d', - units='NA', - desc= - "Number of streamlines generated for each seed. Not required when outputting streamlines, but needed to create PICo images. The default is 1 if the output is streamlines, and 5000 if the output is connection probability images." + argstr="-iterations %d", + units="NA", + desc="Number of streamlines generated for each seed. Not required when outputting streamlines, but needed to create PICo images. 
The default is 1 if the output is streamlines, and 5000 if the output is connection probability images.", ) targetfile = File( - exists=False, - argstr='-targetfile %s', - desc='Image containing target volumes.') + exists=False, argstr="-targetfile %s", desc="Image containing target volumes." + ) allowmultitargets = traits.Bool( - argstr='-allowmultitargets', - desc="Allows streamlines to connect to multiple target volumes.") + argstr="-allowmultitargets", + desc="Allows streamlines to connect to multiple target volumes.", + ) directional = traits.List( traits.Int, - desc= - 'Splits the streamlines at the seed point and computes separate connection probabilities for each segment. Streamline segments are grouped according to their dot product with the vector (X, Y, Z). The ideal vector will be tangential to the streamline trajectory at the seed, such that the streamline projects from the seed along (X, Y, Z) and -(X, Y, Z). However, it is only necessary for the streamline trajectory to not be orthogonal to (X, Y, Z).', - argstr='-directional %s', + desc="Splits the streamlines at the seed point and computes separate connection probabilities for each segment. Streamline segments are grouped according to their dot product with the vector (X, Y, Z). The ideal vector will be tangential to the streamline trajectory at the seed, such that the streamline projects from the seed along (X, Y, Z) and -(X, Y, Z). However, it is only necessary for the streamline trajectory to not be orthogonal to (X, Y, Z).", + argstr="-directional %s", minlen=3, maxlen=3, - units='NA') + units="NA", + ) waypointfile = File( exists=False, - argstr='-waypointfile %s', - desc= - 'Image containing waypoints. Waypoints are defined as regions of the image with the same intensity, where 0 is background and any value > 0 is a waypoint.' + argstr="-waypointfile %s", + desc="Image containing waypoints. Waypoints are defined as regions of the image with the same intensity, where 0 is background and any value > 0 is a waypoint.", ) truncateloops = traits.Bool( - argstr='-truncateloops', - desc= - "This option allows streamlines to enter a waypoint exactly once. After the streamline leaves the waypoint, it is truncated upon a second entry to the waypoint." + argstr="-truncateloops", + desc="This option allows streamlines to enter a waypoint exactly once. After the streamline leaves the waypoint, it is truncated upon a second entry to the waypoint.", ) discardloops = traits.Bool( - argstr='-discardloops', - desc= - "This option allows streamlines to enter a waypoint exactly once. After the streamline leaves the waypoint, the entire streamline is discarded upon a second entry to the waypoint." + argstr="-discardloops", + desc="This option allows streamlines to enter a waypoint exactly once. After the streamline leaves the waypoint, the entire streamline is discarded upon a second entry to the waypoint.", ) exclusionfile = File( exists=False, - argstr='-exclusionfile %s', - desc= - 'Image containing exclusion ROIs. This should be an Analyze 7.5 header / image file.hdr and file.img.' + argstr="-exclusionfile %s", + desc="Image containing exclusion ROIs. This should be an Analyze 7.5 header / image file.hdr and file.img.", ) truncateinexclusion = traits.Bool( - argstr='-truncateinexclusion', - desc="Retain segments of a streamline before entry to an exclusion ROI." 
+ argstr="-truncateinexclusion", + desc="Retain segments of a streamline before entry to an exclusion ROI.", ) endpointfile = File( exists=False, - argstr='-endpointfile %s', - desc= - 'Image containing endpoint ROIs. This should be an Analyze 7.5 header / image file.hdr and file.img.' + argstr="-endpointfile %s", + desc="Image containing endpoint ROIs. This should be an Analyze 7.5 header / image file.hdr and file.img.", ) resamplestepsize = traits.Float( - argstr='-resamplestepsize %d', - units='NA', - desc= - "Each point on a streamline is tested for entry into target, exclusion or waypoint volumes. If the length between points on a tract is not much smaller than the voxel length, then streamlines may pass through part of a voxel without being counted. To avoid this, the program resamples streamlines such that the step size is one tenth of the smallest voxel dimension in the image. This increases the size of raw or oogl streamline output and incurs some performance penalty. The resample resolution can be controlled with this option or disabled altogether by passing a negative step size or by passing the -noresample option." + argstr="-resamplestepsize %d", + units="NA", + desc="Each point on a streamline is tested for entry into target, exclusion or waypoint volumes. If the length between points on a tract is not much smaller than the voxel length, then streamlines may pass through part of a voxel without being counted. To avoid this, the program resamples streamlines such that the step size is one tenth of the smallest voxel dimension in the image. This increases the size of raw or oogl streamline output and incurs some performance penalty. The resample resolution can be controlled with this option or disabled altogether by passing a negative step size or by passing the -noresample option.", ) noresample = traits.Bool( - argstr='-noresample', - desc= - "Disables resampling of input streamlines. Resampling is automatically disabled if the input model is voxels." + argstr="-noresample", + desc="Disables resampling of input streamlines. Resampling is automatically disabled if the input model is voxels.", ) outputtracts = traits.Bool( - argstr='-outputtracts', - desc="Output streamlines in raw binary format.") + argstr="-outputtracts", desc="Output streamlines in raw binary format." 
+ ) outputroot = File( exists=False, - argstr='-outputroot %s', - desc='Prepended onto all output file names.') + argstr="-outputroot %s", + desc="Prepended onto all output file names.", + ) - gzip = traits.Bool( - argstr='-gzip', desc="save the output image in gzip format") + gzip = traits.Bool(argstr="-gzip", desc="save the output image in gzip format") outputcp = traits.Bool( - argstr='-outputcp', + argstr="-outputcp", desc="output the connection probability map (Analyze image, float)", - requires=['outputroot', 'seedfile']) + requires=["outputroot", "seedfile"], + ) outputsc = traits.Bool( - argstr='-outputsc', + argstr="-outputsc", desc="output the connection probability map (raw streamlines, int)", - requires=['outputroot', 'seedfile']) + requires=["outputroot", "seedfile"], + ) outputacm = traits.Bool( - argstr='-outputacm', - desc= - "output all tracts in a single connection probability map (Analyze image)", - requires=['outputroot', 'seedfile']) + argstr="-outputacm", + desc="output all tracts in a single connection probability map (Analyze image)", + requires=["outputroot", "seedfile"], + ) outputcbs = traits.Bool( - argstr='-outputcbs', - desc= - "outputs connectivity-based segmentation maps; requires target outputfile", - requires=['outputroot', 'targetfile', 'seedfile']) + argstr="-outputcbs", + desc="outputs connectivity-based segmentation maps; requires target outputfile", + requires=["outputroot", "targetfile", "seedfile"], + ) class ProcStreamlinesOutputSpec(TraitedSpec): - proc = File(exists=True, desc='Processed Streamlines') + proc = File(exists=True, desc="Processed Streamlines") outputroot_files = OutputMultiPath(File(exists=True)) @@ -430,18 +438,19 @@ class ProcStreamlines(StdOutCommandLine): >>> proc.inputs.in_file = 'tract_data.Bfloat' >>> proc.run() # doctest: +SKIP """ - _cmd = 'procstreamlines' + + _cmd = "procstreamlines" input_spec = ProcStreamlinesInputSpec output_spec = ProcStreamlinesOutputSpec def _format_arg(self, name, spec, value): - if name == 'outputroot': + if name == "outputroot": return spec.argstr % self._get_actual_outputroot(value) return super(ProcStreamlines, self)._format_arg(name, spec, value) def __init__(self, *args, **kwargs): - super(ProcStreamlines, self).__init__(*args, **kwargs) - self.outputroot_files = [] + super(ProcStreamlines, self).__init__(*args, **kwargs) + self.outputroot_files = [] def _run_interface(self, runtime): outputroot = self.inputs.outputroot @@ -452,53 +461,49 @@ def _run_interface(self, runtime): os.makedirs(base) new_runtime = super(ProcStreamlines, self)._run_interface(runtime) self.outputroot_files = glob.glob( - os.path.join(os.getcwd(), actual_outputroot + '*')) + os.path.join(os.getcwd(), actual_outputroot + "*") + ) return new_runtime else: new_runtime = super(ProcStreamlines, self)._run_interface(runtime) return new_runtime def _get_actual_outputroot(self, outputroot): - actual_outputroot = os.path.join('procstream_outfiles', outputroot) + actual_outputroot = os.path.join("procstream_outfiles", outputroot) return actual_outputroot def _list_outputs(self): outputs = self.output_spec().get() - outputs['proc'] = os.path.abspath(self._gen_outfilename()) - outputs['outputroot_files'] = self.outputroot_files + outputs["proc"] = os.path.abspath(self._gen_outfilename()) + outputs["outputroot_files"] = self.outputroot_files return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_proc' + return name + "_proc" class 
TractShredderInputSpec(StdOutCommandLineInputSpec): in_file = File( - exists=True, - argstr='< %s', - mandatory=True, - position=-2, - desc='tract file') + exists=True, argstr="< %s", mandatory=True, position=-2, desc="tract file" + ) offset = traits.Int( - argstr='%d', - units='NA', - desc='initial offset of offset tracts', - position=1) + argstr="%d", units="NA", desc="initial offset of offset tracts", position=1 + ) bunchsize = traits.Int( - argstr='%d', - units='NA', - desc='reads and outputs a group of bunchsize tracts', - position=2) + argstr="%d", + units="NA", + desc="reads and outputs a group of bunchsize tracts", + position=2, + ) - space = traits.Int( - argstr='%d', units='NA', desc='skips space tracts', position=3) + space = traits.Int(argstr="%d", units="NA", desc="skips space tracts", position=3) class TractShredderOutputSpec(TraitedSpec): - shredded = File(exists=True, desc='Shredded tract file') + shredded = File(exists=True, desc="Shredded tract file") class TractShredder(StdOutCommandLine): @@ -522,13 +527,14 @@ class TractShredder(StdOutCommandLine): >>> shred.inputs.space = 2 >>> shred.run() # doctest: +SKIP """ - _cmd = 'tractshredder' + + _cmd = "tractshredder" input_spec = TractShredderInputSpec output_spec = TractShredderOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['shredded'] = os.path.abspath(self._gen_outfilename()) + outputs["shredded"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): @@ -539,35 +545,38 @@ def _gen_outfilename(self): class DT2NIfTIInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='-inputfile %s', + argstr="-inputfile %s", mandatory=True, position=1, - desc='tract file') + desc="tract file", + ) output_root = File( - argstr='-outputroot %s', + argstr="-outputroot %s", position=2, genfile=True, - desc='filename root prepended onto the names of three output files.') + desc="filename root prepended onto the names of three output files.", + ) header_file = File( exists=True, - argstr='-header %s', + argstr="-header %s", mandatory=True, position=3, - desc=' A Nifti .nii or .hdr file containing the header information') + desc=" A Nifti .nii or .hdr file containing the header information", + ) class DT2NIfTIOutputSpec(TraitedSpec): - dt = File(exists=True, desc='diffusion tensors in NIfTI format') + dt = File(exists=True, desc="diffusion tensors in NIfTI format") exitcode = File( - exists=True, - desc='exit codes from Camino reconstruction in NIfTI format') + exists=True, desc="exit codes from Camino reconstruction in NIfTI format" + ) lns0 = File( - exists=True, - desc='estimated lns0 from Camino reconstruction in NIfTI format') + exists=True, desc="estimated lns0 from Camino reconstruction in NIfTI format" + ) class DT2NIfTI(CommandLine): @@ -576,7 +585,8 @@ class DT2NIfTI(CommandLine): Reads Camino diffusion tensors, and converts them to NIFTI format as three .nii files. 
""" - _cmd = 'dt2nii' + + _cmd = "dt2nii" input_spec = DT2NIfTIInputSpec output_spec = DT2NIfTIOutputSpec @@ -594,11 +604,11 @@ def _gen_outfilename(self): def _gen_outputroot(self): output_root = self.inputs.output_root if not isdefined(output_root): - output_root = self._gen_filename('output_root') + output_root = self._gen_filename("output_root") return output_root def _gen_filename(self, name): - if name == 'output_root': + if name == "output_root": _, filename, _ = split_filename(self.inputs.in_file) filename = filename + "_" return filename @@ -607,56 +617,55 @@ def _gen_filename(self, name): class NIfTIDT2CaminoInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='-inputfile %s', + argstr="-inputfile %s", mandatory=True, position=1, - desc= - 'A NIFTI-1 dataset containing diffusion tensors. The tensors are assumed to be ' - 'in lower-triangular order as specified by the NIFTI standard for the storage of ' - 'symmetric matrices. This file should be either a .nii or a .hdr file.' + desc="A NIFTI-1 dataset containing diffusion tensors. The tensors are assumed to be " + "in lower-triangular order as specified by the NIFTI standard for the storage of " + "symmetric matrices. This file should be either a .nii or a .hdr file.", ) s0_file = File( - argstr='-s0 %s', + argstr="-s0 %s", exists=True, - desc= - 'File containing the unweighted signal for each voxel, may be a raw binary ' - 'file (specify type with -inputdatatype) or a supported image file.') + desc="File containing the unweighted signal for each voxel, may be a raw binary " + "file (specify type with -inputdatatype) or a supported image file.", + ) lns0_file = File( - argstr='-lns0 %s', + argstr="-lns0 %s", exists=True, - desc= - 'File containing the log of the unweighted signal for each voxel, may be a ' - 'raw binary file (specify type with -inputdatatype) or a supported image file.' + desc="File containing the log of the unweighted signal for each voxel, may be a " + "raw binary file (specify type with -inputdatatype) or a supported image file.", ) bgmask = File( - argstr='-bgmask %s', + argstr="-bgmask %s", exists=True, - desc= - 'Binary valued brain / background segmentation, may be a raw binary file ' - '(specify type with -maskdatatype) or a supported image file.') + desc="Binary valued brain / background segmentation, may be a raw binary file " + "(specify type with -maskdatatype) or a supported image file.", + ) scaleslope = traits.Float( - argstr='-scaleslope %s', - desc='A value v in the diffusion tensor is scaled to v * s + i. This is ' - 'applied after any scaling specified by the input image. Default is 1.0.' + argstr="-scaleslope %s", + desc="A value v in the diffusion tensor is scaled to v * s + i. This is " + "applied after any scaling specified by the input image. Default is 1.0.", ) scaleinter = traits.Float( - argstr='-scaleinter %s', - desc='A value v in the diffusion tensor is scaled to v * s + i. This is ' - 'applied after any scaling specified by the input image. Default is 0.0.' + argstr="-scaleinter %s", + desc="A value v in the diffusion tensor is scaled to v * s + i. This is " + "applied after any scaling specified by the input image. 
Default is 0.0.", ) uppertriangular = traits.Bool( - argstr='-uppertriangular %s', - desc='Specifies input in upper-triangular (VTK style) order.') + argstr="-uppertriangular %s", + desc="Specifies input in upper-triangular (VTK style) order.", + ) class NIfTIDT2CaminoOutputSpec(TraitedSpec): - out_file = File(desc='diffusion tensors data in Camino format') + out_file = File(desc="diffusion tensors data in Camino format") class NIfTIDT2Camino(CommandLine): @@ -680,17 +689,18 @@ class NIfTIDT2Camino(CommandLine): to use the -uppertriangular option to convert these correctly. """ - _cmd = 'niftidt2camino' + + _cmd = "niftidt2camino" input_spec = NIfTIDT2CaminoInputSpec output_spec = NIfTIDT2CaminoOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs["out_file"] = self._gen_filename('out_file') + outputs["out_file"] = self._gen_filename("out_file") return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": _, filename, _ = split_filename(self.inputs.in_file) return filename @@ -698,33 +708,39 @@ def _gen_filename(self, name): class AnalyzeHeaderInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='< %s', + argstr="< %s", mandatory=True, position=1, - desc='Tensor-fitted data filename') + desc="Tensor-fitted data filename", + ) scheme_file = File( exists=True, - argstr='%s', + argstr="%s", position=2, - desc=('Camino scheme file (b values / vectors, ' - 'see camino.fsl2scheme)')) + desc=("Camino scheme file (b values / vectors, " "see camino.fsl2scheme)"), + ) readheader = File( exists=True, - argstr='-readheader %s', + argstr="-readheader %s", position=3, - desc=('Reads header information from file and prints to ' - 'stdout. If this option is not specified, then the ' - 'program writes a header based on the other ' - 'arguments.')) + desc=( + "Reads header information from file and prints to " + "stdout. If this option is not specified, then the " + "program writes a header based on the other " + "arguments." + ), + ) printimagedims = File( exists=True, - argstr='-printimagedims %s', + argstr="-printimagedims %s", position=3, - desc=('Prints image data and voxel dimensions as ' - 'Camino arguments and exits.')) + desc=( + "Prints image data and voxel dimensions as " "Camino arguments and exits." + ), + ) # How do we implement both file and enum (for the program) in one argument? # Is this option useful anyway? @@ -734,143 +750,170 @@ class AnalyzeHeaderInputSpec(StdOutCommandLineInputSpec): # vcthreshselect, pdview, track. printprogargs = File( exists=True, - argstr='-printprogargs %s', + argstr="-printprogargs %s", position=3, - desc=('Prints data dimension (and type, if relevant) ' - 'arguments for a specific Camino program, where ' - 'prog is one of shredder, scanner2voxel, ' - 'vcthreshselect, pdview, track.')) + desc=( + "Prints data dimension (and type, if relevant) " + "arguments for a specific Camino program, where " + "prog is one of shredder, scanner2voxel, " + "vcthreshselect, pdview, track." 
+ ), + ) printintelbyteorder = File( exists=True, - argstr='-printintelbyteorder %s', + argstr="-printintelbyteorder %s", position=3, - desc=('Prints 1 if the header is little-endian, ' - '0 otherwise.')) + desc=("Prints 1 if the header is little-endian, " "0 otherwise."), + ) printbigendian = File( exists=True, - argstr='-printbigendian %s', + argstr="-printbigendian %s", position=3, - desc=('Prints 1 if the header is big-endian, 0 ' - 'otherwise.')) + desc=("Prints 1 if the header is big-endian, 0 " "otherwise."), + ) initfromheader = File( exists=True, - argstr='-initfromheader %s', + argstr="-initfromheader %s", position=3, - desc=('Reads header information from file and ' - 'intializes a new header with the values read ' - 'from the file. You may replace any ' - 'combination of fields in the new header by ' - 'specifying subsequent options.')) + desc=( + "Reads header information from file and " + "initializes a new header with the values read " + "from the file. You may replace any " + "combination of fields in the new header by " + "specifying subsequent options." + ), + ) data_dims = traits.List( traits.Int, - desc='data dimensions in voxels', - argstr='-datadims %s', + desc="data dimensions in voxels", + argstr="-datadims %s", minlen=3, maxlen=3, - units='voxels') + units="voxels", + ) voxel_dims = traits.List( traits.Float, - desc='voxel dimensions in mm', - argstr='-voxeldims %s', + desc="voxel dimensions in mm", + argstr="-voxeldims %s", minlen=3, maxlen=3, - units='mm') + units="mm", + ) centre = traits.List( traits.Int, - argstr='-centre %s', + argstr="-centre %s", minlen=3, maxlen=3, - units='mm', - desc=('Voxel specifying origin of Talairach ' - 'coordinate system for SPM, default [0 0 0].')) + units="mm", + desc=( + "Voxel specifying origin of Talairach " + "coordinate system for SPM, default [0 0 0]." + ), + ) picoseed = traits.List( traits.Int, - argstr='-picoseed %s', + argstr="-picoseed %s", minlen=3, maxlen=3, - desc=('Voxel specifying the seed (for PICo maps), ' - 'default [0 0 0].'), - units='mm') + desc=("Voxel specifying the seed (for PICo maps), " "default [0 0 0]."), + units="mm", + ) nimages = traits.Int( - argstr='-nimages %d', - units='NA', - desc="Number of images in the img file. Default 1.") + argstr="-nimages %d", + units="NA", + desc="Number of images in the img file. Default 1.", + ) datatype = traits.Enum( - 'byte', - 'char', - '[u]short', - '[u]int', - 'float', - 'complex', - 'double', - argstr='-datatype %s', - desc=('The char datatype is 8 bit (not the 16 bit ' - 'char of Java), as specified by the Analyze ' - '7.5 standard. The byte, ushort and uint ' - 'types are not part of the Analyze ' - 'specification but are supported by SPM.'), - mandatory=True) + "byte", + "char", + "[u]short", + "[u]int", + "float", + "complex", + "double", + argstr="-datatype %s", + desc=( + "The char datatype is 8 bit (not the 16 bit " + "char of Java), as specified by the Analyze " + "7.5 standard. The byte, ushort and uint " + "types are not part of the Analyze " + "specification but are supported by SPM." + ), + mandatory=True, + ) offset = traits.Int( - argstr='-offset %d', - units='NA', - desc=('According to the Analyze 7.5 standard, this is ' - 'the byte offset in the .img file at which ' - 'voxels start. 
This value can be negative to ' - 'specify that the absolute value is applied for ' - 'every image in the file.')) + argstr="-offset %d", + units="NA", + desc=( + "According to the Analyze 7.5 standard, this is " + "the byte offset in the .img file at which " + "voxels start. This value can be negative to " + "specify that the absolute value is applied for " + "every image in the file." + ), + ) greylevels = traits.List( traits.Int, - argstr='-gl %s', + argstr="-gl %s", minlen=2, maxlen=2, - desc=('Minimum and maximum greylevels. Stored as ' - 'shorts in the header.'), - units='NA') + desc=("Minimum and maximum greylevels. Stored as " "shorts in the header."), + units="NA", + ) scaleslope = traits.Float( - argstr='-scaleslope %d', - units='NA', - desc=('Intensities in the image are scaled by ' - 'this factor by SPM and MRICro. Default is ' - '1.0.')) + argstr="-scaleslope %d", + units="NA", + desc=( + "Intensities in the image are scaled by " + "this factor by SPM and MRICro. Default is " + "1.0." + ), + ) scaleinter = traits.Float( - argstr='-scaleinter %d', - units='NA', - desc=('Constant to add to the image intensities. ' - 'Used by SPM and MRIcro.')) + argstr="-scaleinter %d", + units="NA", + desc=("Constant to add to the image intensities. " "Used by SPM and MRIcro."), + ) description = traits.String( - argstr='-description %s', - desc=('Short description - No spaces, max ' - 'length 79 bytes. Will be null ' - 'terminated automatically.')) + argstr="-description %s", + desc=( + "Short description - No spaces, max " + "length 79 bytes. Will be null " + "terminated automatically." + ), + ) intelbyteorder = traits.Bool( - argstr='-intelbyteorder', - desc=("Write header in intel byte order " - "(little-endian).")) + argstr="-intelbyteorder", + desc=("Write header in intel byte order " "(little-endian)."), + ) networkbyteorder = traits.Bool( - argstr='-networkbyteorder', - desc=("Write header in network byte order " - "(big-endian). This is the default " - "for new headers.")) + argstr="-networkbyteorder", + desc=( + "Write header in network byte order " + "(big-endian). This is the default " + "for new headers." 
+ ), + ) class AnalyzeHeaderOutputSpec(TraitedSpec): - header = File(exists=True, desc='Analyze header') + header = File(exists=True, desc="Analyze header") class AnalyzeHeader(StdOutCommandLine): @@ -897,13 +940,14 @@ class AnalyzeHeader(StdOutCommandLine): >>> hdr.inputs.voxel_dims = [1,1,1] >>> hdr.run() # doctest: +SKIP """ - _cmd = 'analyzeheader' + + _cmd = "analyzeheader" input_spec = AnalyzeHeaderInputSpec output_spec = AnalyzeHeaderOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['header'] = os.path.abspath(self._gen_outfilename()) + outputs["header"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): @@ -914,29 +958,28 @@ def _gen_outfilename(self): class ShredderInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='< %s', + argstr="< %s", mandatory=True, position=-2, - desc='raw binary data file') + desc="raw binary data file", + ) offset = traits.Int( - argstr='%d', - units='NA', - desc='initial offset of offset bytes', - position=1) + argstr="%d", units="NA", desc="initial offset of offset bytes", position=1 + ) chunksize = traits.Int( - argstr='%d', - units='NA', - desc='reads and outputs a chunk of chunksize bytes', - position=2) + argstr="%d", + units="NA", + desc="reads and outputs a chunk of chunksize bytes", + position=2, + ) - space = traits.Int( - argstr='%d', units='NA', desc='skips space bytes', position=3) + space = traits.Int(argstr="%d", units="NA", desc="skips space bytes", position=3) class ShredderOutputSpec(TraitedSpec): - shredded = File(exists=True, desc='Shredded binary data file') + shredded = File(exists=True, desc="Shredded binary data file") class Shredder(StdOutCommandLine): @@ -962,13 +1005,14 @@ class Shredder(StdOutCommandLine): >>> shred.inputs.space = 2 >>> shred.run() # doctest: +SKIP """ - _cmd = 'shredder' + + _cmd = "shredder" input_spec = ShredderInputSpec output_spec = ShredderOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['shredded_file'] = os.path.abspath(self._gen_outfilename()) + outputs["shredded"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): diff --git a/nipype/interfaces/camino/dti.py b/nipype/interfaces/camino/dti.py index 703b98f9fb..6a17271bcf 100644 --- a/nipype/interfaces/camino/dti.py +++ b/nipype/interfaces/camino/dti.py @@ -3,45 +3,56 @@ import os from ...utils.filemanip import split_filename -from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec, - File, Directory, StdOutCommandLine, - StdOutCommandLineInputSpec, isdefined, InputMultiPath) +from ..base import ( + CommandLineInputSpec, + CommandLine, + traits, + TraitedSpec, + File, + Directory, + StdOutCommandLine, + StdOutCommandLineInputSpec, + isdefined, + InputMultiPath, +) class DTIFitInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=1, - desc='voxel-order data filename') + desc="voxel-order data filename", + ) bgmask = File( - argstr='-bgmask %s', + argstr="-bgmask %s", exists=True, - desc= - ('Provides the name of a file containing a background mask computed using, ' - 'for example, FSL bet2 program. The mask file contains zero in background ' - 'voxels and non-zero in foreground.')) + desc=( + "Provides the name of a file containing a background mask computed using, " + "for example, FSL bet2 program. The mask file contains zero in background " + "voxels and non-zero in foreground."
+ ), + ) scheme_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=2, - desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)') + desc="Camino scheme file (b values / vectors, see camino.fsl2scheme)", + ) non_linear = traits.Bool( - argstr='-nonlinear', + argstr="-nonlinear", position=3, - desc= - "Use non-linear fitting instead of the default linear regression to the log measurements. " + desc="Use non-linear fitting instead of the default linear regression to the log measurements. ", ) class DTIFitOutputSpec(TraitedSpec): - tensor_fitted = File( - exists=True, desc='path/name of 4D volume in voxel order') + tensor_fitted = File(exists=True, desc="path/name of 4D volume in voxel order") class DTIFit(StdOutCommandLine): @@ -66,95 +77,108 @@ class DTIFit(StdOutCommandLine): >>> fit.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> fit.run() # doctest: +SKIP """ - _cmd = 'dtfit' + + _cmd = "dtfit" input_spec = DTIFitInputSpec output_spec = DTIFitOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['tensor_fitted'] = os.path.abspath(self._gen_outfilename()) + outputs["tensor_fitted"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_DT.Bdouble' + return name + "_DT.Bdouble" class DTMetricInputSpec(CommandLineInputSpec): eigen_data = File( exists=True, - argstr='-inputfile %s', + argstr="-inputfile %s", mandatory=True, - desc='voxel-order data filename') + desc="voxel-order data filename", + ) metric = traits.Enum( - 'fa', - 'md', - 'rd', - 'l1', - 'l2', - 'l3', - 'tr', - 'ra', - '2dfa', - 'cl', - 'cp', - 'cs', - argstr='-stat %s', + "fa", + "md", + "rd", + "l1", + "l2", + "l3", + "tr", + "ra", + "2dfa", + "cl", + "cp", + "cs", + argstr="-stat %s", mandatory=True, - desc= - ('Specifies the metric to compute. Possible choices are: ' - '"fa", "md", "rd", "l1", "l2", "l3", "tr", "ra", "2dfa", "cl", "cp" or "cs".' - )) + desc=( + "Specifies the metric to compute. Possible choices are: " + '"fa", "md", "rd", "l1", "l2", "l3", "tr", "ra", "2dfa", "cl", "cp" or "cs".' + ), + ) inputdatatype = traits.Enum( - 'double', - 'float', - 'long', - 'int', - 'short', - 'char', - argstr='-inputdatatype %s', + "double", + "float", + "long", + "int", + "short", + "char", + argstr="-inputdatatype %s", usedefault=True, - desc=('Specifies the data type of the input data. ' - 'The data type can be any of the following strings: ' - '"char", "short", "int", "long", "float" or "double".' - 'Default is double data type')) + desc=( + "Specifies the data type of the input data. " + "The data type can be any of the following strings: " + '"char", "short", "int", "long", "float" or "double".' + "Default is double data type" + ), + ) outputdatatype = traits.Enum( - 'double', - 'float', - 'long', - 'int', - 'short', - 'char', - argstr='-outputdatatype %s', + "double", + "float", + "long", + "int", + "short", + "char", + argstr="-outputdatatype %s", usedefault=True, - desc=('Specifies the data type of the output data. ' - 'The data type can be any of the following strings: ' - '"char", "short", "int", "long", "float" or "double".' - 'Default is double data type')) + desc=( + "Specifies the data type of the output data. " + "The data type can be any of the following strings: " + '"char", "short", "int", "long", "float" or "double".' 
+ "Default is double data type" + ), + ) data_header = File( - argstr='-header %s', + argstr="-header %s", exists=True, desc=( - 'A Nifti .nii or .nii.gz file containing the header information. ' - 'Usually this will be the header of the raw data file from which ' - 'the diffusion tensors were reconstructed.')) + "A Nifti .nii or .nii.gz file containing the header information. " + "Usually this will be the header of the raw data file from which " + "the diffusion tensors were reconstructed." + ), + ) outputfile = File( - argstr='-outputfile %s', + argstr="-outputfile %s", genfile=True, - desc= - ('Output name. Output will be a .nii.gz file if data_header is provided and' - 'in voxel order with outputdatatype datatype (default: double) otherwise.' - )) + desc=( + "Output name. Output will be a .nii.gz file if data_header is provided and" + "in voxel order with outputdatatype datatype (default: double) otherwise." + ), + ) class DTMetricOutputSpec(TraitedSpec): metric_stats = File( - exists=True, desc='Diffusion Tensor statistics of the chosen metric') + exists=True, desc="Diffusion Tensor statistics of the chosen metric" + ) class DTMetric(CommandLine): @@ -192,13 +216,14 @@ class DTMetric(CommandLine): >>> dtmetric.inputs.outputdatatype = 'float' >>> dtmetric.run() # doctest: +SKIP """ - _cmd = 'dtshape' + + _cmd = "dtshape" input_spec = DTMetricInputSpec output_spec = DTMetricOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['metric_stats'] = os.path.abspath(self._gen_outfilename()) + outputs["metric_stats"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): @@ -207,18 +232,18 @@ def _gen_outfilename(self): def _gen_outputfile(self): outputfile = self.inputs.outputfile if not isdefined(outputfile): - outputfile = self._gen_filename('outputfile') + outputfile = self._gen_filename("outputfile") return outputfile def _gen_filename(self, name): - if name == 'outputfile': + if name == "outputfile": _, name, _ = split_filename(self.inputs.eigen_data) metric = self.inputs.metric datatype = self.inputs.outputdatatype if isdefined(self.inputs.data_header): - filename = name + '_' + metric + '.nii.gz' + filename = name + "_" + metric + ".nii.gz" else: - filename = name + '_' + metric + '.B' + datatype + filename = name + "_" + metric + ".B" + datatype return filename @@ -228,132 +253,129 @@ def _gen_model_options(): # @NoSelf Generate all possible permutations of < multi - tensor > < single - tensor > options """ - single_tensor = [ - 'dt', 'restore', 'algdt', 'nldt_pos', 'nldt', 'ldt_wtd' - ] + single_tensor = ["dt", "restore", "algdt", "nldt_pos", "nldt", "ldt_wtd"] multi_tensor = [ - 'cylcyl', 'cylcyl_eq', 'pospos', 'pospos_eq', 'poscyl', - 'poscyl_eq', 'cylcylcyl', 'cylcylcyl_eq', 'pospospos', - 'pospospos_eq', 'posposcyl', 'posposcyl_eq', 'poscylcyl', - 'poscylcyl_eq' + "cylcyl", + "cylcyl_eq", + "pospos", + "pospos_eq", + "poscyl", + "poscyl_eq", + "cylcylcyl", + "cylcylcyl_eq", + "pospospos", + "pospospos_eq", + "posposcyl", + "posposcyl_eq", + "poscylcyl", + "poscylcyl_eq", ] - other = ['adc', 'ball_stick'] + other = ["adc", "ball_stick"] model_list = single_tensor model_list.extend(other) - model_list.extend([ - multi + ' ' + single for multi in multi_tensor - for single in single_tensor - ]) + model_list.extend( + [multi + " " + single for multi in multi_tensor for single in single_tensor] + ) return model_list model = traits.Enum( _gen_model_options(), - argstr='-model %s', + argstr="-model %s", mandatory=True, - 
desc='Specifies the model to be fit to the data.') + desc="Specifies the model to be fit to the data.", + ) in_file = File( exists=True, - argstr='-inputfile %s', + argstr="-inputfile %s", mandatory=True, - desc='voxel-order data filename') + desc="voxel-order data filename", + ) inputdatatype = traits.Enum( - 'float', - 'char', - 'short', - 'int', - 'long', - 'double', - argstr='-inputdatatype %s', - desc= - 'Specifies the data type of the input file: "char", "short", "int", "long", "float" or "double". The input file must have BIG-ENDIAN ordering. By default, the input type is "float".' + "float", + "char", + "short", + "int", + "long", + "double", + argstr="-inputdatatype %s", + desc='Specifies the data type of the input file: "char", "short", "int", "long", "float" or "double". The input file must have BIG-ENDIAN ordering. By default, the input type is "float".', ) scheme_file = File( exists=True, - argstr='-schemefile %s', + argstr="-schemefile %s", mandatory=True, - desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)') + desc="Camino scheme file (b values / vectors, see camino.fsl2scheme)", + ) - outputfile = File( - argstr='-outputfile %s', desc='Filename of the output file.') + outputfile = File(argstr="-outputfile %s", desc="Filename of the output file.") outlier = File( - argstr='-outliermap %s', + argstr="-outliermap %s", exists=True, - desc= - 'Specifies the name of the file to contain the outlier map generated by the RESTORE algorithm.' + desc="Specifies the name of the file to contain the outlier map generated by the RESTORE algorithm.", ) noisemap = File( - argstr='-noisemap %s', + argstr="-noisemap %s", exists=True, - desc= - 'Specifies the name of the file to contain the estimated noise variance on the diffusion-weighted signal, generated by a weighted tensor fit. The data type of this file is big-endian double.' + desc="Specifies the name of the file to contain the estimated noise variance on the diffusion-weighted signal, generated by a weighted tensor fit. The data type of this file is big-endian double.", ) residualmap = File( - argstr='-residualmap %s', + argstr="-residualmap %s", exists=True, - desc= - 'Specifies the name of the file to contain the weighted residual errors after computing a weighted linear tensor fit. One value is produced per measurement, in voxel order.The data type of this file is big-endian double. Images of the residuals for each measurement can be extracted with shredder.' + desc="Specifies the name of the file to contain the weighted residual errors after computing a weighted linear tensor fit. One value is produced per measurement, in voxel order.The data type of this file is big-endian double. Images of the residuals for each measurement can be extracted with shredder.", ) sigma = traits.Float( - argstr='-sigma %G', - desc= - 'Specifies the standard deviation of the noise in the data. Required by the RESTORE algorithm.' + argstr="-sigma %G", + desc="Specifies the standard deviation of the noise in the data. Required by the RESTORE algorithm.", ) bgthresh = traits.Float( - argstr='-bgthresh %G', - desc= - 'Sets a threshold on the average q=0 measurement to separate foreground and background. The program does not process background voxels, but outputs the same number of values in background voxels and foreground voxels. Each value is zero in background voxels apart from the exit code which is -1.' + argstr="-bgthresh %G", + desc="Sets a threshold on the average q=0 measurement to separate foreground and background. 
The program does not process background voxels, but outputs the same number of values in background voxels and foreground voxels. Each value is zero in background voxels apart from the exit code which is -1.", ) bgmask = File( - argstr='-bgmask %s', + argstr="-bgmask %s", exists=True, - desc= - 'Provides the name of a file containing a background mask computed using, for example, FSL\'s bet2 program. The mask file contains zero in background voxels and non-zero in foreground.' + desc="Provides the name of a file containing a background mask computed using, for example, FSL's bet2 program. The mask file contains zero in background voxels and non-zero in foreground.", ) cfthresh = traits.Float( - argstr='-csfthresh %G', - desc= - 'Sets a threshold on the average q=0 measurement to determine which voxels are CSF. This program does not treat CSF voxels any different to other voxels.' + argstr="-csfthresh %G", + desc="Sets a threshold on the average q=0 measurement to determine which voxels are CSF. This program does not treat CSF voxels any different to other voxels.", ) fixedmodq = traits.List( traits.Float, - argstr='-fixedmod %s', + argstr="-fixedmod %s", minlen=4, maxlen=4, - desc= - 'Specifies a spherical acquisition scheme with M measurements with q=0 and N measurements with |q|=Q and diffusion time tau. The N measurements with |q|=Q have unique directions. The program reads in the directions from the files in directory PointSets.' + desc="Specifies a spherical acquisition scheme with M measurements with q=0 and N measurements with |q|=Q and diffusion time tau. The N measurements with |q|=Q have unique directions. The program reads in the directions from the files in directory PointSets.", ) fixedbvalue = traits.List( traits.Float, - argstr='-fixedbvalue %s', + argstr="-fixedbvalue %s", minlen=3, maxlen=3, - desc= - 'As above, but specifies . The resulting scheme is the same whether you specify b directly or indirectly using -fixedmodq.' + desc="As above, but specifies . The resulting scheme is the same whether you specify b directly or indirectly using -fixedmodq.", ) tau = traits.Float( - argstr='-tau %G', - desc= - 'Sets the diffusion time separately. This overrides the diffusion time specified in a scheme file or by a scheme index for both the acquisition scheme and in the data synthesis.' + argstr="-tau %G", + desc="Sets the diffusion time separately. This overrides the diffusion time specified in a scheme file or by a scheme index for both the acquisition scheme and in the data synthesis.", ) class ModelFitOutputSpec(TraitedSpec): - fitted_data = File( - exists=True, desc='output file of 4D volume in voxel order') + fitted_data = File(exists=True, desc="output file of 4D volume in voxel order") class ModelFit(StdOutCommandLine): @@ -376,98 +398,99 @@ class ModelFit(StdOutCommandLine): >>> fit.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> fit.run() # doctest: +SKIP """ - _cmd = 'modelfit' + + _cmd = "modelfit" input_spec = ModelFitInputSpec output_spec = ModelFitOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['fitted_data'] = os.path.abspath(self._gen_outfilename()) + outputs["fitted_data"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_fit.Bdouble' + return name + "_fit.Bdouble" class DTLUTGenInputSpec(StdOutCommandLineInputSpec): lrange = traits.List( traits.Float, - desc='Index to one-tensor LUTs. This is the ratio L1/L3 and L2 / L3.' 
- 'The LUT is square, with half the values calculated (because L2 / L3 cannot be less than L1 / L3 by definition).' - 'The minimum must be >= 1. For comparison, a ratio L1 / L3 = 10 with L2 / L3 = 1 corresponds to an FA of 0.891, ' - 'and L1 / L3 = 15 with L2 / L3 = 1 corresponds to an FA of 0.929. The default range is 1 to 10.', - argstr='-lrange %s', + desc="Index to one-tensor LUTs. This is the ratio L1/L3 and L2 / L3." + "The LUT is square, with half the values calculated (because L2 / L3 cannot be less than L1 / L3 by definition)." + "The minimum must be >= 1. For comparison, a ratio L1 / L3 = 10 with L2 / L3 = 1 corresponds to an FA of 0.891, " + "and L1 / L3 = 15 with L2 / L3 = 1 corresponds to an FA of 0.929. The default range is 1 to 10.", + argstr="-lrange %s", minlen=2, maxlen=2, position=1, - units='NA') + units="NA", + ) frange = traits.List( traits.Float, - desc='Index to two-tensor LUTs. This is the fractional anisotropy' - ' of the two tensors. The default is 0.3 to 0.94', - argstr='-frange %s', + desc="Index to two-tensor LUTs. This is the fractional anisotropy" + " of the two tensors. The default is 0.3 to 0.94", + argstr="-frange %s", minlen=2, maxlen=2, position=1, - units='NA') + units="NA", + ) step = traits.Float( - argstr='-step %f', - units='NA', - desc='Distance between points in the LUT.' - 'For example, if lrange is 1 to 10 and the step is 0.1, LUT entries will be computed ' - 'at L1 / L3 = 1, 1.1, 1.2 ... 10.0 and at L2 / L3 = 1.0, 1.1 ... L1 / L3.' - 'For single tensor LUTs, the default step is 0.2, for two-tensor LUTs it is 0.02.' + argstr="-step %f", + units="NA", + desc="Distance between points in the LUT." + "For example, if lrange is 1 to 10 and the step is 0.1, LUT entries will be computed " + "at L1 / L3 = 1, 1.1, 1.2 ... 10.0 and at L2 / L3 = 1.0, 1.1 ... L1 / L3." + "For single tensor LUTs, the default step is 0.2, for two-tensor LUTs it is 0.02.", ) samples = traits.Int( - argstr='-samples %d', - units='NA', - desc= - 'The number of synthetic measurements to generate at each point in the LUT. The default is 2000.' + argstr="-samples %d", + units="NA", + desc="The number of synthetic measurements to generate at each point in the LUT. The default is 2000.", ) snr = traits.Float( - argstr='-snr %f', - units='NA', - desc='The signal to noise ratio of the unweighted (q = 0) measurements.' - 'This should match the SNR (in white matter) of the images that the LUTs are used with.' + argstr="-snr %f", + units="NA", + desc="The signal to noise ratio of the unweighted (q = 0) measurements." + "This should match the SNR (in white matter) of the images that the LUTs are used with.", ) bingham = traits.Bool( - argstr='-bingham', - desc="Compute a LUT for the Bingham PDF. This is the default.") + argstr="-bingham", + desc="Compute a LUT for the Bingham PDF. This is the default.", + ) - acg = traits.Bool(argstr='-acg', desc="Compute a LUT for the ACG PDF.") + acg = traits.Bool(argstr="-acg", desc="Compute a LUT for the ACG PDF.") - watson = traits.Bool( - argstr='-watson', desc="Compute a LUT for the Watson PDF.") + watson = traits.Bool(argstr="-watson", desc="Compute a LUT for the Watson PDF.") inversion = traits.Int( - argstr='-inversion %d', - units='NA', - desc= - 'Index of the inversion to use. The default is 1 (linear single tensor inversion).' + argstr="-inversion %d", + units="NA", + desc="Index of the inversion to use. 
The default is 1 (linear single tensor inversion).", ) trace = traits.Float( - argstr='-trace %G', - units='NA', - desc= - 'Trace of the diffusion tensor(s) used in the test function in the LUT generation. The default is 2100E-12 m^2 s^-1.' + argstr="-trace %G", + units="NA", + desc="Trace of the diffusion tensor(s) used in the test function in the LUT generation. The default is 2100E-12 m^2 s^-1.", ) scheme_file = File( - argstr='-schemefile %s', + argstr="-schemefile %s", mandatory=True, position=2, - desc='The scheme file of the images to be processed using this LUT.') + desc="The scheme file of the images to be processed using this LUT.", + ) class DTLUTGenOutputSpec(TraitedSpec): - dtLUT = File(exists=True, desc='Lookup Table') + dtLUT = File(exists=True, desc="Lookup Table") class DTLUTGen(StdOutCommandLine): @@ -492,84 +515,87 @@ class DTLUTGen(StdOutCommandLine): >>> dtl.inputs.scheme_file = 'A.scheme' >>> dtl.run() # doctest: +SKIP """ - _cmd = 'dtlutgen' + + _cmd = "dtlutgen" input_spec = DTLUTGenInputSpec output_spec = DTLUTGenOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['dtLUT'] = os.path.abspath(self._gen_outfilename()) + outputs["dtLUT"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.scheme_file) - return name + '.dat' + return name + ".dat" class PicoPDFsInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='< %s', + argstr="< %s", mandatory=True, position=1, - desc='voxel-order data filename') + desc="voxel-order data filename", + ) inputmodel = traits.Enum( - 'dt', - 'multitensor', - 'pds', - argstr='-inputmodel %s', + "dt", + "multitensor", + "pds", + argstr="-inputmodel %s", position=2, - desc='input model type', - usedefault=True) + desc="input model type", + usedefault=True, + ) luts = InputMultiPath( File(exists=True), - argstr='-luts %s', + argstr="-luts %s", mandatory=True, - desc='Files containing the lookup tables.' - 'For tensor data, one lut must be specified for each type of inversion used in the image (one-tensor, two-tensor, three-tensor).' - 'For pds, the number of LUTs must match -numpds (it is acceptable to use the same LUT several times - see example, above).' - 'These LUTs may be generated with dtlutgen.') + desc="Files containing the lookup tables." + "For tensor data, one lut must be specified for each type of inversion used in the image (one-tensor, two-tensor, three-tensor)." + "For pds, the number of LUTs must match -numpds (it is acceptable to use the same LUT several times - see example, above)." + "These LUTs may be generated with dtlutgen.", + ) pdf = traits.Enum( - 'bingham', - 'watson', - 'acg', - argstr='-pdf %s', + "bingham", + "watson", + "acg", + argstr="-pdf %s", position=4, - desc=' Specifies the PDF to use. There are three choices:' - 'watson - The Watson distribution. This distribution is rotationally symmetric.' - 'bingham - The Bingham distributionn, which allows elliptical probability density contours.' - 'acg - The Angular Central Gaussian distribution, which also allows elliptical probability density contours', - usedefault=True) + desc=" Specifies the PDF to use. There are three choices:" + "watson - The Watson distribution. This distribution is rotationally symmetric." + "bingham - The Bingham distribution, which allows elliptical probability density contours."
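# The multi-line desc strings in these input specs rely on Python's implicit
# string-literal concatenation, which this reformatting preserves verbatim.
# A small illustration of the pitfall that idiom invites: fragments without
# a trailing space run together when the description is rendered.
joined = (
    "Files containing the lookup tables."
    "These LUTs may be generated with dtlutgen."
)
assert "tables.These" in joined  # no space between the two sentences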
+ "acg - The Angular Central Gaussian distribution, which also allows elliptical probability density contours", + usedefault=True, + ) directmap = traits.Bool( - argstr='-directmap', - desc= - "Only applicable when using pds as the inputmodel. Use direct mapping between the eigenvalues and the distribution parameters instead of the log of the eigenvalues." + argstr="-directmap", + desc="Only applicable when using pds as the inputmodel. Use direct mapping between the eigenvalues and the distribution parameters instead of the log of the eigenvalues.", ) maxcomponents = traits.Int( - argstr='-maxcomponents %d', - units='NA', - desc= - 'The maximum number of tensor components in a voxel (default 2) for multitensor data.' - 'Currently, only the default is supported, but future releases may allow the input of three-tensor data using this option.' + argstr="-maxcomponents %d", + units="NA", + desc="The maximum number of tensor components in a voxel (default 2) for multitensor data." + "Currently, only the default is supported, but future releases may allow the input of three-tensor data using this option.", ) numpds = traits.Int( - argstr='-numpds %d', - units='NA', - desc='The maximum number of PDs in a voxel (default 3) for PD data.' - 'This option determines the size of the input and output voxels.' - 'This means that the data file may be large enough to accomodate three or more PDs,' - 'but does not mean that any of the voxels are classified as containing three or more PDs.' + argstr="-numpds %d", + units="NA", + desc="The maximum number of PDs in a voxel (default 3) for PD data." + "This option determines the size of the input and output voxels." + "This means that the data file may be large enough to accomodate three or more PDs," + "but does not mean that any of the voxels are classified as containing three or more PDs.", ) class PicoPDFsOutputSpec(TraitedSpec): - pdfs = File(exists=True, desc='path/name of 4D volume in voxel order') + pdfs = File(exists=True, desc="path/name of 4D volume in voxel order") class PicoPDFs(StdOutCommandLine): @@ -586,210 +612,237 @@ class PicoPDFs(StdOutCommandLine): >>> pdf.inputs.in_file = 'voxel-order_data.Bfloat' >>> pdf.run() # doctest: +SKIP """ - _cmd = 'picopdfs' + + _cmd = "picopdfs" input_spec = PicoPDFsInputSpec output_spec = PicoPDFsOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['pdfs'] = os.path.abspath(self._gen_outfilename()) + outputs["pdfs"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_pdfs.Bdouble' + return name + "_pdfs.Bdouble" class TrackInputSpec(CommandLineInputSpec): in_file = File( - exists=True, - argstr='-inputfile %s', - position=1, - desc='input data file') + exists=True, argstr="-inputfile %s", position=1, desc="input data file" + ) - seed_file = File( - exists=True, argstr='-seedfile %s', position=2, desc='seed file') + seed_file = File(exists=True, argstr="-seedfile %s", position=2, desc="seed file") inputmodel = traits.Enum( - 'dt', - 'multitensor', - 'sfpeak', - 'pico', - 'repbs_dt', - 'repbs_multitensor', - 'ballstick', - 'wildbs_dt', - 'bayesdirac', - 'bayesdirac_dt', - 'bedpostx_dyad', - 'bedpostx', - argstr='-inputmodel %s', - desc='input model type', - usedefault=True) + "dt", + "multitensor", + "sfpeak", + "pico", + "repbs_dt", + "repbs_multitensor", + "ballstick", + "wildbs_dt", + "bayesdirac", + "bayesdirac_dt", + "bedpostx_dyad", + "bedpostx", + argstr="-inputmodel %s", + 
desc="input model type", + usedefault=True, + ) tracker = traits.Enum( - 'fact', - 'euler', - 'rk4', - argstr='-tracker %s', - desc=("The tracking algorithm controls streamlines are " - "generated from the data. The choices are: " - "- FACT, which follows the local fibre orientation " - "in each voxel. No interpolation is used." - "- EULER, which uses a fixed step size along the " - "local fibre orientation. With nearest-neighbour " - "interpolation, this method may be very similar to " - "FACT, except that the step size is fixed, whereas " - "FACT steps extend to the boundary of the next voxel " - "(distance variable depending on the entry and exit " - "points to the voxel)." - "- RK4: Fourth-order Runge-Kutta method. The step " - "size is fixed, however the eventual direction of " - "the step is determined by taking and averaging a " - "series of partial steps."), - usedefault=True) + "fact", + "euler", + "rk4", + argstr="-tracker %s", + desc=( + "The tracking algorithm controls streamlines are " + "generated from the data. The choices are: " + "- FACT, which follows the local fibre orientation " + "in each voxel. No interpolation is used." + "- EULER, which uses a fixed step size along the " + "local fibre orientation. With nearest-neighbour " + "interpolation, this method may be very similar to " + "FACT, except that the step size is fixed, whereas " + "FACT steps extend to the boundary of the next voxel " + "(distance variable depending on the entry and exit " + "points to the voxel)." + "- RK4: Fourth-order Runge-Kutta method. The step " + "size is fixed, however the eventual direction of " + "the step is determined by taking and averaging a " + "series of partial steps." + ), + usedefault=True, + ) interpolator = traits.Enum( - 'nn', - 'prob_nn', - 'linear', - argstr='-interpolator %s', - desc=("The interpolation algorithm determines how " - "the fiber orientation(s) are defined at a given " - "continuous point within the input image. " - "Interpolators are only used when the tracking " - "algorithm is not FACT. The choices are: " - "- NN: Nearest-neighbour interpolation, just " - "uses the local voxel data directly." - "- PROB_NN: Probabilistic nearest-neighbor " - "interpolation, similar to the method pro- " - "posed by Behrens et al [Magnetic Resonance " - "in Medicine, 50:1077-1088, 2003]. The data " - "is not interpolated, but at each point we " - "randomly choose one of the 8 voxels sur- " - "rounding a point. The probability of choosing " - "a particular voxel is based on how close the " - "point is to the centre of that voxel." - "- LINEAR: Linear interpolation of the vector " - "field containing the principal directions at " - "each point.")) + "nn", + "prob_nn", + "linear", + argstr="-interpolator %s", + desc=( + "The interpolation algorithm determines how " + "the fiber orientation(s) are defined at a given " + "continuous point within the input image. " + "Interpolators are only used when the tracking " + "algorithm is not FACT. The choices are: " + "- NN: Nearest-neighbour interpolation, just " + "uses the local voxel data directly." + "- PROB_NN: Probabilistic nearest-neighbor " + "interpolation, similar to the method pro- " + "posed by Behrens et al [Magnetic Resonance " + "in Medicine, 50:1077-1088, 2003]. The data " + "is not interpolated, but at each point we " + "randomly choose one of the 8 voxels sur- " + "rounding a point. The probability of choosing " + "a particular voxel is based on how close the " + "point is to the centre of that voxel." 
+ "- LINEAR: Linear interpolation of the vector " + "field containing the principal directions at " + "each point." + ), + ) stepsize = traits.Float( - argstr='-stepsize %f', - requires=['tracker'], - desc=('Step size for EULER and RK4 tracking. ' - 'The default is 1mm.')) + argstr="-stepsize %f", + requires=["tracker"], + desc=("Step size for EULER and RK4 tracking. " "The default is 1mm."), + ) inputdatatype = traits.Enum( - 'float', 'double', argstr='-inputdatatype %s', desc='input file type') + "float", "double", argstr="-inputdatatype %s", desc="input file type" + ) - gzip = traits.Bool( - argstr='-gzip', desc="save the output image in gzip format") + gzip = traits.Bool(argstr="-gzip", desc="save the output image in gzip format") maxcomponents = traits.Int( - argstr='-maxcomponents %d', - units='NA', - desc=("The maximum number of tensor components in a " - "voxel. This determines the size of the input " - "file and does not say anything about the " - "voxel classification. The default is 2 if " - "the input model is multitensor and 1 if the " - "input model is dt.")) + argstr="-maxcomponents %d", + units="NA", + desc=( + "The maximum number of tensor components in a " + "voxel. This determines the size of the input " + "file and does not say anything about the " + "voxel classification. The default is 2 if " + "the input model is multitensor and 1 if the " + "input model is dt." + ), + ) numpds = traits.Int( - argstr='-numpds %d', - units='NA', - desc=("The maximum number of PDs in a voxel for input " - "models sfpeak and pico. The default is 3 for input " - "model sfpeak and 1 for input model pico. This option " - "determines the size of the voxels in the input file " - "and does not affect tracking. For tensor data, use " - "the -maxcomponents option.")) + argstr="-numpds %d", + units="NA", + desc=( + "The maximum number of PDs in a voxel for input " + "models sfpeak and pico. The default is 3 for input " + "model sfpeak and 1 for input model pico. This option " + "determines the size of the voxels in the input file " + "and does not affect tracking. For tensor data, use " + "the -maxcomponents option." + ), + ) data_dims = traits.List( traits.Int, - desc='data dimensions in voxels', - argstr='-datadims %s', + desc="data dimensions in voxels", + argstr="-datadims %s", minlen=3, maxlen=3, - units='voxels') + units="voxels", + ) voxel_dims = traits.List( traits.Float, - desc='voxel dimensions in mm', - argstr='-voxeldims %s', + desc="voxel dimensions in mm", + argstr="-voxeldims %s", minlen=3, maxlen=3, - units='mm') + units="mm", + ) ipthresh = traits.Float( - argstr='-ipthresh %f', - desc=('Curvature threshold for tracking, expressed as ' - 'the minimum dot product between two streamline ' - 'orientations calculated over the length of a ' - 'voxel. If the dot product between the previous ' - 'and current directions is less than this ' - 'threshold, then the streamline terminates. The ' - 'default setting will terminate fibres that curve ' - 'by more than 80 degrees. Set this to -1.0 to ' - 'disable curvature checking completely.')) + argstr="-ipthresh %f", + desc=( + "Curvature threshold for tracking, expressed as " + "the minimum dot product between two streamline " + "orientations calculated over the length of a " + "voxel. If the dot product between the previous " + "and current directions is less than this " + "threshold, then the streamline terminates. The " + "default setting will terminate fibres that curve " + "by more than 80 degrees. 
Set this to -1.0 to " + "disable curvature checking completely." + ), + ) curvethresh = traits.Float( - argstr='-curvethresh %f', - desc=('Curvature threshold for tracking, expressed ' - 'as the maximum angle (in degrees) between ' - 'between two streamline orientations ' - 'calculated over the length of a voxel. If ' - 'the angle is greater than this, then the ' - 'streamline terminates.')) + argstr="-curvethresh %f", + desc=( + "Curvature threshold for tracking, expressed " + "as the maximum angle (in degrees) between " + "two streamline orientations " + "calculated over the length of a voxel. If " + "the angle is greater than this, then the " + "streamline terminates." + ), + ) curveinterval = traits.Float( - argstr='-curveinterval %f', - requires=['curvethresh'], - desc=('Interval over which the curvature threshold ' - 'should be evaluated, in mm. The default is ' - '5mm. When using the default curvature ' - 'threshold of 90 degrees, this means that ' - 'streamlines will terminate if they curve by ' - 'more than 90 degrees over a path length ' - 'of 5mm.')) + argstr="-curveinterval %f", + requires=["curvethresh"], + desc=( + "Interval over which the curvature threshold " + "should be evaluated, in mm. The default is " + "5mm. When using the default curvature " + "threshold of 90 degrees, this means that " + "streamlines will terminate if they curve by " + "more than 90 degrees over a path length " + "of 5mm." + ), + ) anisthresh = traits.Float( - argstr='-anisthresh %f', - desc=('Terminate fibres that enter a voxel with lower ' - 'anisotropy than the threshold.')) + argstr="-anisthresh %f", + desc=( + "Terminate fibres that enter a voxel with lower " + "anisotropy than the threshold." + ), + ) anisfile = File( - argstr='-anisfile %s', + argstr="-anisfile %s", exists=True, - desc=('File containing the anisotropy map. This is required to ' - 'apply an anisotropy threshold with non tensor data. If ' - 'the map issupplied it is always used, even in tensor ' - 'data.')) + desc=( + "File containing the anisotropy map. This is required to " + "apply an anisotropy threshold with non tensor data. If " + "the map is supplied it is always used, even in tensor " + "data."
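# ipthresh and curvethresh express the same termination rule in two forms,
# a minimum dot product versus a maximum angle. A quick check that the
# quoted 80-degree default matches a dot-product threshold of about 0.17:
import math

assert abs(math.cos(math.radians(80.0)) - 0.17365) < 1e-4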
+ ), + ) outputtracts = traits.Enum( - 'float', - 'double', - 'oogl', - argstr='-outputtracts %s', - desc='output tract file type') + "float", + "double", + "oogl", + argstr="-outputtracts %s", + desc="output tract file type", + ) out_file = File( - argstr='-outputfile %s', - position=-1, - genfile=True, - desc='output data file') + argstr="-outputfile %s", position=-1, genfile=True, desc="output data file" + ) output_root = File( exists=False, - argstr='-outputroot %s', + argstr="-outputroot %s", position=-1, - desc='root directory for output') + desc="root directory for output", + ) class TrackOutputSpec(TraitedSpec): - tracked = File( - exists=True, desc='output file containing reconstructed tracts') + tracked = File(exists=True, desc="output file containing reconstructed tracts") class Track(CommandLine): @@ -808,7 +861,7 @@ class Track(CommandLine): >>> track.run() # doctest: +SKIP """ - _cmd = 'track' + _cmd = "track" input_spec = TrackInputSpec output_spec = TrackOutputSpec @@ -819,11 +872,11 @@ def _list_outputs(self): out_file_path = os.path.abspath(self.inputs.out_file) else: out_file_path = os.path.abspath(self._gen_outfilename()) - outputs['tracked'] = out_file_path + outputs["tracked"] = out_file_path return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._gen_outfilename() else: return None @@ -831,10 +884,10 @@ def _gen_filename(self, name): def _gen_outfilename(self): # Currently in_file is only undefined for bedpostx input if not isdefined(self.inputs.in_file): - name = 'bedpostx' + name = "bedpostx" else: _, name, _ = split_filename(self.inputs.in_file) - return name + '_tracked' + return name + "_tracked" class TrackDT(Track): @@ -858,18 +911,17 @@ def __init__(self, command=None, **inputs): class TrackPICoInputSpec(TrackInputSpec): pdf = traits.Enum( - 'bingham', - 'watson', - 'acg', - argstr='-pdf %s', - desc='Specifies the model for PICo parameters. The default is "bingham.' + "bingham", + "watson", + "acg", + argstr="-pdf %s", + desc='Specifies the model for PICo parameters. The default is "bingham".', ) iterations = traits.Int( - argstr='-iterations %d', - units='NA', - desc= - "Number of streamlines to generate at each seed point. The default is 5000." + argstr="-iterations %d", + units="NA", + desc="Number of streamlines to generate at each seed point. The default is 5000.", ) @@ -896,17 +948,21 @@ def __init__(self, command=None, **inputs): class TrackBedpostxDeterInputSpec(TrackInputSpec): bedpostxdir = Directory( - argstr='-bedpostxdir %s', + argstr="-bedpostxdir %s", mandatory=True, exists=True, - desc=('Directory containing bedpostx output')) + desc=("Directory containing bedpostx output"), + ) min_vol_frac = traits.Float( - argstr='-bedpostxminf %d', - units='NA', - desc=("Zeros out compartments in bedpostx data " - "with a mean volume fraction f of less than " - "min_vol_frac. The default is 0.01.")) + argstr="-bedpostxminf %d", + units="NA", + desc=( + "Zeros out compartments in bedpostx data " + "with a mean volume fraction f of less than " + "min_vol_frac. The default is 0.01."
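# TrackDT and TrackPICo above specialize Track by pinning inputmodel in
# __init__ (the bodies are elided from this hunk). A sketch of that idiom,
# assuming the usual nipype pattern (the class name is hypothetical):
from nipype.interfaces.camino import Track


class TrackDTSketch(Track):
    def __init__(self, command=None, **inputs):
        inputs["inputmodel"] = "dt"  # fix the input model for tensor tracking
        return super(TrackDTSketch, self).__init__(command, **inputs)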
+ ), + ) class TrackBedpostxDeter(Track): @@ -942,23 +998,29 @@ def __init__(self, command=None, **inputs): class TrackBedpostxProbaInputSpec(TrackInputSpec): bedpostxdir = Directory( - argstr='-bedpostxdir %s', + argstr="-bedpostxdir %s", mandatory=True, exists=True, - desc=('Directory containing bedpostx output')) + desc=("Directory containing bedpostx output"), + ) min_vol_frac = traits.Float( - argstr='-bedpostxminf %d', - units='NA', - desc=("Zeros out compartments in bedpostx data " - "with a mean volume fraction f of less than " - "min_vol_frac. The default is 0.01.")) + argstr="-bedpostxminf %d", + units="NA", + desc=( + "Zeros out compartments in bedpostx data " + "with a mean volume fraction f of less than " + "min_vol_frac. The default is 0.01." + ), + ) iterations = traits.Int( - argstr='-iterations %d', - units='NA', - desc=("Number of streamlines to generate at each " - "seed point. The default is 1.")) + argstr="-iterations %d", + units="NA", + desc=( + "Number of streamlines to generate at each " "seed point. The default is 1." + ), + ) class TrackBedpostxProba(Track): @@ -999,65 +1061,63 @@ def __init__(self, command=None, **inputs): class TrackBayesDiracInputSpec(TrackInputSpec): scheme_file = File( - argstr='-schemefile %s', + argstr="-schemefile %s", mandatory=True, exists=True, - desc=('The scheme file corresponding to the data being ' - 'processed.')) + desc=("The scheme file corresponding to the data being " "processed."), + ) iterations = traits.Int( - argstr='-iterations %d', - units='NA', - desc=("Number of streamlines to generate at each " - "seed point. The default is 5000.")) + argstr="-iterations %d", + units="NA", + desc=( + "Number of streamlines to generate at each " + "seed point. The default is 5000." + ), + ) pdf = traits.Enum( - 'bingham', - 'watson', - 'acg', - argstr='-pdf %s', - desc= - 'Specifies the model for PICo priors (not the curvature priors). The default is "bingham".' + "bingham", + "watson", + "acg", + argstr="-pdf %s", + desc='Specifies the model for PICo priors (not the curvature priors). The default is "bingham".', ) pointset = traits.Int( - argstr='-pointset %s', - desc= - 'Index to the point set to use for Bayesian likelihood calculation. The index specifies a set of evenly distributed points on the unit sphere, where each point x defines two possible step directions (x or -x) for the streamline path. A larger number indexes a larger point set, which gives higher angular resolution at the expense of computation time. The default is index 1, which gives 1922 points, index 0 gives 1082 points, index 2 gives 3002 points.' + argstr="-pointset %s", + desc="Index to the point set to use for Bayesian likelihood calculation. The index specifies a set of evenly distributed points on the unit sphere, where each point x defines two possible step directions (x or -x) for the streamline path. A larger number indexes a larger point set, which gives higher angular resolution at the expense of computation time. The default is index 1, which gives 1922 points, index 0 gives 1082 points, index 2 gives 3002 points.", ) datamodel = traits.Enum( - 'cylsymmdt', - 'ballstick', - argstr='-datamodel %s', - desc= - 'Model of the data for Bayesian tracking. The default model is "cylsymmdt", a diffusion tensor with cylindrical symmetry about e_1, ie L1 >= L_2 = L_3. The other model is "ballstick", the partial volume model (see ballstickfit).' + "cylsymmdt", + "ballstick", + argstr="-datamodel %s", + desc='Model of the data for Bayesian tracking. 
The default model is "cylsymmdt", a diffusion tensor with cylindrical symmetry about e_1, ie L1 >= L_2 = L_3. The other model is "ballstick", the partial volume model (see ballstickfit).', ) curvepriork = traits.Float( - argstr='-curvepriork %G', - desc= - 'Concentration parameter for the prior distribution on fibre orientations given the fibre orientation at the previous step. Larger values of k make curvature less likely.' + argstr="-curvepriork %G", + desc="Concentration parameter for the prior distribution on fibre orientations given the fibre orientation at the previous step. Larger values of k make curvature less likely.", ) curvepriorg = traits.Float( - argstr='-curvepriorg %G', - desc= - 'Concentration parameter for the prior distribution on fibre orientations given the fibre orientation at the previous step. Larger values of g make curvature less likely.' + argstr="-curvepriorg %G", + desc="Concentration parameter for the prior distribution on fibre orientations given the fibre orientation at the previous step. Larger values of g make curvature less likely.", ) extpriorfile = File( exists=True, - argstr='-extpriorfile %s', - desc= - 'Path to a PICo image produced by picopdfs. The PDF in each voxel is used as a prior for the fibre orientation in Bayesian tracking. The prior image must be in the same space as the diffusion data.' + argstr="-extpriorfile %s", + desc="Path to a PICo image produced by picopdfs. The PDF in each voxel is used as a prior for the fibre orientation in Bayesian tracking. The prior image must be in the same space as the diffusion data.", ) extpriordatatype = traits.Enum( - 'float', - 'double', - argstr='-extpriordatatype %s', - desc='Datatype of the prior image. The default is "double".') + "float", + "double", + argstr="-extpriordatatype %s", + desc='Datatype of the prior image. The default is "double".', + ) class TrackBayesDirac(Track): @@ -1103,35 +1163,34 @@ def __init__(self, command=None, **inputs): class TrackBootstrapInputSpec(TrackInputSpec): scheme_file = File( - argstr='-schemefile %s', + argstr="-schemefile %s", mandatory=True, exists=True, - desc='The scheme file corresponding to the data being processed.') + desc="The scheme file corresponding to the data being processed.", + ) iterations = traits.Int( - argstr='-iterations %d', - units='NA', - desc="Number of streamlines to generate at each seed point.") + argstr="-iterations %d", + units="NA", + desc="Number of streamlines to generate at each seed point.", + ) inversion = traits.Int( - argstr='-inversion %s', - desc= - 'Tensor reconstruction algorithm for repetition bootstrapping. Default is 1 (linear reconstruction, single tensor).' + argstr="-inversion %s", + desc="Tensor reconstruction algorithm for repetition bootstrapping. Default is 1 (linear reconstruction, single tensor).", ) bsdatafiles = traits.List( File(exists=True), mandatory=True, - argstr='-bsdatafile %s', - desc= - 'Specifies files containing raw data for repetition bootstrapping. Use -inputfile for wild bootstrap data.' + argstr="-bsdatafile %s", + desc="Specifies files containing raw data for repetition bootstrapping. Use -inputfile for wild bootstrap data.", ) bgmask = File( - argstr='-bgmask %s', + argstr="-bgmask %s", exists=True, - desc= - 'Provides the name of a file containing a background mask computed using, for example, FSL\'s bet2 program. The mask file contains zero in background voxels and non-zero in foreground.' 
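# A usage sketch for the bedpostx variants above (directory and seed mask
# names are hypothetical): the tracker reads FSL bedpostx output, so in_file
# stays unset and _gen_outfilename falls back to the name 'bedpostx'.
# >>> from nipype.interfaces import camino
# >>> trk = camino.TrackBedpostxDeter()
# >>> trk.inputs.bedpostxdir = 'subject1.bedpostX'
# >>> trk.inputs.seed_file = 'seed_mask.nii'
# >>> trk.run()  # doctest: +SKIP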
+ desc="Provides the name of a file containing a background mask computed using, for example, FSL's bet2 program. The mask file contains zero in background voxels and non-zero in foreground.", ) @@ -1160,59 +1219,59 @@ def __init__(self, command=None, **inputs): class ComputeMeanDiffusivityInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='< %s', + argstr="< %s", mandatory=True, position=1, - desc='Tensor-fitted data filename') + desc="Tensor-fitted data filename", + ) scheme_file = File( exists=True, - argstr='%s', + argstr="%s", position=2, - desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)') + desc="Camino scheme file (b values / vectors, see camino.fsl2scheme)", + ) out_file = File(argstr="> %s", position=-1, genfile=True) inputmodel = traits.Enum( - 'dt', - 'twotensor', - 'threetensor', - argstr='-inputmodel %s', - desc= - 'Specifies the model that the input tensor data contains parameters for.' + "dt", + "twotensor", + "threetensor", + argstr="-inputmodel %s", + desc="Specifies the model that the input tensor data contains parameters for." 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), ' '"threetensor" (three-tensor data). By default, the program assumes that the input data ' - 'contains a single diffusion tensor in each voxel.') + "contains a single diffusion tensor in each voxel.", + ) inputdatatype = traits.Enum( - 'char', - 'short', - 'int', - 'long', - 'float', - 'double', - argstr='-inputdatatype %s', - desc= - 'Specifies the data type of the input file. The data type can be any of the' - 'following strings: "char", "short", "int", "long", "float" or "double".' + "char", + "short", + "int", + "long", + "float", + "double", + argstr="-inputdatatype %s", + desc="Specifies the data type of the input file. The data type can be any of the" + 'following strings: "char", "short", "int", "long", "float" or "double".', ) outputdatatype = traits.Enum( - 'char', - 'short', - 'int', - 'long', - 'float', - 'double', - argstr='-outputdatatype %s', - desc= - 'Specifies the data type of the output data. The data type can be any of the' - 'following strings: "char", "short", "int", "long", "float" or "double".' + "char", + "short", + "int", + "long", + "float", + "double", + argstr="-outputdatatype %s", + desc="Specifies the data type of the output data. 
The data type can be any of the" + 'following strings: "char", "short", "int", "long", "float" or "double".', ) class ComputeMeanDiffusivityOutputSpec(TraitedSpec): - md = File(exists=True, desc='Mean Diffusivity Map') + md = File(exists=True, desc="Mean Diffusivity Map") class ComputeMeanDiffusivity(StdOutCommandLine): @@ -1228,7 +1287,8 @@ class ComputeMeanDiffusivity(StdOutCommandLine): >>> md.inputs.scheme_file = 'A.scheme' >>> md.run() # doctest: +SKIP """ - _cmd = 'md' + + _cmd = "md" input_spec = ComputeMeanDiffusivityInputSpec output_spec = ComputeMeanDiffusivityOutputSpec @@ -1245,58 +1305,58 @@ def _gen_outfilename(self): class ComputeFractionalAnisotropyInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='< %s', + argstr="< %s", mandatory=True, position=1, - desc='Tensor-fitted data filename') + desc="Tensor-fitted data filename", + ) scheme_file = File( exists=True, - argstr='%s', + argstr="%s", position=2, - desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)') + desc="Camino scheme file (b values / vectors, see camino.fsl2scheme)", + ) inputmodel = traits.Enum( - 'dt', - 'twotensor', - 'threetensor', - 'multitensor', - argstr='-inputmodel %s', - desc= - 'Specifies the model that the input tensor data contains parameters for.' + "dt", + "twotensor", + "threetensor", + "multitensor", + argstr="-inputmodel %s", + desc="Specifies the model that the input tensor data contains parameters for." 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), ' '"threetensor" (three-tensor data). By default, the program assumes that the input data ' - 'contains a single diffusion tensor in each voxel.') + "contains a single diffusion tensor in each voxel.", + ) inputdatatype = traits.Enum( - 'char', - 'short', - 'int', - 'long', - 'float', - 'double', - argstr='-inputdatatype %s', - desc= - 'Specifies the data type of the input file. The data type can be any of the' - 'following strings: "char", "short", "int", "long", "float" or "double".' + "char", + "short", + "int", + "long", + "float", + "double", + argstr="-inputdatatype %s", + desc="Specifies the data type of the input file. The data type can be any of the" + 'following strings: "char", "short", "int", "long", "float" or "double".', ) outputdatatype = traits.Enum( - 'char', - 'short', - 'int', - 'long', - 'float', - 'double', - argstr='-outputdatatype %s', - desc= - 'Specifies the data type of the output data. The data type can be any of the' - 'following strings: "char", "short", "int", "long", "float" or "double".' + "char", + "short", + "int", + "long", + "float", + "double", + argstr="-outputdatatype %s", + desc="Specifies the data type of the output data. 
The data type can be any of the" + 'following strings: "char", "short", "int", "long", "float" or "double".', ) class ComputeFractionalAnisotropyOutputSpec(TraitedSpec): - fa = File(exists=True, desc='Fractional Anisotropy Map') + fa = File(exists=True, desc="Fractional Anisotropy Map") class ComputeFractionalAnisotropy(StdOutCommandLine): @@ -1318,75 +1378,76 @@ class ComputeFractionalAnisotropy(StdOutCommandLine): >>> fa.inputs.scheme_file = 'A.scheme' >>> fa.run() # doctest: +SKIP """ - _cmd = 'fa' + + _cmd = "fa" input_spec = ComputeFractionalAnisotropyInputSpec output_spec = ComputeFractionalAnisotropyOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['fa'] = os.path.abspath(self._gen_outfilename()) + outputs["fa"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_FA.Bdouble' # Need to change to self.inputs.outputdatatype + return name + "_FA.Bdouble" # Need to change to self.inputs.outputdatatype class ComputeTensorTraceInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='< %s', + argstr="< %s", mandatory=True, position=1, - desc='Tensor-fitted data filename') + desc="Tensor-fitted data filename", + ) scheme_file = File( exists=True, - argstr='%s', + argstr="%s", position=2, - desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)') + desc="Camino scheme file (b values / vectors, see camino.fsl2scheme)", + ) inputmodel = traits.Enum( - 'dt', - 'twotensor', - 'threetensor', - 'multitensor', - argstr='-inputmodel %s', - desc= - 'Specifies the model that the input tensor data contains parameters for.' + "dt", + "twotensor", + "threetensor", + "multitensor", + argstr="-inputmodel %s", + desc="Specifies the model that the input tensor data contains parameters for." 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), ' '"threetensor" (three-tensor data). By default, the program assumes that the input data ' - 'contains a single diffusion tensor in each voxel.') + "contains a single diffusion tensor in each voxel.", + ) inputdatatype = traits.Enum( - 'char', - 'short', - 'int', - 'long', - 'float', - 'double', - argstr='-inputdatatype %s', - desc= - 'Specifies the data type of the input file. The data type can be any of the' - 'following strings: "char", "short", "int", "long", "float" or "double".' + "char", + "short", + "int", + "long", + "float", + "double", + argstr="-inputdatatype %s", + desc="Specifies the data type of the input file. The data type can be any of the" + 'following strings: "char", "short", "int", "long", "float" or "double".', ) outputdatatype = traits.Enum( - 'char', - 'short', - 'int', - 'long', - 'float', - 'double', - argstr='-outputdatatype %s', - desc= - 'Specifies the data type of the output data. The data type can be any of the' - 'following strings: "char", "short", "int", "long", "float" or "double".' + "char", + "short", + "int", + "long", + "float", + "double", + argstr="-outputdatatype %s", + desc="Specifies the data type of the output data. 
The data type can be any of the" + 'following strings: "char", "short", "int", "long", "float" or "double".', ) class ComputeTensorTraceOutputSpec(TraitedSpec): - trace = File(exists=True, desc='Trace of the diffusion tensor') + trace = File(exists=True, desc="Trace of the diffusion tensor") class ComputeTensorTrace(StdOutCommandLine): @@ -1410,73 +1471,79 @@ class ComputeTensorTrace(StdOutCommandLine): >>> trace.inputs.scheme_file = 'A.scheme' >>> trace.run() # doctest: +SKIP """ - _cmd = 'trd' + + _cmd = "trd" input_spec = ComputeTensorTraceInputSpec output_spec = ComputeTensorTraceOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['trace'] = os.path.abspath(self._gen_outfilename()) + outputs["trace"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_TrD.img' # Need to change to self.inputs.outputdatatype + return name + "_TrD.img" # Need to change to self.inputs.outputdatatype class ComputeEigensystemInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='< %s', + argstr="< %s", mandatory=True, position=1, - desc='Tensor-fitted data filename') + desc="Tensor-fitted data filename", + ) inputmodel = traits.Enum( - 'dt', - 'multitensor', - argstr='-inputmodel %s', - desc= - 'Specifies the model that the input data contains parameters for. Possible model types are: "dt" (diffusion-tensor data) and "multitensor"' + "dt", + "multitensor", + argstr="-inputmodel %s", + desc='Specifies the model that the input data contains parameters for. Possible model types are: "dt" (diffusion-tensor data) and "multitensor"', ) maxcomponents = traits.Int( - argstr='-maxcomponents %d', - desc= - 'The maximum number of tensor components in a voxel of the input data.' + argstr="-maxcomponents %d", + desc="The maximum number of tensor components in a voxel of the input data.", ) inputdatatype = traits.Enum( - 'double', - 'float', - 'long', - 'int', - 'short', - 'char', - argstr='-inputdatatype %s', + "double", + "float", + "long", + "int", + "short", + "char", + argstr="-inputdatatype %s", usedefault=True, - desc=('Specifies the data type of the input data. ' - 'The data type can be any of the following strings: ' - '"char", "short", "int", "long", "float" or "double".' - 'Default is double data type')) + desc=( + "Specifies the data type of the input data. " + "The data type can be any of the following strings: " + '"char", "short", "int", "long", "float" or "double".' + "Default is double data type" + ), + ) outputdatatype = traits.Enum( - 'double', - 'float', - 'long', - 'int', - 'short', - 'char', - argstr='-outputdatatype %s', + "double", + "float", + "long", + "int", + "short", + "char", + argstr="-outputdatatype %s", usedefault=True, - desc=('Specifies the data type of the output data. ' - 'The data type can be any of the following strings: ' - '"char", "short", "int", "long", "float" or "double".' - 'Default is double data type')) + desc=( + "Specifies the data type of the output data. " + "The data type can be any of the following strings: " + '"char", "short", "int", "long", "float" or "double".' 
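# The statistics wrappers here (md, fa, trd) are StdOutCommandLine
# subclasses: argstr="< %s" feeds the voxel-order data on stdin and the
# generated out_file is bound to "> %s", so the assembled command is a
# plain shell redirection, roughly:
#     fa < tensor_fitted_data.Bdouble > <generated name>
# Sketch in the docstring style (untested here):
# >>> import nipype.interfaces.camino as cam
# >>> fa = cam.ComputeFractionalAnisotropy()
# >>> fa.inputs.in_file = 'tensor_fitted_data.Bdouble'
# >>> fa.inputs.scheme_file = 'A.scheme'
# >>> fa.cmdline  # doctest: +SKIP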
+ "Default is double data type" + ), + ) class ComputeEigensystemOutputSpec(TraitedSpec): - eigen = File(exists=True, desc='Trace of the diffusion tensor') + eigen = File(exists=True, desc="Trace of the diffusion tensor") class ComputeEigensystem(StdOutCommandLine): @@ -1499,7 +1566,8 @@ class ComputeEigensystem(StdOutCommandLine): >>> dteig.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> dteig.run() # doctest: +SKIP """ - _cmd = 'dteig' + + _cmd = "dteig" input_spec = ComputeEigensystemInputSpec output_spec = ComputeEigensystemOutputSpec @@ -1511,4 +1579,4 @@ def _list_outputs(self): def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) datatype = self.inputs.outputdatatype - return name + '_eig.B' + datatype + return name + "_eig.B" + datatype diff --git a/nipype/interfaces/camino/odf.py b/nipype/interfaces/camino/odf.py index 163c41fd87..f152f32762 100644 --- a/nipype/interfaces/camino/odf.py +++ b/nipype/interfaces/camino/odf.py @@ -3,52 +3,69 @@ import os from ...utils.filemanip import split_filename -from ..base import (traits, TraitedSpec, File, StdOutCommandLine, - StdOutCommandLineInputSpec) +from ..base import ( + traits, + TraitedSpec, + File, + StdOutCommandLine, + StdOutCommandLineInputSpec, +) class QBallMXInputSpec(StdOutCommandLineInputSpec): basistype = traits.Enum( - 'rbf', - 'sh', - argstr='-basistype %s', - desc=('Basis function type. "rbf" to use radial basis functions ' - '"sh" to use spherical harmonics'), - usedefault=True) + "rbf", + "sh", + argstr="-basistype %s", + desc=( + 'Basis function type. "rbf" to use radial basis functions ' + '"sh" to use spherical harmonics' + ), + usedefault=True, + ) scheme_file = File( exists=True, - argstr='-schemefile %s', + argstr="-schemefile %s", mandatory=True, - desc='Specifies the scheme file for the diffusion MRI data') + desc="Specifies the scheme file for the diffusion MRI data", + ) order = traits.Int( - argstr='-order %d', - units='NA', + argstr="-order %d", + units="NA", desc=( - 'Specific to sh. Maximum order of the spherical harmonic series. ' - 'Default is 4.')) + "Specific to sh. Maximum order of the spherical harmonic series. " + "Default is 4." + ), + ) rbfpointset = traits.Int( - argstr='-rbfpointset %d', - units='NA', + argstr="-rbfpointset %d", + units="NA", desc=( - 'Specific to rbf. Sets the number of radial basis functions to use. ' - 'The value specified must be present in the Pointsets directory. ' - 'The default value is 246.')) + "Specific to rbf. Sets the number of radial basis functions to use. " + "The value specified must be present in the Pointsets directory. " + "The default value is 246." + ), + ) rbfsigma = traits.Float( - argstr='-rbfsigma %f', - units='NA', - desc= - ('Specific to rbf. Sets the width of the interpolating basis functions. ' - 'The default value is 0.2618 (15 degrees).')) + argstr="-rbfsigma %f", + units="NA", + desc=( + "Specific to rbf. Sets the width of the interpolating basis functions. " + "The default value is 0.2618 (15 degrees)." + ), + ) smoothingsigma = traits.Float( - argstr='-smoothingsigma %f', - units='NA', + argstr="-smoothingsigma %f", + units="NA", desc=( - 'Specific to rbf. Sets the width of the smoothing basis functions. ' - 'The default value is 0.1309 (7.5 degrees).')) + "Specific to rbf. Sets the width of the smoothing basis functions. " + "The default value is 0.1309 (7.5 degrees)." 
+ ), + ) class QBallMXOutputSpec(TraitedSpec): - qmat = File(exists=True, desc='Q-Ball reconstruction matrix') + qmat = File(exists=True, desc="Q-Ball reconstruction matrix") class QBallMX(StdOutCommandLine): @@ -90,52 +107,61 @@ class QBallMX(StdOutCommandLine): >>> qballcoeffs.inputs.bgmask = 'brain_mask.nii' >>> qballcoeffs.run() # doctest: +SKIP """ - _cmd = 'qballmx' + + _cmd = "qballmx" input_spec = QBallMXInputSpec output_spec = QBallMXOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['qmat'] = os.path.abspath(self._gen_outfilename()) + outputs["qmat"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.scheme_file) - return name + '_qmat.Bdouble' + return name + "_qmat.Bdouble" class LinReconInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=1, - desc='voxel-order data filename') + desc="voxel-order data filename", + ) scheme_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=2, - desc='Specifies the scheme file for the diffusion MRI data') + desc="Specifies the scheme file for the diffusion MRI data", + ) qball_mat = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=3, - desc='Linear transformation matrix.') + desc="Linear transformation matrix.", + ) normalize = traits.Bool( - argstr='-normalize', - desc=('Normalize the measurements and discard ' - 'the zero measurements before the linear transform.')) + argstr="-normalize", + desc=( + "Normalize the measurements and discard " + "the zero measurements before the linear transform." + ), + ) log = traits.Bool( - argstr='-log', - desc=('Transform the log measurements rather than the ' - 'measurements themselves')) - bgmask = File(exists=True, argstr='-bgmask %s', desc='background mask') + argstr="-log", + desc=( + "Transform the log measurements rather than the " "measurements themselves" + ), + ) + bgmask = File(exists=True, argstr="-bgmask %s", desc="background mask") class LinReconOutputSpec(TraitedSpec): - recon_data = File(exists=True, desc='Transformed data') + recon_data = File(exists=True, desc="Transformed data") class LinRecon(StdOutCommandLine): @@ -181,84 +207,96 @@ class LinRecon(StdOutCommandLine): >>> qballcoeffs.inputs.normalize = True >>> qballcoeffs.run() # doctest: +SKIP """ - _cmd = 'linrecon' + + _cmd = "linrecon" input_spec = LinReconInputSpec output_spec = LinReconOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['recon_data'] = os.path.abspath(self._gen_outfilename()) + outputs["recon_data"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.scheme_file) - return name + '_recondata.Bdouble' + return name + "_recondata.Bdouble" class MESDInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='-inputfile %s', + argstr="-inputfile %s", mandatory=True, position=1, - desc='voxel-order data filename') + desc="voxel-order data filename", + ) inverter = traits.Enum( - 'SPIKE', - 'PAS', - argstr='-filter %s', + "SPIKE", + "PAS", + argstr="-filter %s", position=2, mandatory=True, - desc= - ('The inversion index specifies the type of inversion to perform on the data.' 
- 'The currently available choices are:' - 'Inverter name | Inverter parameters' - '---------------|------------------' - 'SPIKE | bd (b-value x diffusivity along the fibre.)' - 'PAS | r')) + desc=( + "The inversion index specifies the type of inversion to perform on the data." + "The currently available choices are:" + "Inverter name | Inverter parameters" + "---------------|------------------" + "SPIKE | bd (b-value x diffusivity along the fibre.)" + "PAS | r" + ), + ) inverter_param = traits.Float( - argstr='%f', - units='NA', + argstr="%f", + units="NA", position=3, mandatory=True, - desc= - ('Parameter associated with the inverter. Cf. inverter description for' - 'more information.')) + desc=( + "Parameter associated with the inverter. Cf. inverter description for" + "more information." + ), + ) fastmesd = traits.Bool( - argstr='-fastmesd', - requires=['mepointset'], - desc= - ('Turns off numerical integration checks and fixes the integration point set size at that of' - 'the index specified by -basepointset..')) + argstr="-fastmesd", + requires=["mepointset"], + desc=( + "Turns off numerical integration checks and fixes the integration point set size at that of" + "the index specified by -basepointset.." + ), + ) mepointset = traits.Int( - argstr='-mepointset %d', - units='NA', - desc= - ('Use a set of directions other than those in the scheme file for the deconvolution kernel.' - 'The number refers to the number of directions on the unit sphere. For example, ' - '"-mepointset 54" uses the directions in "camino/PointSets/Elec054.txt".' - )) + argstr="-mepointset %d", + units="NA", + desc=( + "Use a set of directions other than those in the scheme file for the deconvolution kernel." + "The number refers to the number of directions on the unit sphere. For example, " + '"-mepointset 54" uses the directions in "camino/PointSets/Elec054.txt".' + ), + ) scheme_file = File( exists=True, - argstr='-schemefile %s', + argstr="-schemefile %s", mandatory=True, - desc='Specifies the scheme file for the diffusion MRI data') - bgmask = File(exists=True, argstr='-bgmask %s', desc='background mask') + desc="Specifies the scheme file for the diffusion MRI data", + ) + bgmask = File(exists=True, argstr="-bgmask %s", desc="background mask") inputdatatype = traits.Enum( - 'float', - 'char', - 'short', - 'int', - 'long', - 'double', - argstr='-inputdatatype %s', - desc= - ('Specifies the data type of the input file: "char", "short", "int", "long",' - '"float" or "double". The input file must have BIG-ENDIAN ordering.' - 'By default, the input type is "float".')) + "float", + "char", + "short", + "int", + "long", + "double", + argstr="-inputdatatype %s", + desc=( + 'Specifies the data type of the input file: "char", "short", "int", "long",' + '"float" or "double". The input file must have BIG-ENDIAN ordering.' + 'By default, the input type is "float".' 
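# A usage sketch for MESD, mirroring the docstring example later in this
# hunk: SPIKE expects bd (b-value times diffusivity along the fibre), PAS
# expects r; either is passed through inverter_param.
# >>> from nipype.interfaces.camino import MESD
# >>> mesd = MESD()
# >>> mesd.inputs.in_file = 'SubjectA.Bfloat'
# >>> mesd.inputs.scheme_file = 'A.scheme'
# >>> mesd.inputs.inverter = 'PAS'
# >>> mesd.inputs.inverter_param = 1.4
# >>> mesd.run()  # doctest: +SKIP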
+ ), + ) class MESDOutputSpec(TraitedSpec): - mesd_data = File(exists=True, desc='MESD data') + mesd_data = File(exists=True, desc="MESD data") class MESD(StdOutCommandLine): @@ -338,116 +376,128 @@ class MESD(StdOutCommandLine): >>> mesd.inputs.inverter_param = 1.4 >>> mesd.run() # doctest: +SKIP """ - _cmd = 'mesd' + + _cmd = "mesd" input_spec = MESDInputSpec output_spec = MESDOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['mesd_data'] = os.path.abspath(self._gen_outfilename()) + outputs["mesd_data"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.scheme_file) - return name + '_MESD.Bdouble' + return name + "_MESD.Bdouble" class SFPeaksInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, - argstr='-inputfile %s', + argstr="-inputfile %s", mandatory=True, - desc='Voxel-order data of spherical functions') + desc="Voxel-order data of spherical functions", + ) inputmodel = traits.Enum( - 'sh', - 'maxent', - 'rbf', - argstr='-inputmodel %s', + "sh", + "maxent", + "rbf", + argstr="-inputmodel %s", mandatory=True, - desc= - ('Type of functions input via in_file. Currently supported options are: ' - ' sh - Spherical harmonic series. Specify the maximum order of the SH series ' - ' with the "order" attribute if different from the default of 4. ' - ' maxent - Maximum entropy representations output by MESD. The reconstruction ' - ' directions input to MESD must be specified. By default this is the ' - ' same set of gradient directions (excluding zero gradients) in the ' - ' scheme file, so specify the "schemefile" attribute unless the ' - ' "mepointset" attribute was set in MESD. ' - ' rbf - Sums of radial basis functions. Specify the pointset with the attribute ' - ' "rbfpointset" if different from the default. See QBallMX.')) + desc=( + "Type of functions input via in_file. Currently supported options are: " + " sh - Spherical harmonic series. Specify the maximum order of the SH series " + ' with the "order" attribute if different from the default of 4. ' + " maxent - Maximum entropy representations output by MESD. The reconstruction " + " directions input to MESD must be specified. By default this is the " + " same set of gradient directions (excluding zero gradients) in the " + ' scheme file, so specify the "schemefile" attribute unless the ' + ' "mepointset" attribute was set in MESD. ' + " rbf - Sums of radial basis functions. Specify the pointset with the attribute " + ' "rbfpointset" if different from the default. See QBallMX.' + ), + ) order = traits.Int( - argstr='-order %d', - units='NA', - desc='Specific to sh. Maximum order of the spherical harmonic series.') + argstr="-order %d", + units="NA", + desc="Specific to sh. Maximum order of the spherical harmonic series.", + ) scheme_file = File( - exists=True, - argstr='%s', - desc='Specific to maxent. Specifies the scheme file.') + exists=True, argstr="%s", desc="Specific to maxent. Specifies the scheme file." + ) rbfpointset = traits.Int( - argstr='-rbfpointset %d', - units='NA', + argstr="-rbfpointset %d", + units="NA", desc=( - 'Specific to rbf. Sets the number of radial basis functions to use. ' - 'The value specified must be present in the Pointsets directory. ' - 'The default value is 246.')) + "Specific to rbf. Sets the number of radial basis functions to use. " + "The value specified must be present in the Pointsets directory. " + "The default value is 246." 
+ ), + ) mepointset = traits.Int( - argstr='-mepointset %d', - units='NA', - desc= - ('Use a set of directions other than those in the scheme file for the deconvolution ' - 'kernel. The number refers to the number of directions on the unit sphere. ' - 'For example, "mepointset = 54" uses the directions in "camino/PointSets/Elec054.txt" ' - 'Use this option only if you told MESD to use a custom set of directions with the same ' - 'option. Otherwise, specify the scheme file with the "schemefile" attribute.' - )) + argstr="-mepointset %d", + units="NA", + desc=( + "Use a set of directions other than those in the scheme file for the deconvolution " + "kernel. The number refers to the number of directions on the unit sphere. " + 'For example, "mepointset = 54" uses the directions in "camino/PointSets/Elec054.txt" ' + "Use this option only if you told MESD to use a custom set of directions with the same " + 'option. Otherwise, specify the scheme file with the "schemefile" attribute.' + ), + ) numpds = traits.Int( - argstr='-numpds %d', - units='NA', - desc='The largest number of peak directions to output in each voxel.') + argstr="-numpds %d", + units="NA", + desc="The largest number of peak directions to output in each voxel.", + ) noconsistencycheck = traits.Bool( - argstr='-noconsistencycheck', - desc= - 'Turns off the consistency check. The output shows all consistencies as true.' + argstr="-noconsistencycheck", + desc="Turns off the consistency check. The output shows all consistencies as true.", ) searchradius = traits.Float( - argstr='-searchradius %f', - units='NA', - desc= - 'The search radius in the peak finding algorithm. The default is 0.4 (cf. "density")' + argstr="-searchradius %f", + units="NA", + desc='The search radius in the peak finding algorithm. The default is 0.4 (cf. "density")', ) density = traits.Int( - argstr='-density %d', - units='NA', - desc= - ('The number of randomly rotated icosahedra to use in constructing the set of points for ' - 'random sampling in the peak finding algorithm. Default is 1000, which works well for very ' - 'spiky maxent functions. For other types of function, it is reasonable to set the density ' - 'much lower and increase the search radius slightly, which speeds up the computation.' - )) + argstr="-density %d", + units="NA", + desc=( + "The number of randomly rotated icosahedra to use in constructing the set of points for " + "random sampling in the peak finding algorithm. Default is 1000, which works well for very " + "spiky maxent functions. For other types of function, it is reasonable to set the density " + "much lower and increase the search radius slightly, which speeds up the computation." + ), + ) pointset = traits.Int( - argstr='-pointset %d', - units='NA', - desc= - ('To sample using an evenly distributed set of points instead. The integer can be ' - '0, 1, ..., 7. Index 0 gives 1082 points, 1 gives 1922, 2 gives 3002, 3 gives 4322, ' - '4 gives 5882, 5 gives 8672, 6 gives 12002, 7 gives 15872.')) + argstr="-pointset %d", + units="NA", + desc=( + "To sample using an evenly distributed set of points instead. The integer can be " + "0, 1, ..., 7. Index 0 gives 1082 points, 1 gives 1922, 2 gives 3002, 3 gives 4322, " + "4 gives 5882, 5 gives 8672, 6 gives 12002, 7 gives 15872." + ), + ) pdthresh = traits.Float( - argstr='-pdthresh %f', - units='NA', - desc= - ('Base threshold on the actual peak direction strength divided by the mean of the ' - 'function. The default is 1.0 (the peak must be equal or greater than the mean).' 
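# Camino voxel-order files such as the *_peaks.Bdouble output of SFPeaks
# are raw big-endian doubles. A loading sketch (file name hypothetical;
# reshaping by numpds and the image dimensions is deliberately left out):
import numpy as np

peaks = np.fromfile("SubjectA_peaks.Bdouble", dtype=">f8")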
- )) + argstr="-pdthresh %f", + units="NA", + desc=( + "Base threshold on the actual peak direction strength divided by the mean of the " + "function. The default is 1.0 (the peak must be equal or greater than the mean)." + ), + ) stdsfrommean = traits.Float( - argstr='-stdsfrommean %f', - units='NA', - desc= - ('This is the number of standard deviations of the function to be added to the ' - '"pdthresh" attribute in the peak directions pruning.')) + argstr="-stdsfrommean %f", + units="NA", + desc=( + "This is the number of standard deviations of the function to be added to the " + '"pdthresh" attribute in the peak directions pruning.' + ), + ) class SFPeaksOutputSpec(TraitedSpec): - peaks = File(exists=True, desc='Peaks of the spherical functions.') + peaks = File(exists=True, desc="Peaks of the spherical functions.") class SFPeaks(StdOutCommandLine): @@ -528,15 +578,16 @@ class SFPeaks(StdOutCommandLine): >>> sf_peaks.inputs.searchradius = 1.0 >>> sf_peaks.run() # doctest: +SKIP """ - _cmd = 'sfpeaks' + + _cmd = "sfpeaks" input_spec = SFPeaksInputSpec output_spec = SFPeaksOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['peaks'] = os.path.abspath(self._gen_outfilename()) + outputs["peaks"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_peaks.Bdouble' + return name + "_peaks.Bdouble" diff --git a/nipype/interfaces/camino/tests/test_auto_AnalyzeHeader.py b/nipype/interfaces/camino/tests/test_auto_AnalyzeHeader.py index 6341c5fb46..419330da13 100644 --- a/nipype/interfaces/camino/tests/test_auto_AnalyzeHeader.py +++ b/nipype/interfaces/camino/tests/test_auto_AnalyzeHeader.py @@ -4,109 +4,42 @@ def test_AnalyzeHeader_inputs(): input_map = dict( - args=dict(argstr='%s', ), - centre=dict( - argstr='-centre %s', - units='mm', - ), - data_dims=dict( - argstr='-datadims %s', - units='voxels', - ), - datatype=dict( - argstr='-datatype %s', - mandatory=True, - ), - description=dict(argstr='-description %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - greylevels=dict( - argstr='-gl %s', - units='NA', - ), - in_file=dict( - argstr='< %s', - extensions=None, - mandatory=True, - position=1, - ), - initfromheader=dict( - argstr='-initfromheader %s', - extensions=None, - position=3, - ), - intelbyteorder=dict(argstr='-intelbyteorder', ), - networkbyteorder=dict(argstr='-networkbyteorder', ), - nimages=dict( - argstr='-nimages %d', - units='NA', - ), - offset=dict( - argstr='-offset %d', - units='NA', - ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - picoseed=dict( - argstr='-picoseed %s', - units='mm', - ), - printbigendian=dict( - argstr='-printbigendian %s', - extensions=None, - position=3, - ), - printimagedims=dict( - argstr='-printimagedims %s', - extensions=None, - position=3, - ), + args=dict(argstr="%s",), + centre=dict(argstr="-centre %s", units="mm",), + data_dims=dict(argstr="-datadims %s", units="voxels",), + datatype=dict(argstr="-datatype %s", mandatory=True,), + description=dict(argstr="-description %s",), + environ=dict(nohash=True, usedefault=True,), + greylevels=dict(argstr="-gl %s", units="NA",), + in_file=dict(argstr="< %s", extensions=None, mandatory=True, position=1,), + initfromheader=dict(argstr="-initfromheader %s", extensions=None, position=3,), + intelbyteorder=dict(argstr="-intelbyteorder",), + networkbyteorder=dict(argstr="-networkbyteorder",), + nimages=dict(argstr="-nimages 
%d", units="NA",), + offset=dict(argstr="-offset %d", units="NA",), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + picoseed=dict(argstr="-picoseed %s", units="mm",), + printbigendian=dict(argstr="-printbigendian %s", extensions=None, position=3,), + printimagedims=dict(argstr="-printimagedims %s", extensions=None, position=3,), printintelbyteorder=dict( - argstr='-printintelbyteorder %s', - extensions=None, - position=3, - ), - printprogargs=dict( - argstr='-printprogargs %s', - extensions=None, - position=3, - ), - readheader=dict( - argstr='-readheader %s', - extensions=None, - position=3, - ), - scaleinter=dict( - argstr='-scaleinter %d', - units='NA', - ), - scaleslope=dict( - argstr='-scaleslope %d', - units='NA', - ), - scheme_file=dict( - argstr='%s', - extensions=None, - position=2, - ), - voxel_dims=dict( - argstr='-voxeldims %s', - units='mm', - ), + argstr="-printintelbyteorder %s", extensions=None, position=3, + ), + printprogargs=dict(argstr="-printprogargs %s", extensions=None, position=3,), + readheader=dict(argstr="-readheader %s", extensions=None, position=3,), + scaleinter=dict(argstr="-scaleinter %d", units="NA",), + scaleslope=dict(argstr="-scaleslope %d", units="NA",), + scheme_file=dict(argstr="%s", extensions=None, position=2,), + voxel_dims=dict(argstr="-voxeldims %s", units="mm",), ) inputs = AnalyzeHeader.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AnalyzeHeader_outputs(): - output_map = dict(header=dict(extensions=None, ), ) + output_map = dict(header=dict(extensions=None,),) outputs = AnalyzeHeader.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_ComputeEigensystem.py b/nipype/interfaces/camino/tests/test_auto_ComputeEigensystem.py index 63c652319c..70e1603a33 100644 --- a/nipype/interfaces/camino/tests/test_auto_ComputeEigensystem.py +++ b/nipype/interfaces/camino/tests/test_auto_ComputeEigensystem.py @@ -4,41 +4,24 @@ def test_ComputeEigensystem_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='< %s', - extensions=None, - mandatory=True, - position=1, - ), - inputdatatype=dict( - argstr='-inputdatatype %s', - usedefault=True, - ), - inputmodel=dict(argstr='-inputmodel %s', ), - maxcomponents=dict(argstr='-maxcomponents %d', ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - outputdatatype=dict( - argstr='-outputdatatype %s', - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="< %s", extensions=None, mandatory=True, position=1,), + inputdatatype=dict(argstr="-inputdatatype %s", usedefault=True,), + inputmodel=dict(argstr="-inputmodel %s",), + maxcomponents=dict(argstr="-maxcomponents %d",), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + outputdatatype=dict(argstr="-outputdatatype %s", usedefault=True,), ) inputs = ComputeEigensystem.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ComputeEigensystem_outputs(): - output_map = dict(eigen=dict(extensions=None, ), ) + output_map = dict(eigen=dict(extensions=None,),) outputs = ComputeEigensystem.output_spec() for key, metadata in 
list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_ComputeFractionalAnisotropy.py b/nipype/interfaces/camino/tests/test_auto_ComputeFractionalAnisotropy.py index 6475557c40..cecdcd6dd9 100644 --- a/nipype/interfaces/camino/tests/test_auto_ComputeFractionalAnisotropy.py +++ b/nipype/interfaces/camino/tests/test_auto_ComputeFractionalAnisotropy.py @@ -4,39 +4,24 @@ def test_ComputeFractionalAnisotropy_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='< %s', - extensions=None, - mandatory=True, - position=1, - ), - inputdatatype=dict(argstr='-inputdatatype %s', ), - inputmodel=dict(argstr='-inputmodel %s', ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - outputdatatype=dict(argstr='-outputdatatype %s', ), - scheme_file=dict( - argstr='%s', - extensions=None, - position=2, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="< %s", extensions=None, mandatory=True, position=1,), + inputdatatype=dict(argstr="-inputdatatype %s",), + inputmodel=dict(argstr="-inputmodel %s",), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + outputdatatype=dict(argstr="-outputdatatype %s",), + scheme_file=dict(argstr="%s", extensions=None, position=2,), ) inputs = ComputeFractionalAnisotropy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ComputeFractionalAnisotropy_outputs(): - output_map = dict(fa=dict(extensions=None, ), ) + output_map = dict(fa=dict(extensions=None,),) outputs = ComputeFractionalAnisotropy.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_ComputeMeanDiffusivity.py b/nipype/interfaces/camino/tests/test_auto_ComputeMeanDiffusivity.py index a068d7de24..692d900494 100644 --- a/nipype/interfaces/camino/tests/test_auto_ComputeMeanDiffusivity.py +++ b/nipype/interfaces/camino/tests/test_auto_ComputeMeanDiffusivity.py @@ -4,39 +4,24 @@ def test_ComputeMeanDiffusivity_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='< %s', - extensions=None, - mandatory=True, - position=1, - ), - inputdatatype=dict(argstr='-inputdatatype %s', ), - inputmodel=dict(argstr='-inputmodel %s', ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - outputdatatype=dict(argstr='-outputdatatype %s', ), - scheme_file=dict( - argstr='%s', - extensions=None, - position=2, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="< %s", extensions=None, mandatory=True, position=1,), + inputdatatype=dict(argstr="-inputdatatype %s",), + inputmodel=dict(argstr="-inputmodel %s",), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + outputdatatype=dict(argstr="-outputdatatype %s",), + scheme_file=dict(argstr="%s", extensions=None, position=2,), ) inputs = ComputeMeanDiffusivity.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ComputeMeanDiffusivity_outputs(): - output_map = dict(md=dict(extensions=None, ), ) + output_map = dict(md=dict(extensions=None,),) outputs = ComputeMeanDiffusivity.output_spec() 
for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_ComputeTensorTrace.py b/nipype/interfaces/camino/tests/test_auto_ComputeTensorTrace.py index a05dbf331c..3a7469378e 100644 --- a/nipype/interfaces/camino/tests/test_auto_ComputeTensorTrace.py +++ b/nipype/interfaces/camino/tests/test_auto_ComputeTensorTrace.py @@ -4,39 +4,24 @@ def test_ComputeTensorTrace_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='< %s', - extensions=None, - mandatory=True, - position=1, - ), - inputdatatype=dict(argstr='-inputdatatype %s', ), - inputmodel=dict(argstr='-inputmodel %s', ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - outputdatatype=dict(argstr='-outputdatatype %s', ), - scheme_file=dict( - argstr='%s', - extensions=None, - position=2, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="< %s", extensions=None, mandatory=True, position=1,), + inputdatatype=dict(argstr="-inputdatatype %s",), + inputmodel=dict(argstr="-inputmodel %s",), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + outputdatatype=dict(argstr="-outputdatatype %s",), + scheme_file=dict(argstr="%s", extensions=None, position=2,), ) inputs = ComputeTensorTrace.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ComputeTensorTrace_outputs(): - output_map = dict(trace=dict(extensions=None, ), ) + output_map = dict(trace=dict(extensions=None,),) outputs = ComputeTensorTrace.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_Conmat.py b/nipype/interfaces/camino/tests/test_auto_Conmat.py index 52a5b668d7..65c84dc64c 100644 --- a/nipype/interfaces/camino/tests/test_auto_Conmat.py +++ b/nipype/interfaces/camino/tests/test_auto_Conmat.py @@ -4,45 +4,21 @@ def test_Conmat_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-inputfile %s', - extensions=None, - mandatory=True, - ), - output_root=dict( - argstr='-outputroot %s', - extensions=None, - genfile=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-inputfile %s", extensions=None, mandatory=True,), + output_root=dict(argstr="-outputroot %s", extensions=None, genfile=True,), scalar_file=dict( - argstr='-scalarfile %s', - extensions=None, - requires=['tract_stat'], - ), - target_file=dict( - argstr='-targetfile %s', - extensions=None, - mandatory=True, - ), - targetname_file=dict( - argstr='-targetnamefile %s', - extensions=None, - ), - tract_prop=dict( - argstr='-tractstat %s', - units='NA', - xor=['tract_stat'], + argstr="-scalarfile %s", extensions=None, requires=["tract_stat"], ), + target_file=dict(argstr="-targetfile %s", extensions=None, mandatory=True,), + targetname_file=dict(argstr="-targetnamefile %s", extensions=None,), + tract_prop=dict(argstr="-tractstat %s", units="NA", xor=["tract_stat"],), tract_stat=dict( - argstr='-tractstat %s', - requires=['scalar_file'], - units='NA', - xor=['tract_prop'], + argstr="-tractstat %s", + requires=["scalar_file"], + units="NA", + xor=["tract_prop"], ), ) inputs = Conmat.input_spec() @@ -50,10 +26,11 @@ def test_Conmat_inputs(): for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Conmat_outputs(): output_map = dict( - conmat_sc=dict(extensions=None, ), - conmat_ts=dict(extensions=None, ), + conmat_sc=dict(extensions=None,), conmat_ts=dict(extensions=None,), ) outputs = Conmat.output_spec() diff --git a/nipype/interfaces/camino/tests/test_auto_DT2NIfTI.py b/nipype/interfaces/camino/tests/test_auto_DT2NIfTI.py index ab006820f8..fce7560dd2 100644 --- a/nipype/interfaces/camino/tests/test_auto_DT2NIfTI.py +++ b/nipype/interfaces/camino/tests/test_auto_DT2NIfTI.py @@ -4,28 +4,16 @@ def test_DT2NIfTI_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), header_file=dict( - argstr='-header %s', - extensions=None, - mandatory=True, - position=3, + argstr="-header %s", extensions=None, mandatory=True, position=3, ), in_file=dict( - argstr='-inputfile %s', - extensions=None, - mandatory=True, - position=1, + argstr="-inputfile %s", extensions=None, mandatory=True, position=1, ), output_root=dict( - argstr='-outputroot %s', - extensions=None, - genfile=True, - position=2, + argstr="-outputroot %s", extensions=None, genfile=True, position=2, ), ) inputs = DT2NIfTI.input_spec() @@ -33,11 +21,13 @@ def test_DT2NIfTI_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DT2NIfTI_outputs(): output_map = dict( - dt=dict(extensions=None, ), - exitcode=dict(extensions=None, ), - lns0=dict(extensions=None, ), + dt=dict(extensions=None,), + exitcode=dict(extensions=None,), + lns0=dict(extensions=None,), ) outputs = DT2NIfTI.output_spec() diff --git a/nipype/interfaces/camino/tests/test_auto_DTIFit.py b/nipype/interfaces/camino/tests/test_auto_DTIFit.py index 9322028345..467e2d54ea 100644 --- a/nipype/interfaces/camino/tests/test_auto_DTIFit.py +++ b/nipype/interfaces/camino/tests/test_auto_DTIFit.py @@ -4,45 +4,23 @@ def test_DTIFit_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bgmask=dict( - argstr='-bgmask %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=1, - ), - non_linear=dict( - argstr='-nonlinear', - position=3, - ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - scheme_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), + args=dict(argstr="%s",), + bgmask=dict(argstr="-bgmask %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=1,), + non_linear=dict(argstr="-nonlinear", position=3,), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + scheme_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), ) inputs = DTIFit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DTIFit_outputs(): - output_map = dict(tensor_fitted=dict(extensions=None, ), ) + output_map = dict(tensor_fitted=dict(extensions=None,),) outputs = DTIFit.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_DTLUTGen.py 
b/nipype/interfaces/camino/tests/test_auto_DTLUTGen.py index 9a564cf37b..674d38a37b 100644 --- a/nipype/interfaces/camino/tests/test_auto_DTLUTGen.py +++ b/nipype/interfaces/camino/tests/test_auto_DTLUTGen.py @@ -4,64 +4,32 @@ def test_DTLUTGen_inputs(): input_map = dict( - acg=dict(argstr='-acg', ), - args=dict(argstr='%s', ), - bingham=dict(argstr='-bingham', ), - environ=dict( - nohash=True, - usedefault=True, - ), - frange=dict( - argstr='-frange %s', - position=1, - units='NA', - ), - inversion=dict( - argstr='-inversion %d', - units='NA', - ), - lrange=dict( - argstr='-lrange %s', - position=1, - units='NA', - ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - samples=dict( - argstr='-samples %d', - units='NA', - ), + acg=dict(argstr="-acg",), + args=dict(argstr="%s",), + bingham=dict(argstr="-bingham",), + environ=dict(nohash=True, usedefault=True,), + frange=dict(argstr="-frange %s", position=1, units="NA",), + inversion=dict(argstr="-inversion %d", units="NA",), + lrange=dict(argstr="-lrange %s", position=1, units="NA",), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + samples=dict(argstr="-samples %d", units="NA",), scheme_file=dict( - argstr='-schemefile %s', - extensions=None, - mandatory=True, - position=2, + argstr="-schemefile %s", extensions=None, mandatory=True, position=2, ), - snr=dict( - argstr='-snr %f', - units='NA', - ), - step=dict( - argstr='-step %f', - units='NA', - ), - trace=dict( - argstr='-trace %G', - units='NA', - ), - watson=dict(argstr='-watson', ), + snr=dict(argstr="-snr %f", units="NA",), + step=dict(argstr="-step %f", units="NA",), + trace=dict(argstr="-trace %G", units="NA",), + watson=dict(argstr="-watson",), ) inputs = DTLUTGen.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DTLUTGen_outputs(): - output_map = dict(dtLUT=dict(extensions=None, ), ) + output_map = dict(dtLUT=dict(extensions=None,),) outputs = DTLUTGen.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_DTMetric.py b/nipype/interfaces/camino/tests/test_auto_DTMetric.py index 2458921c8c..fd62a3d329 100644 --- a/nipype/interfaces/camino/tests/test_auto_DTMetric.py +++ b/nipype/interfaces/camino/tests/test_auto_DTMetric.py @@ -4,45 +4,24 @@ def test_DTMetric_inputs(): input_map = dict( - args=dict(argstr='%s', ), - data_header=dict( - argstr='-header %s', - extensions=None, - ), - eigen_data=dict( - argstr='-inputfile %s', - extensions=None, - mandatory=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputdatatype=dict( - argstr='-inputdatatype %s', - usedefault=True, - ), - metric=dict( - argstr='-stat %s', - mandatory=True, - ), - outputdatatype=dict( - argstr='-outputdatatype %s', - usedefault=True, - ), - outputfile=dict( - argstr='-outputfile %s', - extensions=None, - genfile=True, - ), + args=dict(argstr="%s",), + data_header=dict(argstr="-header %s", extensions=None,), + eigen_data=dict(argstr="-inputfile %s", extensions=None, mandatory=True,), + environ=dict(nohash=True, usedefault=True,), + inputdatatype=dict(argstr="-inputdatatype %s", usedefault=True,), + metric=dict(argstr="-stat %s", mandatory=True,), + outputdatatype=dict(argstr="-outputdatatype %s", usedefault=True,), + outputfile=dict(argstr="-outputfile %s", extensions=None, genfile=True,), ) inputs = DTMetric.input_spec() for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DTMetric_outputs(): - output_map = dict(metric_stats=dict(extensions=None, ), ) + output_map = dict(metric_stats=dict(extensions=None,),) outputs = DTMetric.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_FSL2Scheme.py b/nipype/interfaces/camino/tests/test_auto_FSL2Scheme.py index cd06996609..00b11eb751 100644 --- a/nipype/interfaces/camino/tests/test_auto_FSL2Scheme.py +++ b/nipype/interfaces/camino/tests/test_auto_FSL2Scheme.py @@ -4,54 +4,33 @@ def test_FSL2Scheme_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bscale=dict( - argstr='-bscale %d', - units='NA', - ), + args=dict(argstr="%s",), + bscale=dict(argstr="-bscale %d", units="NA",), bval_file=dict( - argstr='-bvalfile %s', - extensions=None, - mandatory=True, - position=2, + argstr="-bvalfile %s", extensions=None, mandatory=True, position=2, ), bvec_file=dict( - argstr='-bvecfile %s', - extensions=None, - mandatory=True, - position=1, - ), - diffusiontime=dict( - argstr='-diffusiontime %f', - units='NA', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - flipx=dict(argstr='-flipx', ), - flipy=dict(argstr='-flipy', ), - flipz=dict(argstr='-flipz', ), - interleave=dict(argstr='-interleave', ), - numscans=dict( - argstr='-numscans %d', - units='NA', + argstr="-bvecfile %s", extensions=None, mandatory=True, position=1, ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - usegradmod=dict(argstr='-usegradmod', ), + diffusiontime=dict(argstr="-diffusiontime %f", units="NA",), + environ=dict(nohash=True, usedefault=True,), + flipx=dict(argstr="-flipx",), + flipy=dict(argstr="-flipy",), + flipz=dict(argstr="-flipz",), + interleave=dict(argstr="-interleave",), + numscans=dict(argstr="-numscans %d", units="NA",), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + usegradmod=dict(argstr="-usegradmod",), ) inputs = FSL2Scheme.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FSL2Scheme_outputs(): - output_map = dict(scheme=dict(extensions=None, ), ) + output_map = dict(scheme=dict(extensions=None,),) outputs = FSL2Scheme.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_Image2Voxel.py b/nipype/interfaces/camino/tests/test_auto_Image2Voxel.py index 13a3107b6d..47379c7f54 100644 --- a/nipype/interfaces/camino/tests/test_auto_Image2Voxel.py +++ b/nipype/interfaces/camino/tests/test_auto_Image2Voxel.py @@ -4,36 +4,23 @@ def test_Image2Voxel_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='-4dimage %s', - extensions=None, - mandatory=True, - position=1, - ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - out_type=dict( - argstr='-outputdatatype %s', - position=2, - usedefault=True, + argstr="-4dimage %s", extensions=None, mandatory=True, position=1, ), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + out_type=dict(argstr="-outputdatatype %s", position=2, usedefault=True,), ) inputs = Image2Voxel.input_spec() for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Image2Voxel_outputs(): - output_map = dict(voxel_order=dict(extensions=None, ), ) + output_map = dict(voxel_order=dict(extensions=None,),) outputs = Image2Voxel.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_ImageStats.py b/nipype/interfaces/camino/tests/test_auto_ImageStats.py index 77f23481fc..9d817f4ca9 100644 --- a/nipype/interfaces/camino/tests/test_auto_ImageStats.py +++ b/nipype/interfaces/camino/tests/test_auto_ImageStats.py @@ -4,38 +4,22 @@ def test_ImageStats_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_files=dict( - argstr='-images %s', - mandatory=True, - position=-1, - ), - out_type=dict( - argstr='-outputdatatype %s', - usedefault=True, - ), - output_root=dict( - argstr='-outputroot %s', - extensions=None, - mandatory=True, - ), - stat=dict( - argstr='-stat %s', - mandatory=True, - units='NA', - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_files=dict(argstr="-images %s", mandatory=True, position=-1,), + out_type=dict(argstr="-outputdatatype %s", usedefault=True,), + output_root=dict(argstr="-outputroot %s", extensions=None, mandatory=True,), + stat=dict(argstr="-stat %s", mandatory=True, units="NA",), ) inputs = ImageStats.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ImageStats_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ImageStats.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_LinRecon.py b/nipype/interfaces/camino/tests/test_auto_LinRecon.py index 8d7d991708..a8a7731b70 100644 --- a/nipype/interfaces/camino/tests/test_auto_LinRecon.py +++ b/nipype/interfaces/camino/tests/test_auto_LinRecon.py @@ -4,49 +4,25 @@ def test_LinRecon_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bgmask=dict( - argstr='-bgmask %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=1, - ), - log=dict(argstr='-log', ), - normalize=dict(argstr='-normalize', ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - qball_mat=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=3, - ), - scheme_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), + args=dict(argstr="%s",), + bgmask=dict(argstr="-bgmask %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=1,), + log=dict(argstr="-log",), + normalize=dict(argstr="-normalize",), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + qball_mat=dict(argstr="%s", extensions=None, mandatory=True, position=3,), + scheme_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), ) inputs = LinRecon.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LinRecon_outputs(): - output_map = dict(recon_data=dict(extensions=None, ), ) + 
output_map = dict(recon_data=dict(extensions=None,),) outputs = LinRecon.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_MESD.py b/nipype/interfaces/camino/tests/test_auto_MESD.py index b41a8ed4ad..dd91241d0a 100644 --- a/nipype/interfaces/camino/tests/test_auto_MESD.py +++ b/nipype/interfaces/camino/tests/test_auto_MESD.py @@ -4,60 +4,29 @@ def test_MESD_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bgmask=dict( - argstr='-bgmask %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fastmesd=dict( - argstr='-fastmesd', - requires=['mepointset'], - ), + args=dict(argstr="%s",), + bgmask=dict(argstr="-bgmask %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), + fastmesd=dict(argstr="-fastmesd", requires=["mepointset"],), in_file=dict( - argstr='-inputfile %s', - extensions=None, - mandatory=True, - position=1, - ), - inputdatatype=dict(argstr='-inputdatatype %s', ), - inverter=dict( - argstr='-filter %s', - mandatory=True, - position=2, - ), - inverter_param=dict( - argstr='%f', - mandatory=True, - position=3, - units='NA', - ), - mepointset=dict( - argstr='-mepointset %d', - units='NA', - ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - scheme_file=dict( - argstr='-schemefile %s', - extensions=None, - mandatory=True, - ), + argstr="-inputfile %s", extensions=None, mandatory=True, position=1, + ), + inputdatatype=dict(argstr="-inputdatatype %s",), + inverter=dict(argstr="-filter %s", mandatory=True, position=2,), + inverter_param=dict(argstr="%f", mandatory=True, position=3, units="NA",), + mepointset=dict(argstr="-mepointset %d", units="NA",), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + scheme_file=dict(argstr="-schemefile %s", extensions=None, mandatory=True,), ) inputs = MESD.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MESD_outputs(): - output_map = dict(mesd_data=dict(extensions=None, ), ) + output_map = dict(mesd_data=dict(extensions=None,),) outputs = MESD.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_ModelFit.py b/nipype/interfaces/camino/tests/test_auto_ModelFit.py index ab2f6e1d73..ca5ba4a9d6 100644 --- a/nipype/interfaces/camino/tests/test_auto_ModelFit.py +++ b/nipype/interfaces/camino/tests/test_auto_ModelFit.py @@ -4,66 +4,34 @@ def test_ModelFit_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bgmask=dict( - argstr='-bgmask %s', - extensions=None, - ), - bgthresh=dict(argstr='-bgthresh %G', ), - cfthresh=dict(argstr='-csfthresh %G', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixedbvalue=dict(argstr='-fixedbvalue %s', ), - fixedmodq=dict(argstr='-fixedmod %s', ), - in_file=dict( - argstr='-inputfile %s', - extensions=None, - mandatory=True, - ), - inputdatatype=dict(argstr='-inputdatatype %s', ), - model=dict( - argstr='-model %s', - mandatory=True, - ), - noisemap=dict( - argstr='-noisemap %s', - extensions=None, - ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - outlier=dict( - argstr='-outliermap %s', - extensions=None, - ), - outputfile=dict( - argstr='-outputfile %s', - extensions=None, - ), - residualmap=dict( - argstr='-residualmap %s', - extensions=None, - ), - scheme_file=dict( - argstr='-schemefile 
%s', - extensions=None, - mandatory=True, - ), - sigma=dict(argstr='-sigma %G', ), - tau=dict(argstr='-tau %G', ), + args=dict(argstr="%s",), + bgmask=dict(argstr="-bgmask %s", extensions=None,), + bgthresh=dict(argstr="-bgthresh %G",), + cfthresh=dict(argstr="-csfthresh %G",), + environ=dict(nohash=True, usedefault=True,), + fixedbvalue=dict(argstr="-fixedbvalue %s",), + fixedmodq=dict(argstr="-fixedmod %s",), + in_file=dict(argstr="-inputfile %s", extensions=None, mandatory=True,), + inputdatatype=dict(argstr="-inputdatatype %s",), + model=dict(argstr="-model %s", mandatory=True,), + noisemap=dict(argstr="-noisemap %s", extensions=None,), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + outlier=dict(argstr="-outliermap %s", extensions=None,), + outputfile=dict(argstr="-outputfile %s", extensions=None,), + residualmap=dict(argstr="-residualmap %s", extensions=None,), + scheme_file=dict(argstr="-schemefile %s", extensions=None, mandatory=True,), + sigma=dict(argstr="-sigma %G",), + tau=dict(argstr="-tau %G",), ) inputs = ModelFit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ModelFit_outputs(): - output_map = dict(fitted_data=dict(extensions=None, ), ) + output_map = dict(fitted_data=dict(extensions=None,),) outputs = ModelFit.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_NIfTIDT2Camino.py b/nipype/interfaces/camino/tests/test_auto_NIfTIDT2Camino.py index 9bdd2cc3ba..951e4bdc0e 100644 --- a/nipype/interfaces/camino/tests/test_auto_NIfTIDT2Camino.py +++ b/nipype/interfaces/camino/tests/test_auto_NIfTIDT2Camino.py @@ -4,46 +4,28 @@ def test_NIfTIDT2Camino_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bgmask=dict( - argstr='-bgmask %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + bgmask=dict(argstr="-bgmask %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='-inputfile %s', - extensions=None, - mandatory=True, - position=1, - ), - lns0_file=dict( - argstr='-lns0 %s', - extensions=None, - ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, + argstr="-inputfile %s", extensions=None, mandatory=True, position=1, ), - s0_file=dict( - argstr='-s0 %s', - extensions=None, - ), - scaleinter=dict(argstr='-scaleinter %s', ), - scaleslope=dict(argstr='-scaleslope %s', ), - uppertriangular=dict(argstr='-uppertriangular %s', ), + lns0_file=dict(argstr="-lns0 %s", extensions=None,), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + s0_file=dict(argstr="-s0 %s", extensions=None,), + scaleinter=dict(argstr="-scaleinter %s",), + scaleslope=dict(argstr="-scaleslope %s",), + uppertriangular=dict(argstr="-uppertriangular %s",), ) inputs = NIfTIDT2Camino.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_NIfTIDT2Camino_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = NIfTIDT2Camino.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_PicoPDFs.py b/nipype/interfaces/camino/tests/test_auto_PicoPDFs.py index 7d118392dc..5a321dddba 100644 --- 
a/nipype/interfaces/camino/tests/test_auto_PicoPDFs.py +++ b/nipype/interfaces/camino/tests/test_auto_PicoPDFs.py @@ -4,54 +4,26 @@ def test_PicoPDFs_inputs(): input_map = dict( - args=dict(argstr='%s', ), - directmap=dict(argstr='-directmap', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='< %s', - extensions=None, - mandatory=True, - position=1, - ), - inputmodel=dict( - argstr='-inputmodel %s', - position=2, - usedefault=True, - ), - luts=dict( - argstr='-luts %s', - mandatory=True, - ), - maxcomponents=dict( - argstr='-maxcomponents %d', - units='NA', - ), - numpds=dict( - argstr='-numpds %d', - units='NA', - ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - pdf=dict( - argstr='-pdf %s', - position=4, - usedefault=True, - ), + args=dict(argstr="%s",), + directmap=dict(argstr="-directmap",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="< %s", extensions=None, mandatory=True, position=1,), + inputmodel=dict(argstr="-inputmodel %s", position=2, usedefault=True,), + luts=dict(argstr="-luts %s", mandatory=True,), + maxcomponents=dict(argstr="-maxcomponents %d", units="NA",), + numpds=dict(argstr="-numpds %d", units="NA",), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + pdf=dict(argstr="-pdf %s", position=4, usedefault=True,), ) inputs = PicoPDFs.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PicoPDFs_outputs(): - output_map = dict(pdfs=dict(extensions=None, ), ) + output_map = dict(pdfs=dict(extensions=None,),) outputs = PicoPDFs.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_ProcStreamlines.py b/nipype/interfaces/camino/tests/test_auto_ProcStreamlines.py index e4e9cb4d4f..7ccd071c99 100644 --- a/nipype/interfaces/camino/tests/test_auto_ProcStreamlines.py +++ b/nipype/interfaces/camino/tests/test_auto_ProcStreamlines.py @@ -4,133 +4,54 @@ def test_ProcStreamlines_inputs(): input_map = dict( - allowmultitargets=dict(argstr='-allowmultitargets', ), - args=dict(argstr='%s', ), - datadims=dict( - argstr='-datadims %s', - units='voxels', - ), - directional=dict( - argstr='-directional %s', - units='NA', - ), - discardloops=dict(argstr='-discardloops', ), - endpointfile=dict( - argstr='-endpointfile %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - exclusionfile=dict( - argstr='-exclusionfile %s', - extensions=None, - ), - gzip=dict(argstr='-gzip', ), + allowmultitargets=dict(argstr="-allowmultitargets",), + args=dict(argstr="%s",), + datadims=dict(argstr="-datadims %s", units="voxels",), + directional=dict(argstr="-directional %s", units="NA",), + discardloops=dict(argstr="-discardloops",), + endpointfile=dict(argstr="-endpointfile %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), + exclusionfile=dict(argstr="-exclusionfile %s", extensions=None,), + gzip=dict(argstr="-gzip",), in_file=dict( - argstr='-inputfile %s', - extensions=None, - mandatory=True, - position=1, - ), - inputmodel=dict( - argstr='-inputmodel %s', - usedefault=True, - ), - iterations=dict( - argstr='-iterations %d', - units='NA', - ), - maxtractlength=dict( - argstr='-maxtractlength %d', - units='mm', - ), - maxtractpoints=dict( - argstr='-maxtractpoints %d', - units='NA', - ), - mintractlength=dict( - argstr='-mintractlength %d', - units='mm', - 
), - mintractpoints=dict( - argstr='-mintractpoints %d', - units='NA', - ), - noresample=dict(argstr='-noresample', ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - outputacm=dict( - argstr='-outputacm', - requires=['outputroot', 'seedfile'], - ), + argstr="-inputfile %s", extensions=None, mandatory=True, position=1, + ), + inputmodel=dict(argstr="-inputmodel %s", usedefault=True,), + iterations=dict(argstr="-iterations %d", units="NA",), + maxtractlength=dict(argstr="-maxtractlength %d", units="mm",), + maxtractpoints=dict(argstr="-maxtractpoints %d", units="NA",), + mintractlength=dict(argstr="-mintractlength %d", units="mm",), + mintractpoints=dict(argstr="-mintractpoints %d", units="NA",), + noresample=dict(argstr="-noresample",), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + outputacm=dict(argstr="-outputacm", requires=["outputroot", "seedfile"],), outputcbs=dict( - argstr='-outputcbs', - requires=['outputroot', 'targetfile', 'seedfile'], - ), - outputcp=dict( - argstr='-outputcp', - requires=['outputroot', 'seedfile'], - ), - outputroot=dict( - argstr='-outputroot %s', - extensions=None, - ), - outputsc=dict( - argstr='-outputsc', - requires=['outputroot', 'seedfile'], - ), - outputtracts=dict(argstr='-outputtracts', ), - regionindex=dict( - argstr='-regionindex %d', - units='mm', - ), - resamplestepsize=dict( - argstr='-resamplestepsize %d', - units='NA', - ), - seedfile=dict( - argstr='-seedfile %s', - extensions=None, - ), - seedpointmm=dict( - argstr='-seedpointmm %s', - units='mm', - ), - seedpointvox=dict( - argstr='-seedpointvox %s', - units='voxels', - ), - targetfile=dict( - argstr='-targetfile %s', - extensions=None, - ), - truncateinexclusion=dict(argstr='-truncateinexclusion', ), - truncateloops=dict(argstr='-truncateloops', ), - voxeldims=dict( - argstr='-voxeldims %s', - units='mm', - ), - waypointfile=dict( - argstr='-waypointfile %s', - extensions=None, - ), + argstr="-outputcbs", requires=["outputroot", "targetfile", "seedfile"], + ), + outputcp=dict(argstr="-outputcp", requires=["outputroot", "seedfile"],), + outputroot=dict(argstr="-outputroot %s", extensions=None,), + outputsc=dict(argstr="-outputsc", requires=["outputroot", "seedfile"],), + outputtracts=dict(argstr="-outputtracts",), + regionindex=dict(argstr="-regionindex %d", units="mm",), + resamplestepsize=dict(argstr="-resamplestepsize %d", units="NA",), + seedfile=dict(argstr="-seedfile %s", extensions=None,), + seedpointmm=dict(argstr="-seedpointmm %s", units="mm",), + seedpointvox=dict(argstr="-seedpointvox %s", units="voxels",), + targetfile=dict(argstr="-targetfile %s", extensions=None,), + truncateinexclusion=dict(argstr="-truncateinexclusion",), + truncateloops=dict(argstr="-truncateloops",), + voxeldims=dict(argstr="-voxeldims %s", units="mm",), + waypointfile=dict(argstr="-waypointfile %s", extensions=None,), ) inputs = ProcStreamlines.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ProcStreamlines_outputs(): - output_map = dict( - outputroot_files=dict(), - proc=dict(extensions=None, ), - ) + output_map = dict(outputroot_files=dict(), proc=dict(extensions=None,),) outputs = ProcStreamlines.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_QBallMX.py b/nipype/interfaces/camino/tests/test_auto_QBallMX.py index 6e30e8e019..180e5c6f83 
100644 --- a/nipype/interfaces/camino/tests/test_auto_QBallMX.py +++ b/nipype/interfaces/camino/tests/test_auto_QBallMX.py @@ -4,50 +4,25 @@ def test_QBallMX_inputs(): input_map = dict( - args=dict(argstr='%s', ), - basistype=dict( - argstr='-basistype %s', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - order=dict( - argstr='-order %d', - units='NA', - ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - rbfpointset=dict( - argstr='-rbfpointset %d', - units='NA', - ), - rbfsigma=dict( - argstr='-rbfsigma %f', - units='NA', - ), - scheme_file=dict( - argstr='-schemefile %s', - extensions=None, - mandatory=True, - ), - smoothingsigma=dict( - argstr='-smoothingsigma %f', - units='NA', - ), + args=dict(argstr="%s",), + basistype=dict(argstr="-basistype %s", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + order=dict(argstr="-order %d", units="NA",), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + rbfpointset=dict(argstr="-rbfpointset %d", units="NA",), + rbfsigma=dict(argstr="-rbfsigma %f", units="NA",), + scheme_file=dict(argstr="-schemefile %s", extensions=None, mandatory=True,), + smoothingsigma=dict(argstr="-smoothingsigma %f", units="NA",), ) inputs = QBallMX.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_QBallMX_outputs(): - output_map = dict(qmat=dict(extensions=None, ), ) + output_map = dict(qmat=dict(extensions=None,),) outputs = QBallMX.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_SFLUTGen.py b/nipype/interfaces/camino/tests/test_auto_SFLUTGen.py index 1b16f7b7d5..96dd1c2e5e 100644 --- a/nipype/interfaces/camino/tests/test_auto_SFLUTGen.py +++ b/nipype/interfaces/camino/tests/test_auto_SFLUTGen.py @@ -4,58 +4,28 @@ def test_SFLUTGen_inputs(): input_map = dict( - args=dict(argstr='%s', ), - binincsize=dict( - argstr='-binincsize %d', - units='NA', - ), - directmap=dict(argstr='-directmap', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-inputfile %s', - extensions=None, - mandatory=True, - ), - info_file=dict( - argstr='-infofile %s', - extensions=None, - mandatory=True, - ), - minvectsperbin=dict( - argstr='-minvectsperbin %d', - units='NA', - ), - order=dict( - argstr='-order %d', - units='NA', - ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - outputstem=dict( - argstr='-outputstem %s', - usedefault=True, - ), - pdf=dict( - argstr='-pdf %s', - usedefault=True, - ), + args=dict(argstr="%s",), + binincsize=dict(argstr="-binincsize %d", units="NA",), + directmap=dict(argstr="-directmap",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-inputfile %s", extensions=None, mandatory=True,), + info_file=dict(argstr="-infofile %s", extensions=None, mandatory=True,), + minvectsperbin=dict(argstr="-minvectsperbin %d", units="NA",), + order=dict(argstr="-order %d", units="NA",), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + outputstem=dict(argstr="-outputstem %s", usedefault=True,), + pdf=dict(argstr="-pdf %s", usedefault=True,), ) inputs = SFLUTGen.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SFLUTGen_outputs(): 
output_map = dict( - lut_one_fibre=dict(extensions=None, ), - lut_two_fibres=dict(extensions=None, ), + lut_one_fibre=dict(extensions=None,), lut_two_fibres=dict(extensions=None,), ) outputs = SFLUTGen.output_spec() diff --git a/nipype/interfaces/camino/tests/test_auto_SFPICOCalibData.py b/nipype/interfaces/camino/tests/test_auto_SFPICOCalibData.py index 9c89fef228..b6a032e66d 100644 --- a/nipype/interfaces/camino/tests/test_auto_SFPICOCalibData.py +++ b/nipype/interfaces/camino/tests/test_auto_SFPICOCalibData.py @@ -4,83 +4,39 @@ def test_SFPICOCalibData_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), info_file=dict( - argstr='-infooutputfile %s', + argstr="-infooutputfile %s", extensions=None, genfile=True, hash_files=False, mandatory=True, ), - onedtfarange=dict( - argstr='-onedtfarange %s', - units='NA', - ), - onedtfastep=dict( - argstr='-onedtfastep %f', - units='NA', - ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - scheme_file=dict( - argstr='-schemefile %s', - extensions=None, - mandatory=True, - ), - seed=dict( - argstr='-seed %f', - units='NA', - ), - snr=dict( - argstr='-snr %f', - units='NA', - ), - trace=dict( - argstr='-trace %f', - units='NA', - ), - twodtanglerange=dict( - argstr='-twodtanglerange %s', - units='NA', - ), - twodtanglestep=dict( - argstr='-twodtanglestep %f', - units='NA', - ), - twodtfarange=dict( - argstr='-twodtfarange %s', - units='NA', - ), - twodtfastep=dict( - argstr='-twodtfastep %f', - units='NA', - ), - twodtmixmax=dict( - argstr='-twodtmixmax %f', - units='NA', - ), - twodtmixstep=dict( - argstr='-twodtmixstep %f', - units='NA', - ), + onedtfarange=dict(argstr="-onedtfarange %s", units="NA",), + onedtfastep=dict(argstr="-onedtfastep %f", units="NA",), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + scheme_file=dict(argstr="-schemefile %s", extensions=None, mandatory=True,), + seed=dict(argstr="-seed %f", units="NA",), + snr=dict(argstr="-snr %f", units="NA",), + trace=dict(argstr="-trace %f", units="NA",), + twodtanglerange=dict(argstr="-twodtanglerange %s", units="NA",), + twodtanglestep=dict(argstr="-twodtanglestep %f", units="NA",), + twodtfarange=dict(argstr="-twodtfarange %s", units="NA",), + twodtfastep=dict(argstr="-twodtfastep %f", units="NA",), + twodtmixmax=dict(argstr="-twodtmixmax %f", units="NA",), + twodtmixstep=dict(argstr="-twodtmixstep %f", units="NA",), ) inputs = SFPICOCalibData.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SFPICOCalibData_outputs(): output_map = dict( - PICOCalib=dict(extensions=None, ), - calib_info=dict(extensions=None, ), + PICOCalib=dict(extensions=None,), calib_info=dict(extensions=None,), ) outputs = SFPICOCalibData.output_spec() diff --git a/nipype/interfaces/camino/tests/test_auto_SFPeaks.py b/nipype/interfaces/camino/tests/test_auto_SFPeaks.py index ef382ff133..8012e56d5d 100644 --- a/nipype/interfaces/camino/tests/test_auto_SFPeaks.py +++ b/nipype/interfaces/camino/tests/test_auto_SFPeaks.py @@ -4,75 +4,32 @@ def test_SFPeaks_inputs(): input_map = dict( - args=dict(argstr='%s', ), - density=dict( - argstr='-density %d', - units='NA', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-inputfile %s', - extensions=None, - 
mandatory=True, - ), - inputmodel=dict( - argstr='-inputmodel %s', - mandatory=True, - ), - mepointset=dict( - argstr='-mepointset %d', - units='NA', - ), - noconsistencycheck=dict(argstr='-noconsistencycheck', ), - numpds=dict( - argstr='-numpds %d', - units='NA', - ), - order=dict( - argstr='-order %d', - units='NA', - ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - pdthresh=dict( - argstr='-pdthresh %f', - units='NA', - ), - pointset=dict( - argstr='-pointset %d', - units='NA', - ), - rbfpointset=dict( - argstr='-rbfpointset %d', - units='NA', - ), - scheme_file=dict( - argstr='%s', - extensions=None, - ), - searchradius=dict( - argstr='-searchradius %f', - units='NA', - ), - stdsfrommean=dict( - argstr='-stdsfrommean %f', - units='NA', - ), + args=dict(argstr="%s",), + density=dict(argstr="-density %d", units="NA",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-inputfile %s", extensions=None, mandatory=True,), + inputmodel=dict(argstr="-inputmodel %s", mandatory=True,), + mepointset=dict(argstr="-mepointset %d", units="NA",), + noconsistencycheck=dict(argstr="-noconsistencycheck",), + numpds=dict(argstr="-numpds %d", units="NA",), + order=dict(argstr="-order %d", units="NA",), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + pdthresh=dict(argstr="-pdthresh %f", units="NA",), + pointset=dict(argstr="-pointset %d", units="NA",), + rbfpointset=dict(argstr="-rbfpointset %d", units="NA",), + scheme_file=dict(argstr="%s", extensions=None,), + searchradius=dict(argstr="-searchradius %f", units="NA",), + stdsfrommean=dict(argstr="-stdsfrommean %f", units="NA",), ) inputs = SFPeaks.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SFPeaks_outputs(): - output_map = dict(peaks=dict(extensions=None, ), ) + output_map = dict(peaks=dict(extensions=None,),) outputs = SFPeaks.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_Shredder.py b/nipype/interfaces/camino/tests/test_auto_Shredder.py index 81a78f2feb..c7e82afbad 100644 --- a/nipype/interfaces/camino/tests/test_auto_Shredder.py +++ b/nipype/interfaces/camino/tests/test_auto_Shredder.py @@ -4,46 +4,23 @@ def test_Shredder_inputs(): input_map = dict( - args=dict(argstr='%s', ), - chunksize=dict( - argstr='%d', - position=2, - units='NA', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='< %s', - extensions=None, - mandatory=True, - position=-2, - ), - offset=dict( - argstr='%d', - position=1, - units='NA', - ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - space=dict( - argstr='%d', - position=3, - units='NA', - ), + args=dict(argstr="%s",), + chunksize=dict(argstr="%d", position=2, units="NA",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="< %s", extensions=None, mandatory=True, position=-2,), + offset=dict(argstr="%d", position=1, units="NA",), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + space=dict(argstr="%d", position=3, units="NA",), ) inputs = Shredder.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Shredder_outputs(): - output_map = dict(shredded=dict(extensions=None, ), ) + output_map = 
dict(shredded=dict(extensions=None,),) outputs = Shredder.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_Track.py b/nipype/interfaces/camino/tests/test_auto_Track.py index 2b08ecf619..99f42e95c7 100644 --- a/nipype/interfaces/camino/tests/test_auto_Track.py +++ b/nipype/interfaces/camino/tests/test_auto_Track.py @@ -4,83 +4,40 @@ def test_Track_inputs(): input_map = dict( - anisfile=dict( - argstr='-anisfile %s', - extensions=None, - ), - anisthresh=dict(argstr='-anisthresh %f', ), - args=dict(argstr='%s', ), - curveinterval=dict( - argstr='-curveinterval %f', - requires=['curvethresh'], - ), - curvethresh=dict(argstr='-curvethresh %f', ), - data_dims=dict( - argstr='-datadims %s', - units='voxels', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - gzip=dict(argstr='-gzip', ), - in_file=dict( - argstr='-inputfile %s', - extensions=None, - position=1, - ), - inputdatatype=dict(argstr='-inputdatatype %s', ), - inputmodel=dict( - argstr='-inputmodel %s', - usedefault=True, - ), - interpolator=dict(argstr='-interpolator %s', ), - ipthresh=dict(argstr='-ipthresh %f', ), - maxcomponents=dict( - argstr='-maxcomponents %d', - units='NA', - ), - numpds=dict( - argstr='-numpds %d', - units='NA', - ), + anisfile=dict(argstr="-anisfile %s", extensions=None,), + anisthresh=dict(argstr="-anisthresh %f",), + args=dict(argstr="%s",), + curveinterval=dict(argstr="-curveinterval %f", requires=["curvethresh"],), + curvethresh=dict(argstr="-curvethresh %f",), + data_dims=dict(argstr="-datadims %s", units="voxels",), + environ=dict(nohash=True, usedefault=True,), + gzip=dict(argstr="-gzip",), + in_file=dict(argstr="-inputfile %s", extensions=None, position=1,), + inputdatatype=dict(argstr="-inputdatatype %s",), + inputmodel=dict(argstr="-inputmodel %s", usedefault=True,), + interpolator=dict(argstr="-interpolator %s",), + ipthresh=dict(argstr="-ipthresh %f",), + maxcomponents=dict(argstr="-maxcomponents %d", units="NA",), + numpds=dict(argstr="-numpds %d", units="NA",), out_file=dict( - argstr='-outputfile %s', - extensions=None, - genfile=True, - position=-1, - ), - output_root=dict( - argstr='-outputroot %s', - extensions=None, - position=-1, - ), - outputtracts=dict(argstr='-outputtracts %s', ), - seed_file=dict( - argstr='-seedfile %s', - extensions=None, - position=2, - ), - stepsize=dict( - argstr='-stepsize %f', - requires=['tracker'], - ), - tracker=dict( - argstr='-tracker %s', - usedefault=True, - ), - voxel_dims=dict( - argstr='-voxeldims %s', - units='mm', - ), + argstr="-outputfile %s", extensions=None, genfile=True, position=-1, + ), + output_root=dict(argstr="-outputroot %s", extensions=None, position=-1,), + outputtracts=dict(argstr="-outputtracts %s",), + seed_file=dict(argstr="-seedfile %s", extensions=None, position=2,), + stepsize=dict(argstr="-stepsize %f", requires=["tracker"],), + tracker=dict(argstr="-tracker %s", usedefault=True,), + voxel_dims=dict(argstr="-voxeldims %s", units="mm",), ) inputs = Track.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Track_outputs(): - output_map = dict(tracked=dict(extensions=None, ), ) + output_map = dict(tracked=dict(extensions=None,),) outputs = Track.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_TrackBallStick.py b/nipype/interfaces/camino/tests/test_auto_TrackBallStick.py index 
d5514b301b..ff13fbe241 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackBallStick.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackBallStick.py @@ -4,83 +4,40 @@ def test_TrackBallStick_inputs(): input_map = dict( - anisfile=dict( - argstr='-anisfile %s', - extensions=None, - ), - anisthresh=dict(argstr='-anisthresh %f', ), - args=dict(argstr='%s', ), - curveinterval=dict( - argstr='-curveinterval %f', - requires=['curvethresh'], - ), - curvethresh=dict(argstr='-curvethresh %f', ), - data_dims=dict( - argstr='-datadims %s', - units='voxels', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - gzip=dict(argstr='-gzip', ), - in_file=dict( - argstr='-inputfile %s', - extensions=None, - position=1, - ), - inputdatatype=dict(argstr='-inputdatatype %s', ), - inputmodel=dict( - argstr='-inputmodel %s', - usedefault=True, - ), - interpolator=dict(argstr='-interpolator %s', ), - ipthresh=dict(argstr='-ipthresh %f', ), - maxcomponents=dict( - argstr='-maxcomponents %d', - units='NA', - ), - numpds=dict( - argstr='-numpds %d', - units='NA', - ), + anisfile=dict(argstr="-anisfile %s", extensions=None,), + anisthresh=dict(argstr="-anisthresh %f",), + args=dict(argstr="%s",), + curveinterval=dict(argstr="-curveinterval %f", requires=["curvethresh"],), + curvethresh=dict(argstr="-curvethresh %f",), + data_dims=dict(argstr="-datadims %s", units="voxels",), + environ=dict(nohash=True, usedefault=True,), + gzip=dict(argstr="-gzip",), + in_file=dict(argstr="-inputfile %s", extensions=None, position=1,), + inputdatatype=dict(argstr="-inputdatatype %s",), + inputmodel=dict(argstr="-inputmodel %s", usedefault=True,), + interpolator=dict(argstr="-interpolator %s",), + ipthresh=dict(argstr="-ipthresh %f",), + maxcomponents=dict(argstr="-maxcomponents %d", units="NA",), + numpds=dict(argstr="-numpds %d", units="NA",), out_file=dict( - argstr='-outputfile %s', - extensions=None, - genfile=True, - position=-1, - ), - output_root=dict( - argstr='-outputroot %s', - extensions=None, - position=-1, - ), - outputtracts=dict(argstr='-outputtracts %s', ), - seed_file=dict( - argstr='-seedfile %s', - extensions=None, - position=2, - ), - stepsize=dict( - argstr='-stepsize %f', - requires=['tracker'], - ), - tracker=dict( - argstr='-tracker %s', - usedefault=True, - ), - voxel_dims=dict( - argstr='-voxeldims %s', - units='mm', - ), + argstr="-outputfile %s", extensions=None, genfile=True, position=-1, + ), + output_root=dict(argstr="-outputroot %s", extensions=None, position=-1,), + outputtracts=dict(argstr="-outputtracts %s",), + seed_file=dict(argstr="-seedfile %s", extensions=None, position=2,), + stepsize=dict(argstr="-stepsize %f", requires=["tracker"],), + tracker=dict(argstr="-tracker %s", usedefault=True,), + voxel_dims=dict(argstr="-voxeldims %s", units="mm",), ) inputs = TrackBallStick.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TrackBallStick_outputs(): - output_map = dict(tracked=dict(extensions=None, ), ) + output_map = dict(tracked=dict(extensions=None,),) outputs = TrackBallStick.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_TrackBayesDirac.py b/nipype/interfaces/camino/tests/test_auto_TrackBayesDirac.py index d2b4b27dd3..7f174486d5 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackBayesDirac.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackBayesDirac.py @@ -4,102 +4,49 @@ 
def test_TrackBayesDirac_inputs(): input_map = dict( - anisfile=dict( - argstr='-anisfile %s', - extensions=None, - ), - anisthresh=dict(argstr='-anisthresh %f', ), - args=dict(argstr='%s', ), - curveinterval=dict( - argstr='-curveinterval %f', - requires=['curvethresh'], - ), - curvepriorg=dict(argstr='-curvepriorg %G', ), - curvepriork=dict(argstr='-curvepriork %G', ), - curvethresh=dict(argstr='-curvethresh %f', ), - data_dims=dict( - argstr='-datadims %s', - units='voxels', - ), - datamodel=dict(argstr='-datamodel %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - extpriordatatype=dict(argstr='-extpriordatatype %s', ), - extpriorfile=dict( - argstr='-extpriorfile %s', - extensions=None, - ), - gzip=dict(argstr='-gzip', ), - in_file=dict( - argstr='-inputfile %s', - extensions=None, - position=1, - ), - inputdatatype=dict(argstr='-inputdatatype %s', ), - inputmodel=dict( - argstr='-inputmodel %s', - usedefault=True, - ), - interpolator=dict(argstr='-interpolator %s', ), - ipthresh=dict(argstr='-ipthresh %f', ), - iterations=dict( - argstr='-iterations %d', - units='NA', - ), - maxcomponents=dict( - argstr='-maxcomponents %d', - units='NA', - ), - numpds=dict( - argstr='-numpds %d', - units='NA', - ), + anisfile=dict(argstr="-anisfile %s", extensions=None,), + anisthresh=dict(argstr="-anisthresh %f",), + args=dict(argstr="%s",), + curveinterval=dict(argstr="-curveinterval %f", requires=["curvethresh"],), + curvepriorg=dict(argstr="-curvepriorg %G",), + curvepriork=dict(argstr="-curvepriork %G",), + curvethresh=dict(argstr="-curvethresh %f",), + data_dims=dict(argstr="-datadims %s", units="voxels",), + datamodel=dict(argstr="-datamodel %s",), + environ=dict(nohash=True, usedefault=True,), + extpriordatatype=dict(argstr="-extpriordatatype %s",), + extpriorfile=dict(argstr="-extpriorfile %s", extensions=None,), + gzip=dict(argstr="-gzip",), + in_file=dict(argstr="-inputfile %s", extensions=None, position=1,), + inputdatatype=dict(argstr="-inputdatatype %s",), + inputmodel=dict(argstr="-inputmodel %s", usedefault=True,), + interpolator=dict(argstr="-interpolator %s",), + ipthresh=dict(argstr="-ipthresh %f",), + iterations=dict(argstr="-iterations %d", units="NA",), + maxcomponents=dict(argstr="-maxcomponents %d", units="NA",), + numpds=dict(argstr="-numpds %d", units="NA",), out_file=dict( - argstr='-outputfile %s', - extensions=None, - genfile=True, - position=-1, - ), - output_root=dict( - argstr='-outputroot %s', - extensions=None, - position=-1, - ), - outputtracts=dict(argstr='-outputtracts %s', ), - pdf=dict(argstr='-pdf %s', ), - pointset=dict(argstr='-pointset %s', ), - scheme_file=dict( - argstr='-schemefile %s', - extensions=None, - mandatory=True, - ), - seed_file=dict( - argstr='-seedfile %s', - extensions=None, - position=2, - ), - stepsize=dict( - argstr='-stepsize %f', - requires=['tracker'], - ), - tracker=dict( - argstr='-tracker %s', - usedefault=True, - ), - voxel_dims=dict( - argstr='-voxeldims %s', - units='mm', - ), + argstr="-outputfile %s", extensions=None, genfile=True, position=-1, + ), + output_root=dict(argstr="-outputroot %s", extensions=None, position=-1,), + outputtracts=dict(argstr="-outputtracts %s",), + pdf=dict(argstr="-pdf %s",), + pointset=dict(argstr="-pointset %s",), + scheme_file=dict(argstr="-schemefile %s", extensions=None, mandatory=True,), + seed_file=dict(argstr="-seedfile %s", extensions=None, position=2,), + stepsize=dict(argstr="-stepsize %f", requires=["tracker"],), + tracker=dict(argstr="-tracker %s", usedefault=True,), + 
voxel_dims=dict(argstr="-voxeldims %s", units="mm",), ) inputs = TrackBayesDirac.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TrackBayesDirac_outputs(): - output_map = dict(tracked=dict(extensions=None, ), ) + output_map = dict(tracked=dict(extensions=None,),) outputs = TrackBayesDirac.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_TrackBedpostxDeter.py b/nipype/interfaces/camino/tests/test_auto_TrackBedpostxDeter.py index f137fda109..91489a1d84 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackBedpostxDeter.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackBedpostxDeter.py @@ -4,91 +4,42 @@ def test_TrackBedpostxDeter_inputs(): input_map = dict( - anisfile=dict( - argstr='-anisfile %s', - extensions=None, - ), - anisthresh=dict(argstr='-anisthresh %f', ), - args=dict(argstr='%s', ), - bedpostxdir=dict( - argstr='-bedpostxdir %s', - mandatory=True, - ), - curveinterval=dict( - argstr='-curveinterval %f', - requires=['curvethresh'], - ), - curvethresh=dict(argstr='-curvethresh %f', ), - data_dims=dict( - argstr='-datadims %s', - units='voxels', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - gzip=dict(argstr='-gzip', ), - in_file=dict( - argstr='-inputfile %s', - extensions=None, - position=1, - ), - inputdatatype=dict(argstr='-inputdatatype %s', ), - inputmodel=dict( - argstr='-inputmodel %s', - usedefault=True, - ), - interpolator=dict(argstr='-interpolator %s', ), - ipthresh=dict(argstr='-ipthresh %f', ), - maxcomponents=dict( - argstr='-maxcomponents %d', - units='NA', - ), - min_vol_frac=dict( - argstr='-bedpostxminf %d', - units='NA', - ), - numpds=dict( - argstr='-numpds %d', - units='NA', - ), + anisfile=dict(argstr="-anisfile %s", extensions=None,), + anisthresh=dict(argstr="-anisthresh %f",), + args=dict(argstr="%s",), + bedpostxdir=dict(argstr="-bedpostxdir %s", mandatory=True,), + curveinterval=dict(argstr="-curveinterval %f", requires=["curvethresh"],), + curvethresh=dict(argstr="-curvethresh %f",), + data_dims=dict(argstr="-datadims %s", units="voxels",), + environ=dict(nohash=True, usedefault=True,), + gzip=dict(argstr="-gzip",), + in_file=dict(argstr="-inputfile %s", extensions=None, position=1,), + inputdatatype=dict(argstr="-inputdatatype %s",), + inputmodel=dict(argstr="-inputmodel %s", usedefault=True,), + interpolator=dict(argstr="-interpolator %s",), + ipthresh=dict(argstr="-ipthresh %f",), + maxcomponents=dict(argstr="-maxcomponents %d", units="NA",), + min_vol_frac=dict(argstr="-bedpostxminf %d", units="NA",), + numpds=dict(argstr="-numpds %d", units="NA",), out_file=dict( - argstr='-outputfile %s', - extensions=None, - genfile=True, - position=-1, - ), - output_root=dict( - argstr='-outputroot %s', - extensions=None, - position=-1, - ), - outputtracts=dict(argstr='-outputtracts %s', ), - seed_file=dict( - argstr='-seedfile %s', - extensions=None, - position=2, - ), - stepsize=dict( - argstr='-stepsize %f', - requires=['tracker'], - ), - tracker=dict( - argstr='-tracker %s', - usedefault=True, - ), - voxel_dims=dict( - argstr='-voxeldims %s', - units='mm', - ), + argstr="-outputfile %s", extensions=None, genfile=True, position=-1, + ), + output_root=dict(argstr="-outputroot %s", extensions=None, position=-1,), + outputtracts=dict(argstr="-outputtracts %s",), + seed_file=dict(argstr="-seedfile %s", extensions=None, position=2,), + 
stepsize=dict(argstr="-stepsize %f", requires=["tracker"],), + tracker=dict(argstr="-tracker %s", usedefault=True,), + voxel_dims=dict(argstr="-voxeldims %s", units="mm",), ) inputs = TrackBedpostxDeter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TrackBedpostxDeter_outputs(): - output_map = dict(tracked=dict(extensions=None, ), ) + output_map = dict(tracked=dict(extensions=None,),) outputs = TrackBedpostxDeter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_TrackBedpostxProba.py b/nipype/interfaces/camino/tests/test_auto_TrackBedpostxProba.py index f23fb143d4..92f02879da 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackBedpostxProba.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackBedpostxProba.py @@ -4,95 +4,43 @@ def test_TrackBedpostxProba_inputs(): input_map = dict( - anisfile=dict( - argstr='-anisfile %s', - extensions=None, - ), - anisthresh=dict(argstr='-anisthresh %f', ), - args=dict(argstr='%s', ), - bedpostxdir=dict( - argstr='-bedpostxdir %s', - mandatory=True, - ), - curveinterval=dict( - argstr='-curveinterval %f', - requires=['curvethresh'], - ), - curvethresh=dict(argstr='-curvethresh %f', ), - data_dims=dict( - argstr='-datadims %s', - units='voxels', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - gzip=dict(argstr='-gzip', ), - in_file=dict( - argstr='-inputfile %s', - extensions=None, - position=1, - ), - inputdatatype=dict(argstr='-inputdatatype %s', ), - inputmodel=dict( - argstr='-inputmodel %s', - usedefault=True, - ), - interpolator=dict(argstr='-interpolator %s', ), - ipthresh=dict(argstr='-ipthresh %f', ), - iterations=dict( - argstr='-iterations %d', - units='NA', - ), - maxcomponents=dict( - argstr='-maxcomponents %d', - units='NA', - ), - min_vol_frac=dict( - argstr='-bedpostxminf %d', - units='NA', - ), - numpds=dict( - argstr='-numpds %d', - units='NA', - ), + anisfile=dict(argstr="-anisfile %s", extensions=None,), + anisthresh=dict(argstr="-anisthresh %f",), + args=dict(argstr="%s",), + bedpostxdir=dict(argstr="-bedpostxdir %s", mandatory=True,), + curveinterval=dict(argstr="-curveinterval %f", requires=["curvethresh"],), + curvethresh=dict(argstr="-curvethresh %f",), + data_dims=dict(argstr="-datadims %s", units="voxels",), + environ=dict(nohash=True, usedefault=True,), + gzip=dict(argstr="-gzip",), + in_file=dict(argstr="-inputfile %s", extensions=None, position=1,), + inputdatatype=dict(argstr="-inputdatatype %s",), + inputmodel=dict(argstr="-inputmodel %s", usedefault=True,), + interpolator=dict(argstr="-interpolator %s",), + ipthresh=dict(argstr="-ipthresh %f",), + iterations=dict(argstr="-iterations %d", units="NA",), + maxcomponents=dict(argstr="-maxcomponents %d", units="NA",), + min_vol_frac=dict(argstr="-bedpostxminf %d", units="NA",), + numpds=dict(argstr="-numpds %d", units="NA",), out_file=dict( - argstr='-outputfile %s', - extensions=None, - genfile=True, - position=-1, - ), - output_root=dict( - argstr='-outputroot %s', - extensions=None, - position=-1, - ), - outputtracts=dict(argstr='-outputtracts %s', ), - seed_file=dict( - argstr='-seedfile %s', - extensions=None, - position=2, - ), - stepsize=dict( - argstr='-stepsize %f', - requires=['tracker'], - ), - tracker=dict( - argstr='-tracker %s', - usedefault=True, - ), - voxel_dims=dict( - argstr='-voxeldims %s', - units='mm', - ), + argstr="-outputfile %s", 
extensions=None, genfile=True, position=-1, + ), + output_root=dict(argstr="-outputroot %s", extensions=None, position=-1,), + outputtracts=dict(argstr="-outputtracts %s",), + seed_file=dict(argstr="-seedfile %s", extensions=None, position=2,), + stepsize=dict(argstr="-stepsize %f", requires=["tracker"],), + tracker=dict(argstr="-tracker %s", usedefault=True,), + voxel_dims=dict(argstr="-voxeldims %s", units="mm",), ) inputs = TrackBedpostxProba.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TrackBedpostxProba_outputs(): - output_map = dict(tracked=dict(extensions=None, ), ) + output_map = dict(tracked=dict(extensions=None,),) outputs = TrackBedpostxProba.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_TrackBootstrap.py b/nipype/interfaces/camino/tests/test_auto_TrackBootstrap.py index 589d7afe9c..8cd35bab8a 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackBootstrap.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackBootstrap.py @@ -4,101 +4,45 @@ def test_TrackBootstrap_inputs(): input_map = dict( - anisfile=dict( - argstr='-anisfile %s', - extensions=None, - ), - anisthresh=dict(argstr='-anisthresh %f', ), - args=dict(argstr='%s', ), - bgmask=dict( - argstr='-bgmask %s', - extensions=None, - ), - bsdatafiles=dict( - argstr='-bsdatafile %s', - mandatory=True, - ), - curveinterval=dict( - argstr='-curveinterval %f', - requires=['curvethresh'], - ), - curvethresh=dict(argstr='-curvethresh %f', ), - data_dims=dict( - argstr='-datadims %s', - units='voxels', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - gzip=dict(argstr='-gzip', ), - in_file=dict( - argstr='-inputfile %s', - extensions=None, - position=1, - ), - inputdatatype=dict(argstr='-inputdatatype %s', ), - inputmodel=dict( - argstr='-inputmodel %s', - usedefault=True, - ), - interpolator=dict(argstr='-interpolator %s', ), - inversion=dict(argstr='-inversion %s', ), - ipthresh=dict(argstr='-ipthresh %f', ), - iterations=dict( - argstr='-iterations %d', - units='NA', - ), - maxcomponents=dict( - argstr='-maxcomponents %d', - units='NA', - ), - numpds=dict( - argstr='-numpds %d', - units='NA', - ), + anisfile=dict(argstr="-anisfile %s", extensions=None,), + anisthresh=dict(argstr="-anisthresh %f",), + args=dict(argstr="%s",), + bgmask=dict(argstr="-bgmask %s", extensions=None,), + bsdatafiles=dict(argstr="-bsdatafile %s", mandatory=True,), + curveinterval=dict(argstr="-curveinterval %f", requires=["curvethresh"],), + curvethresh=dict(argstr="-curvethresh %f",), + data_dims=dict(argstr="-datadims %s", units="voxels",), + environ=dict(nohash=True, usedefault=True,), + gzip=dict(argstr="-gzip",), + in_file=dict(argstr="-inputfile %s", extensions=None, position=1,), + inputdatatype=dict(argstr="-inputdatatype %s",), + inputmodel=dict(argstr="-inputmodel %s", usedefault=True,), + interpolator=dict(argstr="-interpolator %s",), + inversion=dict(argstr="-inversion %s",), + ipthresh=dict(argstr="-ipthresh %f",), + iterations=dict(argstr="-iterations %d", units="NA",), + maxcomponents=dict(argstr="-maxcomponents %d", units="NA",), + numpds=dict(argstr="-numpds %d", units="NA",), out_file=dict( - argstr='-outputfile %s', - extensions=None, - genfile=True, - position=-1, - ), - output_root=dict( - argstr='-outputroot %s', - extensions=None, - position=-1, - ), - outputtracts=dict(argstr='-outputtracts %s', ), - scheme_file=dict( - 
argstr='-schemefile %s', - extensions=None, - mandatory=True, - ), - seed_file=dict( - argstr='-seedfile %s', - extensions=None, - position=2, - ), - stepsize=dict( - argstr='-stepsize %f', - requires=['tracker'], - ), - tracker=dict( - argstr='-tracker %s', - usedefault=True, - ), - voxel_dims=dict( - argstr='-voxeldims %s', - units='mm', - ), + argstr="-outputfile %s", extensions=None, genfile=True, position=-1, + ), + output_root=dict(argstr="-outputroot %s", extensions=None, position=-1,), + outputtracts=dict(argstr="-outputtracts %s",), + scheme_file=dict(argstr="-schemefile %s", extensions=None, mandatory=True,), + seed_file=dict(argstr="-seedfile %s", extensions=None, position=2,), + stepsize=dict(argstr="-stepsize %f", requires=["tracker"],), + tracker=dict(argstr="-tracker %s", usedefault=True,), + voxel_dims=dict(argstr="-voxeldims %s", units="mm",), ) inputs = TrackBootstrap.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TrackBootstrap_outputs(): - output_map = dict(tracked=dict(extensions=None, ), ) + output_map = dict(tracked=dict(extensions=None,),) outputs = TrackBootstrap.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_TrackDT.py b/nipype/interfaces/camino/tests/test_auto_TrackDT.py index 4b95e8f53f..736dc5fc5f 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackDT.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackDT.py @@ -4,83 +4,40 @@ def test_TrackDT_inputs(): input_map = dict( - anisfile=dict( - argstr='-anisfile %s', - extensions=None, - ), - anisthresh=dict(argstr='-anisthresh %f', ), - args=dict(argstr='%s', ), - curveinterval=dict( - argstr='-curveinterval %f', - requires=['curvethresh'], - ), - curvethresh=dict(argstr='-curvethresh %f', ), - data_dims=dict( - argstr='-datadims %s', - units='voxels', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - gzip=dict(argstr='-gzip', ), - in_file=dict( - argstr='-inputfile %s', - extensions=None, - position=1, - ), - inputdatatype=dict(argstr='-inputdatatype %s', ), - inputmodel=dict( - argstr='-inputmodel %s', - usedefault=True, - ), - interpolator=dict(argstr='-interpolator %s', ), - ipthresh=dict(argstr='-ipthresh %f', ), - maxcomponents=dict( - argstr='-maxcomponents %d', - units='NA', - ), - numpds=dict( - argstr='-numpds %d', - units='NA', - ), + anisfile=dict(argstr="-anisfile %s", extensions=None,), + anisthresh=dict(argstr="-anisthresh %f",), + args=dict(argstr="%s",), + curveinterval=dict(argstr="-curveinterval %f", requires=["curvethresh"],), + curvethresh=dict(argstr="-curvethresh %f",), + data_dims=dict(argstr="-datadims %s", units="voxels",), + environ=dict(nohash=True, usedefault=True,), + gzip=dict(argstr="-gzip",), + in_file=dict(argstr="-inputfile %s", extensions=None, position=1,), + inputdatatype=dict(argstr="-inputdatatype %s",), + inputmodel=dict(argstr="-inputmodel %s", usedefault=True,), + interpolator=dict(argstr="-interpolator %s",), + ipthresh=dict(argstr="-ipthresh %f",), + maxcomponents=dict(argstr="-maxcomponents %d", units="NA",), + numpds=dict(argstr="-numpds %d", units="NA",), out_file=dict( - argstr='-outputfile %s', - extensions=None, - genfile=True, - position=-1, - ), - output_root=dict( - argstr='-outputroot %s', - extensions=None, - position=-1, - ), - outputtracts=dict(argstr='-outputtracts %s', ), - seed_file=dict( - argstr='-seedfile %s', - extensions=None, - 
position=2, - ), - stepsize=dict( - argstr='-stepsize %f', - requires=['tracker'], - ), - tracker=dict( - argstr='-tracker %s', - usedefault=True, - ), - voxel_dims=dict( - argstr='-voxeldims %s', - units='mm', - ), + argstr="-outputfile %s", extensions=None, genfile=True, position=-1, + ), + output_root=dict(argstr="-outputroot %s", extensions=None, position=-1,), + outputtracts=dict(argstr="-outputtracts %s",), + seed_file=dict(argstr="-seedfile %s", extensions=None, position=2,), + stepsize=dict(argstr="-stepsize %f", requires=["tracker"],), + tracker=dict(argstr="-tracker %s", usedefault=True,), + voxel_dims=dict(argstr="-voxeldims %s", units="mm",), ) inputs = TrackDT.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TrackDT_outputs(): - output_map = dict(tracked=dict(extensions=None, ), ) + output_map = dict(tracked=dict(extensions=None,),) outputs = TrackDT.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_TrackPICo.py b/nipype/interfaces/camino/tests/test_auto_TrackPICo.py index b3c69bc0b7..37d4a95179 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackPICo.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackPICo.py @@ -4,88 +4,42 @@ def test_TrackPICo_inputs(): input_map = dict( - anisfile=dict( - argstr='-anisfile %s', - extensions=None, - ), - anisthresh=dict(argstr='-anisthresh %f', ), - args=dict(argstr='%s', ), - curveinterval=dict( - argstr='-curveinterval %f', - requires=['curvethresh'], - ), - curvethresh=dict(argstr='-curvethresh %f', ), - data_dims=dict( - argstr='-datadims %s', - units='voxels', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - gzip=dict(argstr='-gzip', ), - in_file=dict( - argstr='-inputfile %s', - extensions=None, - position=1, - ), - inputdatatype=dict(argstr='-inputdatatype %s', ), - inputmodel=dict( - argstr='-inputmodel %s', - usedefault=True, - ), - interpolator=dict(argstr='-interpolator %s', ), - ipthresh=dict(argstr='-ipthresh %f', ), - iterations=dict( - argstr='-iterations %d', - units='NA', - ), - maxcomponents=dict( - argstr='-maxcomponents %d', - units='NA', - ), - numpds=dict( - argstr='-numpds %d', - units='NA', - ), + anisfile=dict(argstr="-anisfile %s", extensions=None,), + anisthresh=dict(argstr="-anisthresh %f",), + args=dict(argstr="%s",), + curveinterval=dict(argstr="-curveinterval %f", requires=["curvethresh"],), + curvethresh=dict(argstr="-curvethresh %f",), + data_dims=dict(argstr="-datadims %s", units="voxels",), + environ=dict(nohash=True, usedefault=True,), + gzip=dict(argstr="-gzip",), + in_file=dict(argstr="-inputfile %s", extensions=None, position=1,), + inputdatatype=dict(argstr="-inputdatatype %s",), + inputmodel=dict(argstr="-inputmodel %s", usedefault=True,), + interpolator=dict(argstr="-interpolator %s",), + ipthresh=dict(argstr="-ipthresh %f",), + iterations=dict(argstr="-iterations %d", units="NA",), + maxcomponents=dict(argstr="-maxcomponents %d", units="NA",), + numpds=dict(argstr="-numpds %d", units="NA",), out_file=dict( - argstr='-outputfile %s', - extensions=None, - genfile=True, - position=-1, - ), - output_root=dict( - argstr='-outputroot %s', - extensions=None, - position=-1, - ), - outputtracts=dict(argstr='-outputtracts %s', ), - pdf=dict(argstr='-pdf %s', ), - seed_file=dict( - argstr='-seedfile %s', - extensions=None, - position=2, - ), - stepsize=dict( - argstr='-stepsize %f', - 
requires=['tracker'], - ), - tracker=dict( - argstr='-tracker %s', - usedefault=True, - ), - voxel_dims=dict( - argstr='-voxeldims %s', - units='mm', - ), + argstr="-outputfile %s", extensions=None, genfile=True, position=-1, + ), + output_root=dict(argstr="-outputroot %s", extensions=None, position=-1,), + outputtracts=dict(argstr="-outputtracts %s",), + pdf=dict(argstr="-pdf %s",), + seed_file=dict(argstr="-seedfile %s", extensions=None, position=2,), + stepsize=dict(argstr="-stepsize %f", requires=["tracker"],), + tracker=dict(argstr="-tracker %s", usedefault=True,), + voxel_dims=dict(argstr="-voxeldims %s", units="mm",), ) inputs = TrackPICo.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TrackPICo_outputs(): - output_map = dict(tracked=dict(extensions=None, ), ) + output_map = dict(tracked=dict(extensions=None,),) outputs = TrackPICo.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_TractShredder.py b/nipype/interfaces/camino/tests/test_auto_TractShredder.py index 150333d0eb..b8a95c9569 100644 --- a/nipype/interfaces/camino/tests/test_auto_TractShredder.py +++ b/nipype/interfaces/camino/tests/test_auto_TractShredder.py @@ -4,46 +4,23 @@ def test_TractShredder_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bunchsize=dict( - argstr='%d', - position=2, - units='NA', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='< %s', - extensions=None, - mandatory=True, - position=-2, - ), - offset=dict( - argstr='%d', - position=1, - units='NA', - ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - space=dict( - argstr='%d', - position=3, - units='NA', - ), + args=dict(argstr="%s",), + bunchsize=dict(argstr="%d", position=2, units="NA",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="< %s", extensions=None, mandatory=True, position=-2,), + offset=dict(argstr="%d", position=1, units="NA",), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + space=dict(argstr="%d", position=3, units="NA",), ) inputs = TractShredder.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TractShredder_outputs(): - output_map = dict(shredded=dict(extensions=None, ), ) + output_map = dict(shredded=dict(extensions=None,),) outputs = TractShredder.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/tests/test_auto_VtkStreamlines.py b/nipype/interfaces/camino/tests/test_auto_VtkStreamlines.py index 251c82009d..cee10a70db 100644 --- a/nipype/interfaces/camino/tests/test_auto_VtkStreamlines.py +++ b/nipype/interfaces/camino/tests/test_auto_VtkStreamlines.py @@ -4,58 +4,28 @@ def test_VtkStreamlines_inputs(): input_map = dict( - args=dict(argstr='%s', ), - colourorient=dict(argstr='-colourorient', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr=' < %s', - extensions=None, - mandatory=True, - position=-2, - ), - inputmodel=dict( - argstr='-inputmodel %s', - usedefault=True, - ), - interpolate=dict(argstr='-interpolate', ), - interpolatescalars=dict(argstr='-interpolatescalars', ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), - scalar_file=dict( - 
argstr='-scalarfile %s', - extensions=None, - position=3, - ), - seed_file=dict( - argstr='-seedfile %s', - extensions=None, - position=1, - ), - target_file=dict( - argstr='-targetfile %s', - extensions=None, - position=2, - ), - voxeldims=dict( - argstr='-voxeldims %s', - position=4, - units='mm', - ), + args=dict(argstr="%s",), + colourorient=dict(argstr="-colourorient",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr=" < %s", extensions=None, mandatory=True, position=-2,), + inputmodel=dict(argstr="-inputmodel %s", usedefault=True,), + interpolate=dict(argstr="-interpolate",), + interpolatescalars=dict(argstr="-interpolatescalars",), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), + scalar_file=dict(argstr="-scalarfile %s", extensions=None, position=3,), + seed_file=dict(argstr="-seedfile %s", extensions=None, position=1,), + target_file=dict(argstr="-targetfile %s", extensions=None, position=2,), + voxeldims=dict(argstr="-voxeldims %s", position=4, units="mm",), ) inputs = VtkStreamlines.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_VtkStreamlines_outputs(): - output_map = dict(vtk=dict(extensions=None, ), ) + output_map = dict(vtk=dict(extensions=None,),) outputs = VtkStreamlines.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino/utils.py b/nipype/interfaces/camino/utils.py index 6cfba21653..201e4e05d0 100644 --- a/nipype/interfaces/camino/utils.py +++ b/nipype/interfaces/camino/utils.py @@ -1,20 +1,29 @@ # -*- coding: utf-8 -*- import os -from ..base import (traits, TraitedSpec, File, CommandLine, - CommandLineInputSpec, InputMultiPath) +from ..base import ( + traits, + TraitedSpec, + File, + CommandLine, + CommandLineInputSpec, + InputMultiPath, +) from ...utils.filemanip import split_filename class ImageStatsInputSpec(CommandLineInputSpec): in_files = InputMultiPath( File(exists=True), - argstr='-images %s', + argstr="-images %s", mandatory=True, position=-1, - desc=('List of images to process. They must ' - 'be in the same space and have the same ' - 'dimensions.')) + desc=( + "List of images to process. They must " + "be in the same space and have the same " + "dimensions." + ), + ) stat = traits.Enum( "min", "max", @@ -23,10 +32,11 @@ class ImageStatsInputSpec(CommandLineInputSpec): "sum", "std", "var", - argstr='-stat %s', - units='NA', + argstr="-stat %s", + units="NA", mandatory=True, - desc="The statistic to compute.") + desc="The statistic to compute.", + ) out_type = traits.Enum( "float", @@ -35,21 +45,24 @@ class ImageStatsInputSpec(CommandLineInputSpec): "int", "long", "double", - argstr='-outputdatatype %s', + argstr="-outputdatatype %s", usedefault=True, - desc=('A Camino data type string, default is "float". ' - 'Type must be signed.')) + desc=('A Camino data type string, default is "float". ' "Type must be signed."), + ) output_root = File( - argstr='-outputroot %s', + argstr="-outputroot %s", mandatory=True, - desc=('Filename root prepended onto the names of the output ' - ' files. The extension will be determined from the input.')) + desc=( + "Filename root prepended onto the names of the output " + " files. The extension will be determined from the input." 
+ ), + ) class ImageStatsOutputSpec(TraitedSpec): out_file = File( - exists=True, - desc='Path of the file computed with the statistic chosen') + exists=True, desc="Path of the file computed with the statistic chosen" + ) class ImageStats(CommandLine): @@ -67,13 +80,14 @@ class ImageStats(CommandLine): >>> imstats.inputs.stat = 'max' >>> imstats.run() # doctest: +SKIP """ - _cmd = 'imagestats' + + _cmd = "imagestats" input_spec = ImageStatsInputSpec output_spec = ImageStatsOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self._gen_outfilename()) + outputs["out_file"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): diff --git a/nipype/interfaces/camino2trackvis/convert.py b/nipype/interfaces/camino2trackvis/convert.py index 3f5664b975..f4e7e7dfd1 100644 --- a/nipype/interfaces/camino2trackvis/convert.py +++ b/nipype/interfaces/camino2trackvis/convert.py @@ -12,70 +12,74 @@ class Camino2TrackvisInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='-i %s', + argstr="-i %s", mandatory=True, position=1, - desc='The input .Bfloat (camino) file.') + desc="The input .Bfloat (camino) file.", + ) out_file = File( - argstr='-o %s', + argstr="-o %s", genfile=True, position=2, - desc='The filename to which to write the .trk (trackvis) file.') + desc="The filename to which to write the .trk (trackvis) file.", + ) min_length = traits.Float( - argstr='-l %d', + argstr="-l %d", position=3, - units='mm', - desc='The minimum length of tracts to output') + units="mm", + desc="The minimum length of tracts to output", + ) data_dims = traits.List( traits.Int, - argstr='-d %s', - sep=',', + argstr="-d %s", + sep=",", mandatory=True, position=4, minlen=3, maxlen=3, - desc= - 'Three comma-separated integers giving the number of voxels along each dimension of the source scans.' + desc="Three comma-separated integers giving the number of voxels along each dimension of the source scans.", ) voxel_dims = traits.List( traits.Float, - argstr='-x %s', - sep=',', + argstr="-x %s", + sep=",", mandatory=True, position=5, minlen=3, maxlen=3, - desc= - 'Three comma-separated numbers giving the size of each voxel in mm.') + desc="Three comma-separated numbers giving the size of each voxel in mm.", + ) # Change to enum with all combinations? i.e. LAS, LPI, RAS, etc.. voxel_order = File( - argstr='--voxel-order %s', + argstr="--voxel-order %s", mandatory=True, position=6, - desc='Set the order in which various directions were stored.\ + desc="Set the order in which various directions were stored.\ Specify with three letters consisting of one each \ from the pairs LR, AP, and SI. These stand for Left-Right, \ Anterior-Posterior, and Superior-Inferior. \ Whichever is specified in each position will \ be the direction of increasing order. \ - Read coordinate system from a NIfTI file.') + Read coordinate system from a NIfTI file.", + ) nifti_file = File( - argstr='--nifti %s', + argstr="--nifti %s", exists=True, position=7, - desc='Read coordinate system from a NIfTI file.') + desc="Read coordinate system from a NIfTI file.", + ) class Camino2TrackvisOutputSpec(TraitedSpec): trackvis = File( - exists=True, - desc='The filename to which to write the .trk (trackvis) file.') + exists=True, desc="The filename to which to write the .trk (trackvis) file." 
+ ) class Camino2Trackvis(CommandLine): @@ -97,24 +101,24 @@ class Camino2Trackvis(CommandLine): >>> c2t.run() # doctest: +SKIP """ - _cmd = 'camino_to_trackvis' + _cmd = "camino_to_trackvis" input_spec = Camino2TrackvisInputSpec output_spec = Camino2TrackvisOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['trackvis'] = os.path.abspath(self._gen_outfilename()) + outputs["trackvis"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '.trk' + return name + ".trk" class Trackvis2CaminoInputSpec(CommandLineInputSpec): @@ -134,46 +138,49 @@ class Trackvis2CaminoInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='-i %s', + argstr="-i %s", mandatory=True, position=1, - desc='The input .trk (trackvis) file.') + desc="The input .trk (trackvis) file.", + ) out_file = File( - argstr='-o %s', + argstr="-o %s", genfile=True, position=2, - desc='The filename to which to write the .Bfloat (camino).') + desc="The filename to which to write the .Bfloat (camino) file.", + ) append_file = File( exists=True, - argstr='-a %s', + argstr="-a %s", position=2, - desc='A file to which the append the .Bfloat data. ') + desc="A file to which to append the .Bfloat data.", + ) class Trackvis2CaminoOutputSpec(TraitedSpec): camino = File( - exists=True, - desc='The filename to which to write the .Bfloat (camino).') + exists=True, desc="The filename to which to write the .Bfloat (camino) file." + ) class Trackvis2Camino(CommandLine): - _cmd = 'trackvis_to_camino' + _cmd = "trackvis_to_camino" input_spec = Trackvis2CaminoInputSpec output_spec = Trackvis2CaminoOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['camino'] = os.path.abspath(self._gen_outfilename()) + outputs["camino"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '.Bfloat' + return name + ".Bfloat" diff --git a/nipype/interfaces/camino2trackvis/tests/test_auto_Camino2Trackvis.py b/nipype/interfaces/camino2trackvis/tests/test_auto_Camino2Trackvis.py index a78832f1a1..3cd618eb10 100644 --- a/nipype/interfaces/camino2trackvis/tests/test_auto_Camino2Trackvis.py +++ b/nipype/interfaces/camino2trackvis/tests/test_auto_Camino2Trackvis.py @@ -4,50 +4,16 @@ def test_Camino2Trackvis_inputs(): input_map = dict( - args=dict(argstr='%s', ), - data_dims=dict( - argstr='-d %s', - mandatory=True, - position=4, - sep=',', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - position=1, - ), - min_length=dict( - argstr='-l %d', - position=3, - units='mm', - ), - nifti_file=dict( - argstr='--nifti %s', - extensions=None, - position=7, - ), - out_file=dict( - argstr='-o %s', - extensions=None, - genfile=True, - position=2, - ), - voxel_dims=dict( - argstr='-x %s', - mandatory=True, - position=5, - sep=',', - ), + args=dict(argstr="%s",), + data_dims=dict(argstr="-d %s", mandatory=True, position=4, sep=",",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-i %s", extensions=None, mandatory=True, position=1,), + min_length=dict(argstr="-l %d",
position=3, units="mm",), + nifti_file=dict(argstr="--nifti %s", extensions=None, position=7,), + out_file=dict(argstr="-o %s", extensions=None, genfile=True, position=2,), + voxel_dims=dict(argstr="-x %s", mandatory=True, position=5, sep=",",), voxel_order=dict( - argstr='--voxel-order %s', - extensions=None, - mandatory=True, - position=6, + argstr="--voxel-order %s", extensions=None, mandatory=True, position=6, ), ) inputs = Camino2Trackvis.input_spec() @@ -55,8 +21,10 @@ def test_Camino2Trackvis_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Camino2Trackvis_outputs(): - output_map = dict(trackvis=dict(extensions=None, ), ) + output_map = dict(trackvis=dict(extensions=None,),) outputs = Camino2Trackvis.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/camino2trackvis/tests/test_auto_Trackvis2Camino.py b/nipype/interfaces/camino2trackvis/tests/test_auto_Trackvis2Camino.py index 3d733966b4..b2869c08e3 100644 --- a/nipype/interfaces/camino2trackvis/tests/test_auto_Trackvis2Camino.py +++ b/nipype/interfaces/camino2trackvis/tests/test_auto_Trackvis2Camino.py @@ -4,36 +4,21 @@ def test_Trackvis2Camino_inputs(): input_map = dict( - append_file=dict( - argstr='-a %s', - extensions=None, - position=2, - ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - position=1, - ), - out_file=dict( - argstr='-o %s', - extensions=None, - genfile=True, - position=2, - ), + append_file=dict(argstr="-a %s", extensions=None, position=2,), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-i %s", extensions=None, mandatory=True, position=1,), + out_file=dict(argstr="-o %s", extensions=None, genfile=True, position=2,), ) inputs = Trackvis2Camino.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Trackvis2Camino_outputs(): - output_map = dict(camino=dict(extensions=None, ), ) + output_map = dict(camino=dict(extensions=None,),) outputs = Trackvis2Camino.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/cmtk/base.py b/nipype/interfaces/cmtk/base.py index 4aedd56bdb..17d3070504 100644 --- a/nipype/interfaces/cmtk/base.py +++ b/nipype/interfaces/cmtk/base.py @@ -8,7 +8,7 @@ class CFFBaseInterface(LibraryBaseInterface): - _pkg = 'cfflib' + _pkg = "cfflib" # Originally set in convert, nbs, nx, parcellation @@ -16,18 +16,18 @@ class CFFBaseInterface(LibraryBaseInterface): # Remove in 2.0 have_cmp = True try: - package_check('cmp') + package_check("cmp") except ImportError: have_cmp = False have_cfflib = True try: - package_check('cfflib') + package_check("cfflib") except ImportError: have_cfflib = False have_cv = True try: - package_check('cviewer') + package_check("cviewer") except ImportError: have_cv = False diff --git a/nipype/interfaces/cmtk/cmtk.py b/nipype/interfaces/cmtk/cmtk.py index 8eb038e89c..c7b34aeae7 100644 --- a/nipype/interfaces/cmtk/cmtk.py +++ b/nipype/interfaces/cmtk/cmtk.py @@ -12,9 +12,18 @@ from ...utils.filemanip import split_filename from ...utils import NUMPY_MMAP -from ..base import (BaseInterface, BaseInterfaceInputSpec, traits, File, - TraitedSpec, Directory, OutputMultiPath, isdefined) -iflogger = 
logging.getLogger('nipype.interface') +from ..base import ( + BaseInterface, + BaseInterfaceInputSpec, + traits, + File, + TraitedSpec, + Directory, + OutputMultiPath, + isdefined, +) + +iflogger = logging.getLogger("nipype.interface") def length(xyz, along=False): @@ -56,7 +65,7 @@ def length(xyz, along=False): if along: return np.array([0]) return 0 - dists = np.sqrt((np.diff(xyz, axis=0)**2).sum(axis=1)) + dists = np.sqrt((np.diff(xyz, axis=0) ** 2).sum(axis=1)) if along: return np.cumsum(dists) return np.sum(dists) @@ -73,7 +82,8 @@ def get_rois_crossed(pointsmm, roiData, voxelSize): if not roiData[x, y, z] == 0: rois_crossed.append(roiData[x, y, z]) rois_crossed = list( - dict.fromkeys(rois_crossed).keys()) # Removed duplicates from the list + dict.fromkeys(rois_crossed).keys() + ) # Removed duplicates from the list return rois_crossed @@ -101,22 +111,25 @@ def create_allpoints_cmat(streamlines, roiData, voxelSize, n_rois): pcN = int(round(float(100 * i) / n_fib)) if pcN > pc and pcN % 1 == 0: pc = pcN - print('%4.0f%%' % (pc)) + print("%4.0f%%" % (pc)) rois_crossed = get_rois_crossed(fiber[0], roiData, voxelSize) if len(rois_crossed) > 0: list_of_roi_crossed_lists.append(list(rois_crossed)) final_fiber_ids.append(i) - connectivity_matrix = get_connectivity_matrix(n_rois, - list_of_roi_crossed_lists) + connectivity_matrix = get_connectivity_matrix(n_rois, list_of_roi_crossed_lists) dis = n_fib - len(final_fiber_ids) iflogger.info( - 'Found %i (%f percent out of %i fibers) fibers that start or ' - 'terminate in a voxel which is not labeled. (orphans)', dis, - dis * 100.0 / n_fib, n_fib) - iflogger.info('Valid fibers: %i (%f percent)', n_fib - dis, - 100 - dis * 100.0 / n_fib) - iflogger.info('Returning the intersecting point connectivity matrix') + "Found %i (%f percent out of %i fibers) fibers that start or " + "terminate in a voxel which is not labeled. (orphans)", + dis, + dis * 100.0 / n_fib, + n_fib, + ) + iflogger.info( + "Valid fibers: %i (%f percent)", n_fib - dis, 100 - dis * 100.0 / n_fib + ) + iflogger.info("Returning the intersecting point connectivity matrix") return connectivity_matrix, final_fiber_ids @@ -161,29 +174,31 @@ def create_endpoints_array(fib, voxelSize): endpoints[i, 1, 2] = int(endpoints[i, 1, 2] / float(voxelSize[2])) # Return the matrices - iflogger.info('Returning the endpoint matrix') + iflogger.info("Returning the endpoint matrix") return (endpoints, endpointsmm) -def cmat(track_file, - roi_file, - resolution_network_file, - matrix_name, - matrix_mat_name, - endpoint_name, - intersections=False): +def cmat( + track_file, + roi_file, + resolution_network_file, + matrix_name, + matrix_mat_name, + endpoint_name, + intersections=False, +): """ Create the connection matrix for each resolution using fibers and ROIs. 
""" import scipy.io as sio stats = {} - iflogger.info('Running cmat function') + iflogger.info("Running cmat function") # Identify the endpoints of each fiber - en_fname = op.abspath(endpoint_name + '_endpoints.npy') - en_fnamemm = op.abspath(endpoint_name + '_endpointsmm.npy') + en_fname = op.abspath(endpoint_name + "_endpoints.npy") + en_fnamemm = op.abspath(endpoint_name + "_endpointsmm.npy") - iflogger.info('Reading Trackvis file %s', track_file) + iflogger.info("Reading Trackvis file %s", track_file) fib, hdr = nb.trackvis.read(track_file, False) - stats['orig_n_fib'] = len(fib) + stats["orig_n_fib"] = len(fib) roi = nb.load(roi_file, mmap=NUMPY_MMAP) roiData = roi.get_data() @@ -191,13 +206,13 @@ def cmat(track_file, (endpoints, endpointsmm) = create_endpoints_array(fib, roiVoxelSize) # Output endpoint arrays - iflogger.info('Saving endpoint array: %s', en_fname) + iflogger.info("Saving endpoint array: %s", en_fname) np.save(en_fname, endpoints) - iflogger.info('Saving endpoint array in mm: %s', en_fnamemm) + iflogger.info("Saving endpoint array in mm: %s", en_fnamemm) np.save(en_fnamemm, endpointsmm) n = len(fib) - iflogger.info('Number of fibers: %i', n) + iflogger.info("Number of fibers: %i", n) # Create empty fiber label array fiberlabels = np.zeros((n, 2)) @@ -206,16 +221,16 @@ def cmat(track_file, # Add node information from specified parcellation scheme path, name, ext = split_filename(resolution_network_file) - if ext == '.pck': + if ext == ".pck": gp = nx.read_gpickle(resolution_network_file) - elif ext == '.graphml': + elif ext == ".graphml": gp = nx.read_graphml(resolution_network_file) else: raise TypeError("Unable to read file:", resolution_network_file) nROIs = len(gp.nodes()) # add node information from parcellation - if 'dn_position' in gp.nodes[list(gp.nodes())[0]]: + if "dn_position" in gp.nodes[list(gp.nodes())[0]]: G = gp.copy() else: G = nx.Graph() @@ -225,39 +240,48 @@ def cmat(track_file, # ROI in voxel coordinates (segmentation volume ) xyz = tuple( np.mean( - np.where( - np.flipud(roiData) == int(d["dn_correspondence_id"])), - axis=1)) - G.nodes[int(u)]['dn_position'] = tuple([xyz[0], xyz[2], -xyz[1]]) + np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])), + axis=1, + ) + ) + G.nodes[int(u)]["dn_position"] = tuple([xyz[0], xyz[2], -xyz[1]]) if intersections: iflogger.info("Filtering tractography from intersections") intersection_matrix, final_fiber_ids = create_allpoints_cmat( - fib, roiData, roiVoxelSize, nROIs) + fib, roiData, roiVoxelSize, nROIs + ) finalfibers_fname = op.abspath( - endpoint_name + '_intersections_streamline_final.trk') - stats['intersections_n_fib'] = save_fibers(hdr, fib, finalfibers_fname, - final_fiber_ids) + endpoint_name + "_intersections_streamline_final.trk" + ) + stats["intersections_n_fib"] = save_fibers( + hdr, fib, finalfibers_fname, final_fiber_ids + ) intersection_matrix = np.matrix(intersection_matrix) I = G.copy() H = nx.from_numpy_matrix(np.matrix(intersection_matrix)) - H = nx.relabel_nodes( - H, lambda x: x + 1) # relabel nodes so they start at 1 + H = nx.relabel_nodes(H, lambda x: x + 1) # relabel nodes so they start at 1 I.add_weighted_edges_from( - ((u, v, d['weight']) for u, v, d in H.edges(data=True))) + ((u, v, d["weight"]) for u, v, d in H.edges(data=True)) + ) dis = 0 for i in range(endpoints.shape[0]): # ROI start => ROI end try: - startROI = int(roiData[endpoints[i, 0, 0], endpoints[i, 0, 1], - endpoints[i, 0, 2]]) - endROI = int(roiData[endpoints[i, 1, 0], endpoints[i, 1, 1], - endpoints[i, 1, 2]]) 
+ startROI = int( + roiData[endpoints[i, 0, 0], endpoints[i, 0, 1], endpoints[i, 0, 2]] + ) + endROI = int( + roiData[endpoints[i, 1, 0], endpoints[i, 1, 1], endpoints[i, 1, 2]] + ) except IndexError: - iflogger.error('AN INDEXERROR EXCEPTION OCCURED FOR FIBER %s. ' - 'PLEASE CHECK ENDPOINT GENERATION', i) + iflogger.error( + "AN INDEXERROR EXCEPTION OCCURRED FOR FIBER %s. " + "PLEASE CHECK ENDPOINT GENERATION", + i, + ) break # Filter @@ -270,8 +294,7 @@ def cmat(track_file, iflogger.error( "Start or endpoint of fiber terminate in a voxel which is labeled higher" ) - iflogger.error( - "than is expected by the parcellation node information.") + iflogger.error("than is expected by the parcellation node information.") iflogger.error("Start ROI: %i, End ROI: %i", startROI, endROI) iflogger.error("This needs bugfixing!") continue @@ -290,9 +313,8 @@ def cmat(track_file, final_fibers_idx.append(i) # Add edge to graph - if G.has_edge(startROI, - endROI) and 'fiblist' in G.edge[startROI][endROI]: - G.edge[startROI][endROI]['fiblist'].append(i) + if G.has_edge(startROI, endROI) and "fiblist" in G.edge[startROI][endROI]: + G.edge[startROI][endROI]["fiblist"].append(i) else: - G.add_edge(startROI, endROI, fiblist=[i]) @@ -314,10 +336,13 @@ def cmat(track_file, final_fiberlabels_array = np.array(final_fiberlabels, dtype=int) iflogger.info( - 'Found %i (%f percent out of %i fibers) fibers that start or ' - 'terminate in a voxel which is not labeled. (orphans)', dis, - dis * 100.0 / n, n) - iflogger.info('Valid fibers: %i (%f%%)', n - dis, 100 - dis * 100.0 / n) + "Found %i (%f percent out of %i fibers) fibers that start or " + "terminate in a voxel which is not labeled. (orphans)", + dis, + dis * 100.0 / n, + n, + ) + iflogger.info("Valid fibers: %i (%f%%)", n - dis, 100 - dis * 100.0 / n) numfib = nx.Graph() numfib.add_nodes_from(G) @@ -327,109 +352,108 @@ def cmat(track_file, for u, v, d in G.edges(data=True): G.remove_edge(u, v) di = {} - if 'fiblist' in d: - di['number_of_fibers'] = len(d['fiblist']) - idx = np.where((final_fiberlabels_array[:, 0] == int(u)) & - (final_fiberlabels_array[:, 1] == int(v)))[0] - di['fiber_length_mean'] = float( - np.mean(final_fiberlength_array[idx])) - di['fiber_length_median'] = float( - np.median(final_fiberlength_array[idx])) - di['fiber_length_std'] = float( - np.std(final_fiberlength_array[idx])) + if "fiblist" in d: + di["number_of_fibers"] = len(d["fiblist"]) + idx = np.where( + (final_fiberlabels_array[:, 0] == int(u)) + & (final_fiberlabels_array[:, 1] == int(v)) + )[0] + di["fiber_length_mean"] = float(np.mean(final_fiberlength_array[idx])) + di["fiber_length_median"] = float(np.median(final_fiberlength_array[idx])) + di["fiber_length_std"] = float(np.std(final_fiberlength_array[idx])) else: - di['number_of_fibers'] = 0 - di['fiber_length_mean'] = 0 - di['fiber_length_median'] = 0 - di['fiber_length_std'] = 0 + di["number_of_fibers"] = 0 + di["fiber_length_mean"] = 0 + di["fiber_length_median"] = 0 + di["fiber_length_std"] = 0 if not u == v: # Fix for self loop problem G.add_edge(u, v, **di) - if 'fiblist' in d: - numfib.add_edge(u, v, weight=di['number_of_fibers']) - fibmean.add_edge(u, v, weight=di['fiber_length_mean']) - fibmedian.add_edge(u, v, weight=di['fiber_length_median']) - fibdev.add_edge(u, v, weight=di['fiber_length_std']) + if "fiblist" in d: + numfib.add_edge(u, v, weight=di["number_of_fibers"]) + fibmean.add_edge(u, v, weight=di["fiber_length_mean"]) + fibmedian.add_edge(u, v, weight=di["fiber_length_median"]) + fibdev.add_edge(u, v,
weight=di["fiber_length_std"]) - iflogger.info('Writing network as %s', matrix_name) + iflogger.info("Writing network as %s", matrix_name) nx.write_gpickle(G, op.abspath(matrix_name)) numfib_mlab = nx.to_numpy_matrix(numfib, dtype=int) - numfib_dict = {'number_of_fibers': numfib_mlab} + numfib_dict = {"number_of_fibers": numfib_mlab} fibmean_mlab = nx.to_numpy_matrix(fibmean, dtype=np.float64) - fibmean_dict = {'mean_fiber_length': fibmean_mlab} + fibmean_dict = {"mean_fiber_length": fibmean_mlab} fibmedian_mlab = nx.to_numpy_matrix(fibmedian, dtype=np.float64) - fibmedian_dict = {'median_fiber_length': fibmedian_mlab} + fibmedian_dict = {"median_fiber_length": fibmedian_mlab} fibdev_mlab = nx.to_numpy_matrix(fibdev, dtype=np.float64) - fibdev_dict = {'fiber_length_std': fibdev_mlab} + fibdev_dict = {"fiber_length_std": fibdev_mlab} if intersections: path, name, ext = split_filename(matrix_name) - intersection_matrix_name = op.abspath(name + '_intersections') + ext - iflogger.info('Writing intersection network as %s', - intersection_matrix_name) + intersection_matrix_name = op.abspath(name + "_intersections") + ext + iflogger.info("Writing intersection network as %s", intersection_matrix_name) nx.write_gpickle(I, intersection_matrix_name) path, name, ext = split_filename(matrix_mat_name) - if not ext == '.mat': - ext = '.mat' + if not ext == ".mat": + ext = ".mat" matrix_mat_name = matrix_mat_name + ext - iflogger.info('Writing matlab matrix as %s', matrix_mat_name) + iflogger.info("Writing matlab matrix as %s", matrix_mat_name) sio.savemat(matrix_mat_name, numfib_dict) if intersections: - intersect_dict = {'intersections': intersection_matrix} - intersection_matrix_mat_name = op.abspath(name + '_intersections') + ext - iflogger.info('Writing intersection matrix as %s', - intersection_matrix_mat_name) + intersect_dict = {"intersections": intersection_matrix} + intersection_matrix_mat_name = op.abspath(name + "_intersections") + ext + iflogger.info("Writing intersection matrix as %s", intersection_matrix_mat_name) sio.savemat(intersection_matrix_mat_name, intersect_dict) - mean_fiber_length_matrix_name = op.abspath( - name + '_mean_fiber_length') + ext - iflogger.info('Writing matlab mean fiber length matrix as %s', - mean_fiber_length_matrix_name) + mean_fiber_length_matrix_name = op.abspath(name + "_mean_fiber_length") + ext + iflogger.info( + "Writing matlab mean fiber length matrix as %s", mean_fiber_length_matrix_name + ) sio.savemat(mean_fiber_length_matrix_name, fibmean_dict) - median_fiber_length_matrix_name = op.abspath( - name + '_median_fiber_length') + ext - iflogger.info('Writing matlab median fiber length matrix as %s', - median_fiber_length_matrix_name) + median_fiber_length_matrix_name = op.abspath(name + "_median_fiber_length") + ext + iflogger.info( + "Writing matlab median fiber length matrix as %s", + median_fiber_length_matrix_name, + ) sio.savemat(median_fiber_length_matrix_name, fibmedian_dict) - fiber_length_std_matrix_name = op.abspath(name + '_fiber_length_std') + ext - iflogger.info('Writing matlab fiber length deviation matrix as %s', - fiber_length_std_matrix_name) + fiber_length_std_matrix_name = op.abspath(name + "_fiber_length_std") + ext + iflogger.info( + "Writing matlab fiber length deviation matrix as %s", + fiber_length_std_matrix_name, + ) sio.savemat(fiber_length_std_matrix_name, fibdev_dict) - fiberlengths_fname = op.abspath(endpoint_name + '_final_fiberslength.npy') - iflogger.info('Storing final fiber length array as %s', fiberlengths_fname) + 
fiberlengths_fname = op.abspath(endpoint_name + "_final_fiberslength.npy") + iflogger.info("Storing final fiber length array as %s", fiberlengths_fname) np.save(fiberlengths_fname, final_fiberlength_array) - fiberlabels_fname = op.abspath(endpoint_name + '_filtered_fiberslabel.npy') - iflogger.info('Storing all fiber labels (with orphans) as %s', - fiberlabels_fname) + fiberlabels_fname = op.abspath(endpoint_name + "_filtered_fiberslabel.npy") + iflogger.info("Storing all fiber labels (with orphans) as %s", fiberlabels_fname) np.save( - fiberlabels_fname, - np.array(fiberlabels, dtype=np.int32), + fiberlabels_fname, np.array(fiberlabels, dtype=np.int32), ) - fiberlabels_noorphans_fname = op.abspath( - endpoint_name + '_final_fiberslabels.npy') - iflogger.info('Storing final fiber labels (no orphans) as %s', - fiberlabels_noorphans_fname) + fiberlabels_noorphans_fname = op.abspath(endpoint_name + "_final_fiberslabels.npy") + iflogger.info( + "Storing final fiber labels (no orphans) as %s", fiberlabels_noorphans_fname + ) np.save(fiberlabels_noorphans_fname, final_fiberlabels_array) iflogger.info("Filtering tractography - keeping only no orphan fibers") - finalfibers_fname = op.abspath(endpoint_name + '_streamline_final.trk') - stats['endpoint_n_fib'] = save_fibers(hdr, fib, finalfibers_fname, - final_fibers_idx) - stats['endpoints_percent'] = float(stats['endpoint_n_fib']) / float( - stats['orig_n_fib']) * 100 - stats['intersections_percent'] = float( - stats['intersections_n_fib']) / float(stats['orig_n_fib']) * 100 - - out_stats_file = op.abspath(endpoint_name + '_statistics.mat') - iflogger.info('Saving matrix creation statistics as %s', out_stats_file) + finalfibers_fname = op.abspath(endpoint_name + "_streamline_final.trk") + stats["endpoint_n_fib"] = save_fibers(hdr, fib, finalfibers_fname, final_fibers_idx) + stats["endpoints_percent"] = ( + float(stats["endpoint_n_fib"]) / float(stats["orig_n_fib"]) * 100 + ) + stats["intersections_percent"] = ( + float(stats["intersections_n_fib"]) / float(stats["orig_n_fib"]) * 100 + ) + + out_stats_file = op.abspath(endpoint_name + "_statistics.mat") + iflogger.info("Saving matrix creation statistics as %s", out_stats_file) sio.savemat(out_stats_file, stats) @@ -440,114 +464,119 @@ def save_fibers(oldhdr, oldfib, fname, indices): for i in indices: outstreams.append(oldfib[i]) n_fib_out = len(outstreams) - hdrnew['n_count'] = n_fib_out - iflogger.info('Writing final non-orphan fibers as %s', fname) + hdrnew["n_count"] = n_fib_out + iflogger.info("Writing final non-orphan fibers as %s", fname) nb.trackvis.write(fname, outstreams, hdrnew) return n_fib_out class CreateMatrixInputSpec(TraitedSpec): - roi_file = File( - exists=True, mandatory=True, desc='Freesurfer aparc+aseg file') - tract_file = File(exists=True, mandatory=True, desc='Trackvis tract file') + roi_file = File(exists=True, mandatory=True, desc="Freesurfer aparc+aseg file") + tract_file = File(exists=True, mandatory=True, desc="Trackvis tract file") resolution_network_file = File( exists=True, mandatory=True, - desc='Parcellation files from Connectome Mapping Toolkit') + desc="Parcellation files from Connectome Mapping Toolkit", + ) count_region_intersections = traits.Bool( False, usedefault=True, - desc= - 'Counts all of the fiber-region traversals in the connectivity matrix (requires significantly more computational time)' + desc="Counts all of the fiber-region traversals in the connectivity matrix (requires significantly more computational time)", ) out_matrix_file = File( - 
genfile=True, desc='NetworkX graph describing the connectivity') + genfile=True, desc="NetworkX graph describing the connectivity" + ) out_matrix_mat_file = File( - 'cmatrix.mat', - usedefault=True, - desc='Matlab matrix describing the connectivity') + "cmatrix.mat", usedefault=True, desc="Matlab matrix describing the connectivity" + ) out_mean_fiber_length_matrix_mat_file = File( genfile=True, - desc= - 'Matlab matrix describing the mean fiber lengths between each node.') + desc="Matlab matrix describing the mean fiber lengths between each node.", + ) out_median_fiber_length_matrix_mat_file = File( genfile=True, - desc= - 'Matlab matrix describing the mean fiber lengths between each node.') + desc="Matlab matrix describing the median fiber lengths between each node.", + ) out_fiber_length_std_matrix_mat_file = File( genfile=True, - desc= - 'Matlab matrix describing the deviation in fiber lengths connecting each node.' + desc="Matlab matrix describing the deviation in fiber lengths connecting each node.", ) out_intersection_matrix_mat_file = File( genfile=True, - desc= - 'Matlab connectivity matrix if all region/fiber intersections are counted.' + desc="Matlab connectivity matrix if all region/fiber intersections are counted.", ) out_endpoint_array_name = File( - genfile=True, desc='Name for the generated endpoint arrays') + genfile=True, desc="Name for the generated endpoint arrays" + ) class CreateMatrixOutputSpec(TraitedSpec): - matrix_file = File( - desc='NetworkX graph describing the connectivity', exists=True) + matrix_file = File(desc="NetworkX graph describing the connectivity", exists=True) intersection_matrix_file = File( - desc='NetworkX graph describing the connectivity', exists=True) + desc="NetworkX graph describing the connectivity", exists=True + ) matrix_files = OutputMultiPath( File( - desc='All of the gpickled network files output by this interface', - exists=True)) + desc="All of the gpickled network files output by this interface", + exists=True, + ) + ) matlab_matrix_files = OutputMultiPath( - File( - desc='All of the MATLAB .mat files output by this interface', - exists=True)) + File(desc="All of the MATLAB .mat files output by this interface", exists=True) + ) matrix_mat_file = File( - desc='Matlab matrix describing the connectivity', exists=True) + desc="Matlab matrix describing the connectivity", exists=True + ) intersection_matrix_mat_file = File( - desc= - 'Matlab matrix describing the mean fiber lengths between each node.', - exists=True) + desc="Matlab connectivity matrix if all region/fiber intersections are counted.", + exists=True, + ) mean_fiber_length_matrix_mat_file = File( - desc= - 'Matlab matrix describing the mean fiber lengths between each node.', - exists=True) + desc="Matlab matrix describing the mean fiber lengths between each node.", + exists=True, + ) median_fiber_length_matrix_mat_file = File( - desc= - 'Matlab matrix describing the median fiber lengths between each node.', - exists=True) + desc="Matlab matrix describing the median fiber lengths between each node.", + exists=True, + ) fiber_length_std_matrix_mat_file = File( - desc= - 'Matlab matrix describing the deviation in fiber lengths connecting each node.', - exists=True) + desc="Matlab matrix describing the deviation in fiber lengths connecting each node.", + exists=True, + ) endpoint_file = File( - desc='Saved Numpy array with the endpoints of each fiber', exists=True) + desc="Saved Numpy array with the endpoints of each fiber", exists=True + ) endpoint_file_mm = File( - desc= - 'Saved Numpy
array with the endpoints of each fiber (in millimeters)', - exists=True) + desc="Saved Numpy array with the endpoints of each fiber (in millimeters)", + exists=True, + ) fiber_length_file = File( - desc='Saved Numpy array with the lengths of each fiber', exists=True) + desc="Saved Numpy array with the lengths of each fiber", exists=True + ) fiber_label_file = File( - desc='Saved Numpy array with the labels for each fiber', exists=True) + desc="Saved Numpy array with the labels for each fiber", exists=True + ) fiber_labels_noorphans = File( - desc='Saved Numpy array with the labels for each non-orphan fiber', - exists=True) + desc="Saved Numpy array with the labels for each non-orphan fiber", exists=True + ) filtered_tractography = File( - desc= - 'TrackVis file containing only those fibers originate in one and terminate in another region', - exists=True) + desc="TrackVis file containing only those fibers that originate in one and terminate in another region", + exists=True, + ) filtered_tractography_by_intersections = File( - desc='TrackVis file containing all fibers which connect two regions', - exists=True) + desc="TrackVis file containing all fibers which connect two regions", + exists=True, + ) filtered_tractographies = OutputMultiPath( File( - desc= - 'TrackVis file containing only those fibers originate in one and terminate in another region', - exists=True)) + desc="TrackVis file containing only those fibers that originate in one and terminate in another region", + exists=True, + ) + ) stats_file = File( - desc= - 'Saved Matlab .mat file with the number of fibers saved at each stage', - exists=True) + desc="Saved Matlab .mat file with the number of fibers saved at each stage", + exists=True, + ) class CreateMatrix(BaseInterface): @@ -570,36 +599,42 @@ class CreateMatrix(BaseInterface): def _run_interface(self, runtime): if isdefined(self.inputs.out_matrix_file): path, name, _ = split_filename(self.inputs.out_matrix_file) - matrix_file = op.abspath(name + '.pck') + matrix_file = op.abspath(name + ".pck") else: - matrix_file = self._gen_outfilename('.pck') + matrix_file = self._gen_outfilename(".pck") matrix_mat_file = op.abspath(self.inputs.out_matrix_mat_file) path, name, ext = split_filename(matrix_mat_file) - if not ext == '.mat': - ext = '.mat' + if not ext == ".mat": + ext = ".mat" matrix_mat_file = matrix_mat_file + ext if isdefined(self.inputs.out_mean_fiber_length_matrix_mat_file): mean_fiber_length_matrix_mat_file = op.abspath( - self.inputs.out_mean_fiber_length_matrix_mat_file) + self.inputs.out_mean_fiber_length_matrix_mat_file + ) else: mean_fiber_length_matrix_name = op.abspath( - self._gen_outfilename('_mean_fiber_length.mat')) + self._gen_outfilename("_mean_fiber_length.mat") + ) if isdefined(self.inputs.out_median_fiber_length_matrix_mat_file): median_fiber_length_matrix_mat_file = op.abspath( - self.inputs.out_median_fiber_length_matrix_mat_file) + self.inputs.out_median_fiber_length_matrix_mat_file + ) else: median_fiber_length_matrix_name = op.abspath( - self._gen_outfilename('_median_fiber_length.mat')) + self._gen_outfilename("_median_fiber_length.mat") + ) if isdefined(self.inputs.out_fiber_length_std_matrix_mat_file): fiber_length_std_matrix_mat_file = op.abspath( - self.inputs.out_fiber_length_std_matrix_mat_file) + self.inputs.out_fiber_length_std_matrix_mat_file + ) else: fiber_length_std_matrix_name = op.abspath( - self._gen_outfilename('_fiber_length_std.mat')) + self._gen_outfilename("_fiber_length_std.mat") + ) if not
isdefined(self.inputs.out_endpoint_array_name): _, endpoint_name, _ = split_filename(self.inputs.tract_file) @@ -607,118 +642,135 @@ def _run_interface(self, runtime): else: endpoint_name = op.abspath(self.inputs.out_endpoint_array_name) - cmat(self.inputs.tract_file, self.inputs.roi_file, - self.inputs.resolution_network_file, matrix_file, matrix_mat_file, - endpoint_name, self.inputs.count_region_intersections) + cmat( + self.inputs.tract_file, + self.inputs.roi_file, + self.inputs.resolution_network_file, + matrix_file, + matrix_mat_file, + endpoint_name, + self.inputs.count_region_intersections, + ) return runtime def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_matrix_file): path, name, _ = split_filename(self.inputs.out_matrix_file) - out_matrix_file = op.abspath(name + '.pck') - out_intersection_matrix_file = op.abspath( - name + '_intersections.pck') + out_matrix_file = op.abspath(name + ".pck") + out_intersection_matrix_file = op.abspath(name + "_intersections.pck") else: - out_matrix_file = op.abspath(self._gen_outfilename('.pck')) + out_matrix_file = op.abspath(self._gen_outfilename(".pck")) out_intersection_matrix_file = op.abspath( - self._gen_outfilename('_intersections.pck')) + self._gen_outfilename("_intersections.pck") + ) - outputs['matrix_file'] = out_matrix_file - outputs['intersection_matrix_file'] = out_intersection_matrix_file + outputs["matrix_file"] = out_matrix_file + outputs["intersection_matrix_file"] = out_intersection_matrix_file matrix_mat_file = op.abspath(self.inputs.out_matrix_mat_file) path, name, ext = split_filename(matrix_mat_file) - if not ext == '.mat': - ext = '.mat' + if not ext == ".mat": + ext = ".mat" matrix_mat_file = matrix_mat_file + ext - outputs['matrix_mat_file'] = matrix_mat_file + outputs["matrix_mat_file"] = matrix_mat_file if isdefined(self.inputs.out_mean_fiber_length_matrix_mat_file): - outputs['mean_fiber_length_matrix_mat_file'] = op.abspath( - self.inputs.out_mean_fiber_length_matrix_mat_file) + outputs["mean_fiber_length_matrix_mat_file"] = op.abspath( + self.inputs.out_mean_fiber_length_matrix_mat_file + ) else: - outputs['mean_fiber_length_matrix_mat_file'] = op.abspath( - self._gen_outfilename('_mean_fiber_length.mat')) + outputs["mean_fiber_length_matrix_mat_file"] = op.abspath( + self._gen_outfilename("_mean_fiber_length.mat") + ) if isdefined(self.inputs.out_median_fiber_length_matrix_mat_file): - outputs['median_fiber_length_matrix_mat_file'] = op.abspath( - self.inputs.out_median_fiber_length_matrix_mat_file) + outputs["median_fiber_length_matrix_mat_file"] = op.abspath( + self.inputs.out_median_fiber_length_matrix_mat_file + ) else: - outputs['median_fiber_length_matrix_mat_file'] = op.abspath( - self._gen_outfilename('_median_fiber_length.mat')) + outputs["median_fiber_length_matrix_mat_file"] = op.abspath( + self._gen_outfilename("_median_fiber_length.mat") + ) if isdefined(self.inputs.out_fiber_length_std_matrix_mat_file): - outputs['fiber_length_std_matrix_mat_file'] = op.abspath( - self.inputs.out_fiber_length_std_matrix_mat_file) + outputs["fiber_length_std_matrix_mat_file"] = op.abspath( + self.inputs.out_fiber_length_std_matrix_mat_file + ) else: - outputs['fiber_length_std_matrix_mat_file'] = op.abspath( - self._gen_outfilename('_fiber_length_std.mat')) + outputs["fiber_length_std_matrix_mat_file"] = op.abspath( + self._gen_outfilename("_fiber_length_std.mat") + ) if isdefined(self.inputs.out_intersection_matrix_mat_file): - outputs['intersection_matrix_mat_file'] = 
op.abspath( - self.inputs.out_intersection_matrix_mat_file) + outputs["intersection_matrix_mat_file"] = op.abspath( + self.inputs.out_intersection_matrix_mat_file + ) else: - outputs['intersection_matrix_mat_file'] = op.abspath( - self._gen_outfilename('_intersections.mat')) + outputs["intersection_matrix_mat_file"] = op.abspath( + self._gen_outfilename("_intersections.mat") + ) if isdefined(self.inputs.out_endpoint_array_name): endpoint_name = self.inputs.out_endpoint_array_name - outputs['endpoint_file'] = op.abspath( - self.inputs.out_endpoint_array_name + '_endpoints.npy') - outputs['endpoint_file_mm'] = op.abspath( - self.inputs.out_endpoint_array_name + '_endpointsmm.npy') - outputs['fiber_length_file'] = op.abspath( - self.inputs.out_endpoint_array_name + '_final_fiberslength.npy' + outputs["endpoint_file"] = op.abspath( + self.inputs.out_endpoint_array_name + "_endpoints.npy" ) - outputs['fiber_label_file'] = op.abspath( - self.inputs.out_endpoint_array_name + - '_filtered_fiberslabel.npy') - outputs['fiber_labels_noorphans'] = op.abspath( - self.inputs.out_endpoint_array_name + '_final_fiberslabels.npy' + outputs["endpoint_file_mm"] = op.abspath( + self.inputs.out_endpoint_array_name + "_endpointsmm.npy" + ) + outputs["fiber_length_file"] = op.abspath( + self.inputs.out_endpoint_array_name + "_final_fiberslength.npy" + ) + outputs["fiber_label_file"] = op.abspath( + self.inputs.out_endpoint_array_name + "_filtered_fiberslabel.npy" + ) + outputs["fiber_labels_noorphans"] = op.abspath( + self.inputs.out_endpoint_array_name + "_final_fiberslabels.npy" ) else: _, endpoint_name, _ = split_filename(self.inputs.tract_file) - outputs['endpoint_file'] = op.abspath( - endpoint_name + '_endpoints.npy') - outputs['endpoint_file_mm'] = op.abspath( - endpoint_name + '_endpointsmm.npy') - outputs['fiber_length_file'] = op.abspath( - endpoint_name + '_final_fiberslength.npy') - outputs['fiber_label_file'] = op.abspath( - endpoint_name + '_filtered_fiberslabel.npy') - outputs['fiber_labels_noorphans'] = op.abspath( - endpoint_name + '_final_fiberslabels.npy') + outputs["endpoint_file"] = op.abspath(endpoint_name + "_endpoints.npy") + outputs["endpoint_file_mm"] = op.abspath(endpoint_name + "_endpointsmm.npy") + outputs["fiber_length_file"] = op.abspath( + endpoint_name + "_final_fiberslength.npy" + ) + outputs["fiber_label_file"] = op.abspath( + endpoint_name + "_filtered_fiberslabel.npy" + ) + outputs["fiber_labels_noorphans"] = op.abspath( + endpoint_name + "_final_fiberslabels.npy" + ) if self.inputs.count_region_intersections: - outputs['matrix_files'] = [ - out_matrix_file, out_intersection_matrix_file - ] - outputs['matlab_matrix_files'] = [ - outputs['matrix_mat_file'], - outputs['mean_fiber_length_matrix_mat_file'], - outputs['median_fiber_length_matrix_mat_file'], - outputs['fiber_length_std_matrix_mat_file'], - outputs['intersection_matrix_mat_file'] + outputs["matrix_files"] = [out_matrix_file, out_intersection_matrix_file] + outputs["matlab_matrix_files"] = [ + outputs["matrix_mat_file"], + outputs["mean_fiber_length_matrix_mat_file"], + outputs["median_fiber_length_matrix_mat_file"], + outputs["fiber_length_std_matrix_mat_file"], + outputs["intersection_matrix_mat_file"], ] else: - outputs['matrix_files'] = [out_matrix_file] - outputs['matlab_matrix_files'] = [ - outputs['matrix_mat_file'], - outputs['mean_fiber_length_matrix_mat_file'], - outputs['median_fiber_length_matrix_mat_file'], - outputs['fiber_length_std_matrix_mat_file'] + outputs["matrix_files"] = [out_matrix_file] + 
outputs["matlab_matrix_files"] = [ + outputs["matrix_mat_file"], + outputs["mean_fiber_length_matrix_mat_file"], + outputs["median_fiber_length_matrix_mat_file"], + outputs["fiber_length_std_matrix_mat_file"], ] - outputs['filtered_tractography'] = op.abspath( - endpoint_name + '_streamline_final.trk') - outputs['filtered_tractography_by_intersections'] = op.abspath( - endpoint_name + '_intersections_streamline_final.trk') - outputs['filtered_tractographies'] = [ - outputs['filtered_tractography'], - outputs['filtered_tractography_by_intersections'] + outputs["filtered_tractography"] = op.abspath( + endpoint_name + "_streamline_final.trk" + ) + outputs["filtered_tractography_by_intersections"] = op.abspath( + endpoint_name + "_intersections_streamline_final.trk" + ) + outputs["filtered_tractographies"] = [ + outputs["filtered_tractography"], + outputs["filtered_tractography_by_intersections"], ] - outputs['stats_file'] = op.abspath(endpoint_name + '_statistics.mat') + outputs["stats_file"] = op.abspath(endpoint_name + "_statistics.mat") return outputs def _gen_outfilename(self, ext): @@ -733,27 +785,29 @@ def _gen_outfilename(self, ext): class ROIGenInputSpec(BaseInterfaceInputSpec): aparc_aseg_file = File( - exists=True, mandatory=True, desc='Freesurfer aparc+aseg file') + exists=True, mandatory=True, desc="Freesurfer aparc+aseg file" + ) LUT_file = File( exists=True, - xor=['use_freesurfer_LUT'], - desc='Custom lookup table (cf. FreeSurferColorLUT.txt)') + xor=["use_freesurfer_LUT"], + desc="Custom lookup table (cf. FreeSurferColorLUT.txt)", + ) use_freesurfer_LUT = traits.Bool( - xor=['LUT_file'], - desc= - 'Boolean value; Set to True to use default Freesurfer LUT, False for custom LUT' + xor=["LUT_file"], + desc="Boolean value; Set to True to use default Freesurfer LUT, False for custom LUT", ) freesurfer_dir = Directory( - requires=['use_freesurfer_LUT'], desc='Freesurfer main directory') + requires=["use_freesurfer_LUT"], desc="Freesurfer main directory" + ) out_roi_file = File( - genfile=True, desc='Region of Interest file for connectivity mapping') - out_dict_file = File( - genfile=True, desc='Label dictionary saved in Pickle format') + genfile=True, desc="Region of Interest file for connectivity mapping" + ) + out_dict_file = File(genfile=True, desc="Label dictionary saved in Pickle format") class ROIGenOutputSpec(TraitedSpec): - roi_file = File(desc='Region of Interest file for connectivity mapping') - dict_file = File(desc='Label dictionary saved in Pickle format') + roi_file = File(desc="Region of Interest file for connectivity mapping") + dict_file = File(desc="Label dictionary saved in Pickle format") class ROIGen(BaseInterface): @@ -784,104 +838,165 @@ class ROIGen(BaseInterface): def _run_interface(self, runtime): aparc_aseg_file = self.inputs.aparc_aseg_file aparcpath, aparcname, aparcext = split_filename(aparc_aseg_file) - iflogger.info('Using Aparc+Aseg file: %s', aparcname + aparcext) + iflogger.info("Using Aparc+Aseg file: %s", aparcname + aparcext) niiAPARCimg = nb.load(aparc_aseg_file, mmap=NUMPY_MMAP) niiAPARCdata = niiAPARCimg.get_data() niiDataLabels = np.unique(niiAPARCdata) numDataLabels = np.size(niiDataLabels) - iflogger.info('Number of labels in image: %s', numDataLabels) + iflogger.info("Number of labels in image: %s", numDataLabels) write_dict = True if self.inputs.use_freesurfer_LUT: - self.LUT_file = self.inputs.freesurfer_dir + '/FreeSurferColorLUT.txt' - iflogger.info('Using Freesurfer LUT: %s', self.LUT_file) - prefix = 'fsLUT' - elif not 
self.inputs.use_freesurfer_LUT and isdefined( - self.inputs.LUT_file): + self.LUT_file = self.inputs.freesurfer_dir + "/FreeSurferColorLUT.txt" + iflogger.info("Using Freesurfer LUT: %s", self.LUT_file) + prefix = "fsLUT" + elif not self.inputs.use_freesurfer_LUT and isdefined(self.inputs.LUT_file): self.LUT_file = op.abspath(self.inputs.LUT_file) lutpath, lutname, lutext = split_filename(self.LUT_file) - iflogger.info('Using Custom LUT file: %s', lutname + lutext) + iflogger.info("Using Custom LUT file: %s", lutname + lutext) prefix = lutname else: - prefix = 'hardcoded' + prefix = "hardcoded" write_dict = False if isdefined(self.inputs.out_roi_file): roi_file = op.abspath(self.inputs.out_roi_file) else: - roi_file = op.abspath(prefix + '_' + aparcname + '.nii') + roi_file = op.abspath(prefix + "_" + aparcname + ".nii") if isdefined(self.inputs.out_dict_file): dict_file = op.abspath(self.inputs.out_dict_file) else: - dict_file = op.abspath(prefix + '_' + aparcname + '.pck') + dict_file = op.abspath(prefix + "_" + aparcname + ".pck") if write_dict: - iflogger.info('Lookup table: %s', op.abspath(self.LUT_file)) + iflogger.info("Lookup table: %s", op.abspath(self.LUT_file)) LUTlabelsRGBA = np.loadtxt( self.LUT_file, skiprows=4, usecols=[0, 1, 2, 3, 4, 5], - comments='#', + comments="#", dtype={ - 'names': ('index', 'label', 'R', 'G', 'B', 'A'), - 'formats': ('int', '|S30', 'int', 'int', 'int', 'int') - }) + "names": ("index", "label", "R", "G", "B", "A"), + "formats": ("int", "|S30", "int", "int", "int", "int"), + }, + ) numLUTLabels = np.size(LUTlabelsRGBA) if numLUTLabels < numDataLabels: iflogger.error( - 'LUT file provided does not contain all of the regions in the image' + "LUT file provided does not contain all of the regions in the image" ) - iflogger.error('Removing unmapped regions') - iflogger.info('Number of labels in LUT: %s', numLUTLabels) + iflogger.error("Removing unmapped regions") + iflogger.info("Number of labels in LUT: %s", numLUTLabels) LUTlabelDict = {} """ Create dictionary for input LUT table""" for labels in range(0, numLUTLabels): LUTlabelDict[LUTlabelsRGBA[labels][0]] = [ - LUTlabelsRGBA[labels][1], LUTlabelsRGBA[labels][2], - LUTlabelsRGBA[labels][3], LUTlabelsRGBA[labels][4], - LUTlabelsRGBA[labels][5] + LUTlabelsRGBA[labels][1], + LUTlabelsRGBA[labels][2], + LUTlabelsRGBA[labels][3], + LUTlabelsRGBA[labels][4], + LUTlabelsRGBA[labels][5], ] - iflogger.info('Printing LUT label dictionary') + iflogger.info("Printing LUT label dictionary") iflogger.info(LUTlabelDict) mapDict = {} - MAPPING = [[1, 2012], [2, 2019], [3, 2032], [4, 2014], [5, 2020], [ - 6, 2018 - ], [7, 2027], [8, 2028], [9, 2003], [10, 2024], [11, 2017], [12, 2026], - [13, 2002], [14, 2023], [15, 2010], [16, 2022], [17, 2031], - [18, 2029], [19, 2008], [20, 2025], [21, 2005], [22, 2021], - [23, 2011], [24, 2013], [25, 2007], [26, 2016], [27, 2006], - [28, 2033], [29, 2009], [30, 2015], [31, 2001], [32, 2030], - [33, 2034], [34, 2035], [35, 49], [36, 50], [37, 51], [ - 38, 52 - ], [39, 58], [40, 53], [41, 54], [42, 1012], [43, 1019], [ - 44, 1032 - ], [45, 1014], [46, 1020], [47, 1018], [48, 1027], [ - 49, 1028 - ], [50, 1003], [51, 1024], [52, 1017], [53, 1026], [ - 54, 1002 - ], [55, 1023], [56, 1010], [57, 1022], [58, 1031], [ - 59, 1029 - ], [60, 1008], [61, 1025], [62, 1005], [63, 1021], [ - 64, 1011 - ], [65, 1013], [66, 1007], [67, 1016], [68, 1006], [ - 69, 1033 - ], [70, 1009], [71, 1015], [72, 1001], [73, 1030], [ - 74, 1034 - ], [75, 1035], [76, 10], [77, 11], [78, 12], [79, 13], [ - 80, 26 
- ], [81, 17], [82, 18], [83, 16]] + MAPPING = [ + [1, 2012], + [2, 2019], + [3, 2032], + [4, 2014], + [5, 2020], + [6, 2018], + [7, 2027], + [8, 2028], + [9, 2003], + [10, 2024], + [11, 2017], + [12, 2026], + [13, 2002], + [14, 2023], + [15, 2010], + [16, 2022], + [17, 2031], + [18, 2029], + [19, 2008], + [20, 2025], + [21, 2005], + [22, 2021], + [23, 2011], + [24, 2013], + [25, 2007], + [26, 2016], + [27, 2006], + [28, 2033], + [29, 2009], + [30, 2015], + [31, 2001], + [32, 2030], + [33, 2034], + [34, 2035], + [35, 49], + [36, 50], + [37, 51], + [38, 52], + [39, 58], + [40, 53], + [41, 54], + [42, 1012], + [43, 1019], + [44, 1032], + [45, 1014], + [46, 1020], + [47, 1018], + [48, 1027], + [49, 1028], + [50, 1003], + [51, 1024], + [52, 1017], + [53, 1026], + [54, 1002], + [55, 1023], + [56, 1010], + [57, 1022], + [58, 1031], + [59, 1029], + [60, 1008], + [61, 1025], + [62, 1005], + [63, 1021], + [64, 1011], + [65, 1013], + [66, 1007], + [67, 1016], + [68, 1006], + [69, 1033], + [70, 1009], + [71, 1015], + [72, 1001], + [73, 1030], + [74, 1034], + [75, 1035], + [76, 10], + [77, 11], + [78, 12], + [79, 13], + [80, 26], + [81, 17], + [82, 18], + [83, 16], + ] """ Create empty grey matter mask, Populate with only those regions defined in the mapping.""" niiGM = np.zeros(niiAPARCdata.shape, dtype=np.uint) for ma in MAPPING: niiGM[niiAPARCdata == ma[1]] = ma[0] mapDict[ma[0]] = ma[1] - iflogger.info('Grey matter mask created') + iflogger.info("Grey matter mask created") greyMaskLabels = np.unique(niiGM) numGMLabels = np.size(greyMaskLabels) - iflogger.info('Number of grey matter labels: %s', numGMLabels) + iflogger.info("Number of grey matter labels: %s", numGMLabels) labelDict = {} GMlabelDict = {} @@ -889,55 +1004,53 @@ def _run_interface(self, runtime): try: mapDict[label] if write_dict: - GMlabelDict['originalID'] = mapDict[label] + GMlabelDict["originalID"] = mapDict[label] except: - iflogger.info('Label %s not in provided mapping', label) + iflogger.info("Label %s not in provided mapping", label) if write_dict: del GMlabelDict GMlabelDict = {} - GMlabelDict['labels'] = LUTlabelDict[label][0] - GMlabelDict['colors'] = [ - LUTlabelDict[label][1], LUTlabelDict[label][2], - LUTlabelDict[label][3] + GMlabelDict["labels"] = LUTlabelDict[label][0] + GMlabelDict["colors"] = [ + LUTlabelDict[label][1], + LUTlabelDict[label][2], + LUTlabelDict[label][3], ] - GMlabelDict['a'] = LUTlabelDict[label][4] + GMlabelDict["a"] = LUTlabelDict[label][4] labelDict[label] = GMlabelDict - roi_image = nb.Nifti1Image(niiGM, niiAPARCimg.affine, - niiAPARCimg.header) - iflogger.info('Saving ROI File to %s', roi_file) + roi_image = nb.Nifti1Image(niiGM, niiAPARCimg.affine, niiAPARCimg.header) + iflogger.info("Saving ROI File to %s", roi_file) nb.save(roi_image, roi_file) if write_dict: - iflogger.info('Saving Dictionary File to %s in Pickle format', - dict_file) - with open(dict_file, 'w') as f: + iflogger.info("Saving Dictionary File to %s in Pickle format", dict_file) + with open(dict_file, "wb") as f:  # pickle requires a binary file handle on Python 3 pickle.dump(labelDict, f) return runtime def _list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.out_roi_file): - outputs['roi_file'] = op.abspath(self.inputs.out_roi_file) + outputs["roi_file"] = op.abspath(self.inputs.out_roi_file) else: - outputs['roi_file'] = op.abspath(self._gen_outfilename('nii')) + outputs["roi_file"] = op.abspath(self._gen_outfilename("nii")) if isdefined(self.inputs.out_dict_file): - outputs['dict_file'] = op.abspath(self.inputs.out_dict_file) + outputs["dict_file"] 
= op.abspath(self.inputs.out_dict_file) else: - outputs['dict_file'] = op.abspath(self._gen_outfilename('pck')) + outputs["dict_file"] = op.abspath(self._gen_outfilename("pck")) return outputs def _gen_outfilename(self, ext): _, name, _ = split_filename(self.inputs.aparc_aseg_file) if self.inputs.use_freesurfer_LUT: - prefix = 'fsLUT' - elif not self.inputs.use_freesurfer_LUT and isdefined( - self.inputs.LUT_file): + prefix = "fsLUT" + elif not self.inputs.use_freesurfer_LUT and isdefined(self.inputs.LUT_file): lutpath, lutname, lutext = split_filename(self.inputs.LUT_file) prefix = lutname else: - prefix = 'hardcoded' - return prefix + '_' + name + '.' + ext + prefix = "hardcoded" + return prefix + "_" + name + "." + ext def create_nodes(roi_file, resolution_network_file, out_filename): @@ -949,28 +1062,30 @@ def create_nodes(roi_file, resolution_network_file, out_filename): G.add_node(int(u), **d) xyz = tuple( np.mean( - np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])), - axis=1)) - G.nodes[int(u)]['dn_position'] = tuple([xyz[0], xyz[2], -xyz[1]]) + np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])), axis=1 + ) + ) + G.nodes[int(u)]["dn_position"] = tuple([xyz[0], xyz[2], -xyz[1]]) nx.write_gpickle(G, out_filename) return out_filename class CreateNodesInputSpec(BaseInterfaceInputSpec): - roi_file = File( - exists=True, mandatory=True, desc='Region of interest file') + roi_file = File(exists=True, mandatory=True, desc="Region of interest file") resolution_network_file = File( exists=True, mandatory=True, - desc='Parcellation file from Connectome Mapping Toolkit') + desc="Parcellation file from Connectome Mapping Toolkit", + ) out_filename = File( - 'nodenetwork.pck', + "nodenetwork.pck", usedefault=True, - desc='Output gpickled network with the nodes defined.') + desc="Output gpickled network with the nodes defined.", + ) class CreateNodesOutputSpec(TraitedSpec): - node_network = File(desc='Output gpickled network with the nodes defined.') + node_network = File(desc="Output gpickled network with the nodes defined.") class CreateNodes(BaseInterface): @@ -991,14 +1106,16 @@ class CreateNodes(BaseInterface): output_spec = CreateNodesOutputSpec def _run_interface(self, runtime): - iflogger.info('Creating nodes...') - create_nodes(self.inputs.roi_file, self.inputs.resolution_network_file, - self.inputs.out_filename) - iflogger.info('Saving node network to %s', - op.abspath(self.inputs.out_filename)) + iflogger.info("Creating nodes...") + create_nodes( + self.inputs.roi_file, + self.inputs.resolution_network_file, + self.inputs.out_filename, + ) + iflogger.info("Saving node network to %s", op.abspath(self.inputs.out_filename)) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['node_network'] = op.abspath(self.inputs.out_filename) + outputs["node_network"] = op.abspath(self.inputs.out_filename) return outputs diff --git a/nipype/interfaces/cmtk/convert.py b/nipype/interfaces/cmtk/convert.py index 170ca44366..a0f956b6f9 100644 --- a/nipype/interfaces/cmtk/convert.py +++ b/nipype/interfaces/cmtk/convert.py @@ -7,54 +7,58 @@ import networkx as nx from ...utils.filemanip import split_filename -from ..base import (BaseInterfaceInputSpec, traits, File, - TraitedSpec, InputMultiPath, isdefined) +from ..base import ( + BaseInterfaceInputSpec, + traits, + File, + TraitedSpec, + InputMultiPath, + isdefined, +) from .base import CFFBaseInterface, have_cfflib class CFFConverterInputSpec(BaseInterfaceInputSpec): graphml_networks = InputMultiPath( - 
File(exists=True), desc='list of graphML networks') + File(exists=True), desc="list of graphML networks" + ) gpickled_networks = InputMultiPath( - File(exists=True), desc='list of gpickled Networkx graphs') + File(exists=True), desc="list of gpickled Networkx graphs" + ) - gifti_surfaces = InputMultiPath( - File(exists=True), desc='list of GIFTI surfaces') - gifti_labels = InputMultiPath( - File(exists=True), desc='list of GIFTI labels') - nifti_volumes = InputMultiPath( - File(exists=True), desc='list of NIFTI volumes') - tract_files = InputMultiPath( - File(exists=True), desc='list of Trackvis fiber files') + gifti_surfaces = InputMultiPath(File(exists=True), desc="list of GIFTI surfaces") + gifti_labels = InputMultiPath(File(exists=True), desc="list of GIFTI labels") + nifti_volumes = InputMultiPath(File(exists=True), desc="list of NIFTI volumes") + tract_files = InputMultiPath(File(exists=True), desc="list of Trackvis fiber files") timeseries_files = InputMultiPath( - File(exists=True), desc='list of HDF5 timeseries files') + File(exists=True), desc="list of HDF5 timeseries files" + ) script_files = InputMultiPath( - File(exists=True), desc='list of script files to include') + File(exists=True), desc="list of script files to include" + ) data_files = InputMultiPath( - File(exists=True), - desc='list of external data files (i.e. Numpy, HD5, XML) ') - - title = traits.Str(desc='Connectome Title') - creator = traits.Str(desc='Creator') - email = traits.Str(desc='Email address') - publisher = traits.Str(desc='Publisher') - license = traits.Str(desc='License') - rights = traits.Str(desc='Rights') - references = traits.Str(desc='References') - relation = traits.Str(desc='Relation') - species = traits.Str('Homo sapiens', desc='Species', usedefault=True) + File(exists=True), desc="list of external data files (e.g. NumPy, HDF5, XML)" + ) + + title = traits.Str(desc="Connectome Title") + creator = traits.Str(desc="Creator") + email = traits.Str(desc="Email address") + publisher = traits.Str(desc="Publisher") + license = traits.Str(desc="License") + rights = traits.Str(desc="Rights") + references = traits.Str(desc="References") + relation = traits.Str(desc="Relation") + species = traits.Str("Homo sapiens", desc="Species", usedefault=True) description = traits.Str( - 'Created with the Nipype CFF converter', - desc='Description', - usedefault=True) + "Created with the Nipype CFF converter", desc="Description", usedefault=True + ) - out_file = File( - 'connectome.cff', usedefault=True, desc='Output connectome file') + out_file = File("connectome.cff", usedefault=True, desc="Output connectome file") class CFFConverterOutputSpec(TraitedSpec): - connectome_file = File(exists=True, desc='Output connectome file') + connectome_file = File(exists=True, desc="Output connectome file") class CFFConverter(CFFBaseInterface): @@ -78,6 +82,7 @@ class CFFConverter(CFFBaseInterface): def _run_interface(self, runtime): import cfflib as cf + a = cf.connectome() if isdefined(self.inputs.title): @@ -89,7 +94,7 @@ def _run_interface(self, runtime): a.connectome_meta.set_creator(self.inputs.creator) else: # Probably only works on some OSes... - a.connectome_meta.set_creator(os.getenv('USER')) + a.connectome_meta.set_creator(os.getenv("USER")) if isdefined(self.inputs.email): a.connectome_meta.set_email(self.inputs.email) @@ -122,7 +127,7 @@ def _run_interface(self, runtime): for ntwk in self.inputs.graphml_networks: # There must be a better way to deal with the unique name problem # (i.e. 
tracks and networks can't use the same name, and previously we were pulling them both from the input files) - ntwk_name = 'Network {cnt}'.format(cnt=count) + ntwk_name = "Network {cnt}".format(cnt=count) a.add_connectome_network_from_graphml(ntwk_name, ntwk) count += 1 @@ -148,10 +153,11 @@ def _run_interface(self, runtime): if isdefined(self.inputs.gifti_surfaces): for surf in self.inputs.gifti_surfaces: _, surf_name, _ = split_filename(surf) - csurf = cf.CSurface.create_from_gifti("Surface %d - %s" % - (count, surf_name), surf) - csurf.fileformat = 'Gifti' - csurf.dtype = 'Surfaceset' + csurf = cf.CSurface.create_from_gifti( + "Surface %d - %s" % (count, surf_name), surf + ) + csurf.fileformat = "Gifti" + csurf.dtype = "Surfaceset" a.add_connectome_surface(csurf) count += 1 @@ -160,9 +166,10 @@ def _run_interface(self, runtime): for label in self.inputs.gifti_labels: _, label_name, _ = split_filename(label) csurf = cf.CSurface.create_from_gifti( - "Surface Label %d - %s" % (count, label_name), label) - csurf.fileformat = 'Gifti' - csurf.dtype = 'Labels' + "Surface Label %d - %s" % (count, label_name), label + ) + csurf.fileformat = "Gifti" + csurf.dtype = "Labels" a.add_connectome_surface(csurf) count += 1 @@ -181,19 +188,19 @@ def _run_interface(self, runtime): if isdefined(self.inputs.data_files): for data in self.inputs.data_files: _, data_name, _ = split_filename(data) - cda = cf.CData(name=data_name, src=data, fileformat='NumPy') - if not string.find(data_name, 'lengths') == -1: - cda.dtype = 'FinalFiberLengthArray' - if not string.find(data_name, 'endpoints') == -1: - cda.dtype = 'FiberEndpoints' - if not string.find(data_name, 'labels') == -1: - cda.dtype = 'FinalFiberLabels' + cda = cf.CData(name=data_name, src=data, fileformat="NumPy") + if "lengths" in data_name:  # string.find() was removed in Python 3 + cda.dtype = "FinalFiberLengthArray" + if "endpoints" in data_name: + cda.dtype = "FiberEndpoints" + if "labels" in data_name: + cda.dtype = "FinalFiberLabels" a.add_connectome_data(cda) a.print_summary() _, name, ext = split_filename(self.inputs.out_file) - if not ext == '.cff': - ext = '.cff' + if not ext == ".cff": + ext = ".cff" cf.save_to_cff(a, op.abspath(name + ext)) return runtime @@ -201,9 +208,9 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() _, name, ext = split_filename(self.inputs.out_file) - if not ext == '.cff': - ext = '.cff' - outputs['connectome_file'] = op.abspath(name + ext) + if not ext == ".cff": + ext = ".cff" + outputs["connectome_file"] = op.abspath(name + ext) return outputs @@ -211,16 +218,19 @@ class MergeCNetworksInputSpec(BaseInterfaceInputSpec): in_files = InputMultiPath( File(exists=True), mandatory=True, - desc='List of CFF files to extract networks from') + desc="List of CFF files to extract networks from", + ) out_file = File( - 'merged_network_connectome.cff', + "merged_network_connectome.cff", usedefault=True, - desc='Output CFF file with all the networks added') + desc="Output CFF file with all the networks added", + ) class MergeCNetworksOutputSpec(TraitedSpec): connectome_file = File( - exists=True, desc='Output CFF file with all the networks added') + exists=True, desc="Output CFF file with all the networks added" + ) class MergeCNetworks(CFFBaseInterface): @@ -235,11 +245,13 @@ class MergeCNetworks(CFFBaseInterface): >>> mrg.run() # doctest: +SKIP """ + input_spec = MergeCNetworksInputSpec output_spec = MergeCNetworksOutputSpec def _run_interface(self, runtime): import 
cfflib as cf + extracted_networks = [] for i, con in enumerate(self.inputs.in_files): @@ -250,21 +262,22 @@ def _run_interface(self, runtime): # metadata information ne.load() contitle = mycon.get_connectome_meta().get_title() - ne.set_name(str(i) + ': ' + contitle + ' - ' + ne.get_name()) + ne.set_name(str(i) + ": " + contitle + " - " + ne.get_name()) ne.set_src(ne.get_name()) extracted_networks.append(ne) # Add networks to new connectome newcon = cf.connectome( - title='All CNetworks', connectome_network=extracted_networks) + title="All CNetworks", connectome_network=extracted_networks + ) # Setting additional metadata metadata = newcon.get_connectome_meta() - metadata.set_creator('My Name') - metadata.set_email('My Email') + metadata.set_creator("My Name") + metadata.set_email("My Email") _, name, ext = split_filename(self.inputs.out_file) - if not ext == '.cff': - ext = '.cff' + if not ext == ".cff": + ext = ".cff" cf.save_to_cff(newcon, op.abspath(name + ext)) return runtime @@ -272,7 +285,7 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() _, name, ext = split_filename(self.inputs.out_file) - if not ext == '.cff': - ext = '.cff' - outputs['connectome_file'] = op.abspath(name + ext) + if not ext == ".cff": + ext = ".cff" + outputs["connectome_file"] = op.abspath(name + ext) return outputs diff --git a/nipype/interfaces/cmtk/nbs.py b/nipype/interfaces/cmtk/nbs.py index 19425d2977..e224daa082 100644 --- a/nipype/interfaces/cmtk/nbs.py +++ b/nipype/interfaces/cmtk/nbs.py @@ -8,10 +8,19 @@ import networkx as nx from ... import logging -from ..base import (LibraryBaseInterface, BaseInterfaceInputSpec, traits, File, - TraitedSpec, InputMultiPath, OutputMultiPath, isdefined) +from ..base import ( + LibraryBaseInterface, + BaseInterfaceInputSpec, + traits, + File, + TraitedSpec, + InputMultiPath, + OutputMultiPath, + isdefined, +) from .base import have_cv -iflogger = logging.getLogger('nipype.interface') + +iflogger = logging.getLogger("nipype.interface") def ntwks_to_matrices(in_files, edge_key): @@ -23,12 +32,13 @@ def ntwks_to_matrices(in_files, edge_key): graph = nx.read_gpickle(name) for u, v, d in graph.edges(data=True): try: - graph[u][v]['weight'] = d[ - edge_key] # Setting the edge requested edge value as weight value + graph[u][v]["weight"] = d[ + edge_key + ] # Set the requested edge attribute as the edge weight except: raise KeyError( - "the graph edges do not have {} attribute".format( - edge_key)) + "the graph edges do not have {} attribute".format(edge_key) + ) matrix[:, :, idx] = nx.to_numpy_matrix(graph) # Retrieve the matrix return matrix @@ -37,49 +47,50 @@ class NetworkBasedStatisticInputSpec(BaseInterfaceInputSpec): in_group1 = InputMultiPath( File(exists=True), mandatory=True, - desc='Networks for the first group of subjects') + desc="Networks for the first group of subjects", + ) in_group2 = InputMultiPath( File(exists=True), mandatory=True, - desc='Networks for the second group of subjects') + desc="Networks for the second group of subjects", + ) node_position_network = File( - desc= - 'An optional network used to position the nodes for the output networks' + desc="An optional network used to position the nodes for the output networks" ) number_of_permutations = traits.Int( - 1000, usedefault=True, desc='Number of permutations to perform') + 1000, usedefault=True, desc="Number of permutations to perform" + ) threshold = traits.Float(3, 
usedefault=True, desc="T-statistic threshold") t_tail = traits.Enum( - 'left', - 'right', - 'both', + "left", + "right", + "both", usedefault=True, - desc='Can be one of "left", "right", or "both"') + desc='Can be one of "left", "right", or "both"', + ) edge_key = traits.Str( - 'number_of_fibers', + "number_of_fibers", usedefault=True, - desc= - 'Usually "number_of_fibers, "fiber_length_mean", "fiber_length_std" for matrices made with CMTK' - 'Sometimes "weight" or "value" for functional networks.') + desc='Usually "number_of_fibers", "fiber_length_mean", or "fiber_length_std" for matrices made with CMTK. ' + 'Sometimes "weight" or "value" for functional networks.', + ) out_nbs_network = File(desc="Output network with edges identified by the NBS") out_nbs_pval_network = File( - desc= - 'Output network with p-values to weight the edges identified by the NBS' + desc="Output network with p-values to weight the edges identified by the NBS" ) class NetworkBasedStatisticOutputSpec(TraitedSpec): nbs_network = File( - exists=True, desc='Output network with edges identified by the NBS') + exists=True, desc="Output network with edges identified by the NBS" + ) nbs_pval_network = File( exists=True, - desc= - 'Output network with p-values to weight the edges identified by the NBS' + desc="Output network with p-values to weight the edges identified by the NBS", ) network_files = OutputMultiPath( - File(exists=True), - desc='Output network with edges identified by the NBS') + File(exists=True), desc="Output network with edges identified by the NBS" + ) class NetworkBasedStatistic(LibraryBaseInterface): @@ -99,9 +110,10 @@ class NetworkBasedStatistic(LibraryBaseInterface): >>> nbs.inputs.in_group2 = ['pat1.pck', 'pat2.pck'] # doctest: +SKIP >>> nbs.run() # doctest: +SKIP """ + input_spec = NetworkBasedStatisticInputSpec output_spec = NetworkBasedStatisticOutputSpec - _pkg = 'cviewer' + _pkg = "cviewer" def _run_interface(self, runtime): from cviewer.libs.pyconto.groupstatistics import nbs @@ -110,8 +122,16 @@ def _run_interface(self, runtime): K = self.inputs.number_of_permutations TAIL = self.inputs.t_tail edge_key = self.inputs.edge_key - details = edge_key + '-thresh-' + str(THRESH) + '-k-' + str( - K) + '-tail-' + TAIL + '.pck' + details = ( + edge_key + + "-thresh-" + + str(THRESH) + + "-k-" + + str(K) + + "-tail-" + + TAIL + + ".pck" + ) # Fill in the data from the networks X = ntwks_to_matrices(self.inputs.in_group1, edge_key) @@ -119,7 +139,7 @@ def _run_interface(self, runtime): PVAL, ADJ, _ = nbs.compute_nbs(X, Y, THRESH, K, TAIL) - iflogger.info('p-values:') + iflogger.info("p-values:") iflogger.info(PVAL) pADJ = ADJ.copy() @@ -141,22 +161,23 @@ def _run_interface(self, runtime): node_ntwk_name = self.inputs.in_group1[0] node_network = nx.read_gpickle(node_ntwk_name) - iflogger.info('Populating node dictionaries with attributes from %s', - node_ntwk_name) + iflogger.info( + "Populating node dictionaries with attributes from %s", node_ntwk_name + ) for nid, ndata in node_network.nodes(data=True): nbsgraph.nodes[nid] = ndata nbs_pval_graph.nodes[nid] = ndata - path = op.abspath('NBS_Result_' + details) + path = op.abspath("NBS_Result_" + details) iflogger.info(path) nx.write_gpickle(nbsgraph, path) - iflogger.info('Saving output NBS edge network as %s', path) + iflogger.info("Saving output NBS edge network as %s", path) - pval_path = op.abspath('NBS_P_vals_' + 
details) iflogger.info(pval_path) nx.write_gpickle(nbs_pval_graph, pval_path) - iflogger.info('Saving output p-value network as %s', pval_path) + iflogger.info("Saving output p-value network as %s", pval_path) return runtime def _list_outputs(self): @@ -166,15 +187,23 @@ def _list_outputs(self): K = self.inputs.number_of_permutations TAIL = self.inputs.t_tail edge_key = self.inputs.edge_key - details = edge_key + '-thresh-' + str(THRESH) + '-k-' + str( - K) + '-tail-' + TAIL + '.pck' - path = op.abspath('NBS_Result_' + details) - pval_path = op.abspath('NBS_P_vals_' + details) - - outputs['nbs_network'] = path - outputs['nbs_pval_network'] = pval_path - outputs['network_files'] = [path, pval_path] + details = ( + edge_key + + "-thresh-" + + str(THRESH) + + "-k-" + + str(K) + + "-tail-" + + TAIL + + ".pck" + ) + path = op.abspath("NBS_Result_" + details) + pval_path = op.abspath("NBS_P_vals_" + details) + + outputs["nbs_network"] = path + outputs["nbs_pval_network"] = pval_path + outputs["network_files"] = [path, pval_path] return outputs def _gen_outfilename(self, name, ext): - return name + '.' + ext + return name + "." + ext diff --git a/nipype/interfaces/cmtk/nx.py b/nipype/interfaces/cmtk/nx.py index 7fb47da6ac..c34d372a7e 100644 --- a/nipype/interfaces/cmtk/nx.py +++ b/nipype/interfaces/cmtk/nx.py @@ -9,19 +9,27 @@ from ... import logging from ...utils.filemanip import split_filename -from ..base import (BaseInterface, BaseInterfaceInputSpec, traits, File, - TraitedSpec, InputMultiPath, OutputMultiPath, isdefined) +from ..base import ( + BaseInterface, + BaseInterfaceInputSpec, + traits, + File, + TraitedSpec, + InputMultiPath, + OutputMultiPath, + isdefined, +) from .base import have_cmp -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") def read_unknown_ntwk(ntwk): if not isinstance(ntwk, nx.classes.graph.Graph): _, _, ext = split_filename(ntwk) - if ext == '.pck': + if ext == ".pck": ntwk = nx.read_gpickle(ntwk) - elif ext == '.graphml': + elif ext == ".graphml": ntwk = nx.read_graphml(ntwk) return ntwk @@ -39,34 +47,36 @@ def fix_keys_for_gexf(orig): GEXF Networks can be read in Gephi, however, the keys for the node and edge IDs must be converted to strings """ import networkx as nx + ntwk = nx.Graph() nodes = list(orig.nodes()) edges = list(orig.edges()) for node in nodes: newnodedata = {} newnodedata.update(orig.nodes[node]) - if 'dn_fsname' in orig.nodes[node]: - newnodedata['label'] = orig.nodes[node]['dn_fsname'] + if "dn_fsname" in orig.nodes[node]: + newnodedata["label"] = orig.nodes[node]["dn_fsname"] ntwk.add_node(str(node), **newnodedata) - if 'dn_position' in ntwk.nodes[str( - node)] and 'dn_position' in newnodedata: - ntwk.nodes[str(node)]['dn_position'] = str( - newnodedata['dn_position']) + if "dn_position" in ntwk.nodes[str(node)] and "dn_position" in newnodedata: + ntwk.nodes[str(node)]["dn_position"] = str(newnodedata["dn_position"]) for edge in edges: data = {} data = orig.edge[edge[0]][edge[1]] ntwk.add_edge(str(edge[0]), str(edge[1]), **data) - if 'fiber_length_mean' in ntwk.edge[str(edge[0])][str(edge[1])]: - ntwk.edge[str(edge[0])][str(edge[1])]['fiber_length_mean'] = str( - data['fiber_length_mean']) - if 'fiber_length_std' in ntwk.edge[str(edge[0])][str(edge[1])]: - ntwk.edge[str(edge[0])][str(edge[1])]['fiber_length_std'] = str( - data['fiber_length_std']) - if 'number_of_fibers' in ntwk.edge[str(edge[0])][str(edge[1])]: - ntwk.edge[str(edge[0])][str(edge[1])]['number_of_fibers'] = str( - 
data['number_of_fibers']) - if 'value' in ntwk.edge[str(edge[0])][str(edge[1])]: - ntwk.edge[str(edge[0])][str(edge[1])]['value'] = str(data['value']) + if "fiber_length_mean" in ntwk.edge[str(edge[0])][str(edge[1])]: + ntwk.edge[str(edge[0])][str(edge[1])]["fiber_length_mean"] = str( + data["fiber_length_mean"] + ) + if "fiber_length_std" in ntwk.edge[str(edge[0])][str(edge[1])]: + ntwk.edge[str(edge[0])][str(edge[1])]["fiber_length_std"] = str( + data["fiber_length_std"] + ) + if "number_of_fibers" in ntwk.edge[str(edge[0])][str(edge[1])]: + ntwk.edge[str(edge[0])][str(edge[1])]["number_of_fibers"] = str( + data["number_of_fibers"] + ) + if "value" in ntwk.edge[str(edge[0])][str(edge[1])]: + ntwk.edge[str(edge[0])][str(edge[1])]["value"] = str(data["value"]) return ntwk @@ -90,30 +100,34 @@ def average_networks(in_files, ntwk_res_file, group_id): import networkx as nx import os.path as op import scipy.io as sio - iflogger.info('Creating average network for group: %s', group_id) + + iflogger.info("Creating average network for group: %s", group_id) matlab_network_list = [] if len(in_files) == 1: avg_ntwk = read_unknown_ntwk(in_files[0]) else: count_to_keep_edge = np.round(len(in_files) / 2.0) - iflogger.info('Number of networks: %i, an edge must occur in at ' - 'least %i to remain in the average network', - len(in_files), count_to_keep_edge) + iflogger.info( + "Number of networks: %i, an edge must occur in at " + "least %i to remain in the average network", + len(in_files), + count_to_keep_edge, + ) ntwk_res_file = read_unknown_ntwk(ntwk_res_file) - iflogger.info('%i nodes found in network resolution file', - ntwk_res_file.number_of_nodes()) + iflogger.info( + "%i nodes found in network resolution file", ntwk_res_file.number_of_nodes() + ) ntwk = remove_all_edges(ntwk_res_file) counting_ntwk = ntwk.copy() # Sums all the relevant variables for index, subject in enumerate(in_files): tmp = nx.read_gpickle(subject) - iflogger.info('File %s has %i edges', subject, - tmp.number_of_edges()) + iflogger.info("File %s has %i edges", subject, tmp.number_of_edges()) edges = list(tmp.edges()) for edge in edges: data = {} data = tmp.edge[edge[0]][edge[1]] - data['count'] = 1 + data["count"] = 1 if ntwk.has_edge(edge[0], edge[1]): current = {} current = ntwk.edge[edge[0]][edge[1]] @@ -123,67 +137,75 @@ def average_networks(in_files, ntwk_res_file, group_id): for node in nodes: data = {} data = ntwk.nodes[node] - if 'value' in tmp.nodes[node]: - data['value'] = data['value'] + tmp.nodes[node]['value'] + if "value" in tmp.nodes[node]: + data["value"] = data["value"] + tmp.nodes[node]["value"] ntwk.add_node(node, **data) # Divides each value by the number of files nodes = list(ntwk.nodes()) edges = list(ntwk.edges()) - iflogger.info('Total network has %i edges', ntwk.number_of_edges()) + iflogger.info("Total network has %i edges", ntwk.number_of_edges()) avg_ntwk = nx.Graph() newdata = {} for node in nodes: data = ntwk.nodes[node] newdata = data - if 'value' in data: - newdata['value'] = data['value'] / len(in_files) - ntwk.nodes[node]['value'] = newdata + if "value" in data: + newdata["value"] = data["value"] / len(in_files) + ntwk.nodes[node]["value"] = newdata avg_ntwk.add_node(node, **newdata) edge_dict = {} - edge_dict['count'] = np.zeros((avg_ntwk.number_of_nodes(), - avg_ntwk.number_of_nodes())) + edge_dict["count"] = np.zeros( + (avg_ntwk.number_of_nodes(), avg_ntwk.number_of_nodes()) + ) for edge in edges: data = ntwk.edge[edge[0]][edge[1]] - if ntwk.edge[edge[0]][edge[1]]['count'] >= 
count_to_keep_edge: + if ntwk.edge[edge[0]][edge[1]]["count"] >= count_to_keep_edge: for key in list(data.keys()): - if not key == 'count': + if not key == "count": data[key] = data[key] / len(in_files) ntwk.edge[edge[0]][edge[1]] = data avg_ntwk.add_edge(edge[0], edge[1], **data) - edge_dict['count'][edge[0] - 1][edge[1] - 1] = ntwk.edge[edge[0]][ - edge[1]]['count'] + edge_dict["count"][edge[0] - 1][edge[1] - 1] = ntwk.edge[edge[0]][edge[1]][ + "count" + ] - iflogger.info('After thresholding, the average network has %i edges', - avg_ntwk.number_of_edges()) + iflogger.info( + "After thresholding, the average network has %i edges", + avg_ntwk.number_of_edges(), + ) avg_edges = avg_ntwk.edges() for edge in avg_edges: data = avg_ntwk.edge[edge[0]][edge[1]] for key in list(data.keys()): - if not key == 'count': - edge_dict[key] = np.zeros((avg_ntwk.number_of_nodes(), - avg_ntwk.number_of_nodes())) + if not key == "count": + edge_dict[key] = np.zeros( + (avg_ntwk.number_of_nodes(), avg_ntwk.number_of_nodes()) + ) edge_dict[key][edge[0] - 1][edge[1] - 1] = data[key] for key in list(edge_dict.keys()): tmp = {} - network_name = group_id + '_' + key + '_average.mat' + network_name = group_id + "_" + key + "_average.mat" matlab_network_list.append(op.abspath(network_name)) tmp[key] = edge_dict[key] sio.savemat(op.abspath(network_name), tmp) - iflogger.info('Saving average network for key: %s as %s', key, - op.abspath(network_name)) + iflogger.info( + "Saving average network for key: %s as %s", + key, + op.abspath(network_name), + ) # Writes the networks and returns the name - network_name = group_id + '_average.pck' + network_name = group_id + "_average.pck" nx.write_gpickle(avg_ntwk, op.abspath(network_name)) - iflogger.info('Saving average network as %s', op.abspath(network_name)) + iflogger.info("Saving average network as %s", op.abspath(network_name)) avg_ntwk = fix_keys_for_gexf(avg_ntwk) - network_name = group_id + '_average.gexf' + network_name = group_id + "_average.gexf" nx.write_gexf(avg_ntwk, op.abspath(network_name)) - iflogger.info('Saving average network as %s', op.abspath(network_name)) + iflogger.info("Saving average network as %s", op.abspath(network_name)) return network_name, matlab_network_list @@ -191,44 +213,46 @@ def compute_node_measures(ntwk, calculate_cliques=False): """ These return node-based measures """ - iflogger.info('Computing node measures:') + iflogger.info("Computing node measures:") measures = {} - iflogger.info('...Computing degree...') - measures['degree'] = np.array(list(ntwk.degree().values())) - iflogger.info('...Computing load centrality...') - measures['load_centrality'] = np.array( - list(nx.load_centrality(ntwk).values())) - iflogger.info('...Computing betweenness centrality...') - measures['betweenness_centrality'] = np.array( - list(nx.betweenness_centrality(ntwk).values())) - iflogger.info('...Computing degree centrality...') - measures['degree_centrality'] = np.array( - list(nx.degree_centrality(ntwk).values())) - iflogger.info('...Computing closeness centrality...') - measures['closeness_centrality'] = np.array( - list(nx.closeness_centrality(ntwk).values())) + iflogger.info("...Computing degree...") + measures["degree"] = np.array(list(ntwk.degree().values())) + iflogger.info("...Computing load centrality...") + measures["load_centrality"] = np.array(list(nx.load_centrality(ntwk).values())) + iflogger.info("...Computing betweenness centrality...") + measures["betweenness_centrality"] = np.array( + list(nx.betweenness_centrality(ntwk).values()) + ) 
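+    # NOTE: load_centrality and betweenness_centrality both rely on Brandes-style + # shortest-path accumulation (roughly O(n*m)), so on dense connectomes these two + # calls usually dominate the runtime of compute_node_measures; if that becomes a + # bottleneck, nx.betweenness_centrality(ntwk, k=...) can give a sampled estimate.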
+ iflogger.info("...Computing degree centrality...") + measures["degree_centrality"] = np.array(list(nx.degree_centrality(ntwk).values())) + iflogger.info("...Computing closeness centrality...") + measures["closeness_centrality"] = np.array( + list(nx.closeness_centrality(ntwk).values()) + ) # iflogger.info('...Computing eigenvector centrality...') # measures['eigenvector_centrality'] = np.array(nx.eigenvector_centrality(ntwk, max_iter=100000).values()) - iflogger.info('...Computing triangles...') - measures['triangles'] = np.array(list(nx.triangles(ntwk).values())) - iflogger.info('...Computing clustering...') - measures['clustering'] = np.array(list(nx.clustering(ntwk).values())) - iflogger.info('...Computing k-core number') - measures['core_number'] = np.array(list(nx.core_number(ntwk).values())) - iflogger.info('...Identifying network isolates...') + iflogger.info("...Computing triangles...") + measures["triangles"] = np.array(list(nx.triangles(ntwk).values())) + iflogger.info("...Computing clustering...") + measures["clustering"] = np.array(list(nx.clustering(ntwk).values())) + iflogger.info("...Computing k-core number") + measures["core_number"] = np.array(list(nx.core_number(ntwk).values())) + iflogger.info("...Identifying network isolates...") isolate_list = nx.isolates(ntwk) binarized = np.zeros((ntwk.number_of_nodes(), 1)) for value in isolate_list: value = value - 1 # Zero indexing binarized[value] = 1 - measures['isolates'] = binarized + measures["isolates"] = binarized if calculate_cliques: - iflogger.info('...Calculating node clique number') - measures['node_clique_number'] = np.array( - list(nx.node_clique_number(ntwk).values())) - iflogger.info('...Computing number of cliques for each node...') - measures['number_of_cliques'] = np.array( - list(nx.number_of_cliques(ntwk).values())) + iflogger.info("...Calculating node clique number") + measures["node_clique_number"] = np.array( + list(nx.node_clique_number(ntwk).values()) + ) + iflogger.info("...Computing number of cliques for each node...") + measures["number_of_cliques"] = np.array( + list(nx.number_of_cliques(ntwk).values()) + ) return measures @@ -236,7 +260,7 @@ def compute_edge_measures(ntwk): """ These return edge-based measures """ - iflogger.info('Computing edge measures:') + iflogger.info("Computing edge measures:") measures = {} # iflogger.info('...Computing google matrix...' 
#Makes really large networks (500k+ edges)) # measures['google_matrix'] = nx.google_matrix(ntwk) @@ -251,60 +275,56 @@ def compute_dict_measures(ntwk): """ Returns a dictionary """ - iflogger.info('Computing measures which return a dictionary:') + iflogger.info("Computing measures which return a dictionary:") measures = {} - iflogger.info('...Computing rich club coefficient...') - measures['rich_club_coef'] = nx.rich_club_coefficient(ntwk) + iflogger.info("...Computing rich club coefficient...") + measures["rich_club_coef"] = nx.rich_club_coefficient(ntwk) return measures -def compute_singlevalued_measures(ntwk, weighted=True, - calculate_cliques=False): +def compute_singlevalued_measures(ntwk, weighted=True, calculate_cliques=False): """ Returns a single value per network """ - iflogger.info('Computing single valued measures:') + iflogger.info("Computing single valued measures:") measures = {} - iflogger.info('...Computing degree assortativity (pearson number) ...') + iflogger.info("...Computing degree assortativity (pearson number) ...") try: - measures['degree_pearsonr'] = nx.degree_pearsonr(ntwk) + measures["degree_pearsonr"] = nx.degree_pearsonr(ntwk) except AttributeError: # For NetworkX 1.6 - measures[ - 'degree_pearsonr'] = nx.degree_pearson_correlation_coefficient( - ntwk) - iflogger.info('...Computing degree assortativity...') + measures["degree_pearsonr"] = nx.degree_pearson_correlation_coefficient(ntwk) + iflogger.info("...Computing degree assortativity...") try: - measures['degree_assortativity'] = nx.degree_assortativity(ntwk) + measures["degree_assortativity"] = nx.degree_assortativity(ntwk) except AttributeError: - measures['degree_assortativity'] = nx.degree_assortativity_coefficient( - ntwk) - iflogger.info('...Computing transitivity...') - measures['transitivity'] = nx.transitivity(ntwk) - iflogger.info('...Computing number of connected_components...') - measures['number_connected_components'] = nx.number_connected_components( - ntwk) - iflogger.info('...Computing graph density...') - measures['graph_density'] = nx.density(ntwk) - iflogger.info('...Recording number of edges...') - measures['number_of_edges'] = nx.number_of_edges(ntwk) - iflogger.info('...Recording number of nodes...') - measures['number_of_nodes'] = nx.number_of_nodes(ntwk) - iflogger.info('...Computing average clustering...') - measures['average_clustering'] = nx.average_clustering(ntwk) + measures["degree_assortativity"] = nx.degree_assortativity_coefficient(ntwk) + iflogger.info("...Computing transitivity...") + measures["transitivity"] = nx.transitivity(ntwk) + iflogger.info("...Computing number of connected_components...") + measures["number_connected_components"] = nx.number_connected_components(ntwk) + iflogger.info("...Computing graph density...") + measures["graph_density"] = nx.density(ntwk) + iflogger.info("...Recording number of edges...") + measures["number_of_edges"] = nx.number_of_edges(ntwk) + iflogger.info("...Recording number of nodes...") + measures["number_of_nodes"] = nx.number_of_nodes(ntwk) + iflogger.info("...Computing average clustering...") + measures["average_clustering"] = nx.average_clustering(ntwk) if nx.is_connected(ntwk): - iflogger.info('...Calculating average shortest path length...') - measures[ - 'average_shortest_path_length'] = nx.average_shortest_path_length( - ntwk, weighted) + iflogger.info("...Calculating average shortest path length...") + measures["average_shortest_path_length"] = nx.average_shortest_path_length( + ntwk, weighted + ) else: - 
iflogger.info('...Calculating average shortest path length...') - measures[ - 'average_shortest_path_length'] = nx.average_shortest_path_length( - nx.connected_component_subgraphs(ntwk)[0], weighted) + iflogger.info("...Calculating average shortest path length...") + measures["average_shortest_path_length"] = nx.average_shortest_path_length( + nx.connected_component_subgraphs(ntwk)[0], weighted + ) if calculate_cliques: - iflogger.info('...Computing graph clique number...') - measures['graph_clique_number'] = nx.graph_clique_number( - ntwk) # out of memory error + iflogger.info("...Computing graph clique number...") + measures["graph_clique_number"] = nx.graph_clique_number( + ntwk + ) # out of memory error return measures @@ -324,7 +344,7 @@ def add_node_data(node_array, ntwk): newdata = {} for idx, data in ntwk.nodes(data=True): if not int(idx) == 0: - newdata['value'] = node_array[int(idx) - 1] + newdata["value"] = node_array[int(idx) - 1] data.update(newdata) node_ntwk.add_node(int(idx), **data) return node_ntwk @@ -336,8 +356,8 @@ def add_edge_data(edge_array, ntwk, above=0, below=0): for x, row in enumerate(edge_array): for y in range(0, np.max(np.shape(edge_array[x]))): if not edge_array[x, y] == 0: - data['value'] = edge_array[x, y] - if data['value'] <= below or data['value'] >= above: + data["value"] = edge_array[x, y] + if data["value"] <= below or data["value"] >= above: if edge_ntwk.has_edge(x + 1, y + 1): old_edge_dict = edge_ntwk.edge[x + 1][y + 1] edge_ntwk.remove_edge(x + 1, y + 1) @@ -347,79 +367,78 @@ def add_edge_data(edge_array, ntwk, above=0, below=0): class NetworkXMetricsInputSpec(BaseInterfaceInputSpec): - in_file = File(exists=True, mandatory=True, desc='Input network') + in_file = File(exists=True, mandatory=True, desc="Input network") out_k_core = File( - 'k_core', + "k_core", usedefault=True, - desc='Computed k-core network stored as a NetworkX pickle.') + desc="Computed k-core network stored as a NetworkX pickle.", + ) out_k_shell = File( - 'k_shell', + "k_shell", usedefault=True, - desc='Computed k-shell network stored as a NetworkX pickle.') + desc="Computed k-shell network stored as a NetworkX pickle.", + ) out_k_crust = File( - 'k_crust', + "k_crust", usedefault=True, - desc='Computed k-crust network stored as a NetworkX pickle.') + desc="Computed k-crust network stored as a NetworkX pickle.", + ) treat_as_weighted_graph = traits.Bool( True, usedefault=True, - desc= - 'Some network metrics can be calculated while considering only a binarized version of the graph' + desc="Some network metrics can be calculated while considering only a binarized version of the graph", ) compute_clique_related_measures = traits.Bool( False, usedefault=True, - desc= - 'Computing clique-related measures (e.g. node clique number) can be very time consuming' + desc="Computing clique-related measures (e.g. 
node clique number) can be very time consuming", ) out_global_metrics_matlab = File( - genfile=True, desc='Output node metrics in MATLAB .mat format') + genfile=True, desc="Output global metrics in MATLAB .mat format" + ) out_node_metrics_matlab = File( - genfile=True, desc='Output node metrics in MATLAB .mat format') + genfile=True, desc="Output node metrics in MATLAB .mat format" + ) out_edge_metrics_matlab = File( - genfile=True, desc='Output edge metrics in MATLAB .mat format') + genfile=True, desc="Output edge metrics in MATLAB .mat format" + ) out_pickled_extra_measures = File( - 'extra_measures', + "extra_measures", usedefault=True, - desc= - 'Network measures for group 1 that return dictionaries stored as a Pickle.' + desc="Network measures that return dictionaries, stored as a Pickle.", ) class NetworkXMetricsOutputSpec(TraitedSpec): - gpickled_network_files = OutputMultiPath( - File(desc='Output gpickled network files')) + gpickled_network_files = OutputMultiPath(File(desc="Output gpickled network files")) matlab_matrix_files = OutputMultiPath( - File(desc='Output network metrics in MATLAB .mat format')) - global_measures_matlab = File( - desc='Output global metrics in MATLAB .mat format') - node_measures_matlab = File( - desc='Output node metrics in MATLAB .mat format') - edge_measures_matlab = File( - desc='Output edge metrics in MATLAB .mat format') + File(desc="Output network metrics in MATLAB .mat format") + ) + global_measures_matlab = File(desc="Output global metrics in MATLAB .mat format") + node_measures_matlab = File(desc="Output node metrics in MATLAB .mat format") + edge_measures_matlab = File(desc="Output edge metrics in MATLAB .mat format") node_measure_networks = OutputMultiPath( - File(desc='Output gpickled network files for all node-based measures')) + File(desc="Output gpickled network files for all node-based measures") + ) edge_measure_networks = OutputMultiPath( - File(desc='Output gpickled network files for all edge-based measures')) + File(desc="Output gpickled network files for all edge-based measures") + ) k_networks = OutputMultiPath( File( - desc= - 'Output gpickled network files for the k-core, k-shell, and k-crust networks' - )) - k_core = File(desc='Computed k-core network stored as a NetworkX pickle.') - k_shell = File( - desc='Computed k-shell network stored as a NetworkX pickle.') - k_crust = File( - desc='Computed k-crust network stored as a NetworkX pickle.') + desc="Output gpickled network files for the k-core, k-shell, and k-crust networks" + ) + ) + k_core = File(desc="Computed k-core network stored as a NetworkX pickle.") + k_shell = File(desc="Computed k-shell network stored as a NetworkX pickle.") + k_crust = File(desc="Computed k-crust network stored as a NetworkX pickle.") pickled_extra_measures = File( - desc= - 'Network measures for the group that return dictionaries, stored as a Pickle.' + desc="Network measures that return dictionaries, stored as a Pickle." ) matlab_dict_measures = OutputMultiPath( File( - desc= - 'Network measures for the group that return dictionaries, stored as matlab matrices.' - )) + desc="Network measures that return dictionaries, stored as MATLAB matrices." 
+ ) + ) class NetworkXMetrics(BaseInterface): @@ -434,11 +453,13 @@ class NetworkXMetrics(BaseInterface): >>> nxmetrics.inputs.in_file = 'subj1.pck' >>> nxmetrics.run() # doctest: +SKIP """ + input_spec = NetworkXMetricsInputSpec output_spec = NetworkXMetricsOutputSpec def _run_interface(self, runtime): import scipy.io as sio + global gpickled, nodentwks, edgentwks, kntwks, matlab gpickled = list() nodentwks = list() @@ -455,70 +476,73 @@ def _run_interface(self, runtime): weighted = self.inputs.treat_as_weighted_graph global_measures = compute_singlevalued_measures( - ntwk, weighted, calculate_cliques) + ntwk, weighted, calculate_cliques + ) if isdefined(self.inputs.out_global_metrics_matlab): global_out_file = op.abspath(self.inputs.out_global_metrics_matlab) else: - global_out_file = op.abspath( - self._gen_outfilename('globalmetrics', 'mat')) - sio.savemat(global_out_file, global_measures, oned_as='column') + global_out_file = op.abspath(self._gen_outfilename("globalmetrics", "mat")) + sio.savemat(global_out_file, global_measures, oned_as="column") matlab.append(global_out_file) node_measures = compute_node_measures(ntwk, calculate_cliques) for key in list(node_measures.keys()): newntwk = add_node_data(node_measures[key], ntwk) - out_file = op.abspath(self._gen_outfilename(key, 'pck')) + out_file = op.abspath(self._gen_outfilename(key, "pck")) nx.write_gpickle(newntwk, out_file) nodentwks.append(out_file) if isdefined(self.inputs.out_node_metrics_matlab): node_out_file = op.abspath(self.inputs.out_node_metrics_matlab) else: - node_out_file = op.abspath( - self._gen_outfilename('nodemetrics', 'mat')) - sio.savemat(node_out_file, node_measures, oned_as='column') + node_out_file = op.abspath(self._gen_outfilename("nodemetrics", "mat")) + sio.savemat(node_out_file, node_measures, oned_as="column") matlab.append(node_out_file) gpickled.extend(nodentwks) edge_measures = compute_edge_measures(ntwk) for key in list(edge_measures.keys()): newntwk = add_edge_data(edge_measures[key], ntwk) - out_file = op.abspath(self._gen_outfilename(key, 'pck')) + out_file = op.abspath(self._gen_outfilename(key, "pck")) nx.write_gpickle(newntwk, out_file) edgentwks.append(out_file) if isdefined(self.inputs.out_edge_metrics_matlab): edge_out_file = op.abspath(self.inputs.out_edge_metrics_matlab) else: - edge_out_file = op.abspath( - self._gen_outfilename('edgemetrics', 'mat')) - sio.savemat(edge_out_file, edge_measures, oned_as='column') + edge_out_file = op.abspath(self._gen_outfilename("edgemetrics", "mat")) + sio.savemat(edge_out_file, edge_measures, oned_as="column") matlab.append(edge_out_file) gpickled.extend(edgentwks) ntwk_measures = compute_network_measures(ntwk) for key in list(ntwk_measures.keys()): - if key == 'k_core': + if key == "k_core": out_file = op.abspath( - self._gen_outfilename(self.inputs.out_k_core, 'pck')) - if key == 'k_shell': + self._gen_outfilename(self.inputs.out_k_core, "pck") + ) + if key == "k_shell": out_file = op.abspath( - self._gen_outfilename(self.inputs.out_k_shell, 'pck')) - if key == 'k_crust': + self._gen_outfilename(self.inputs.out_k_shell, "pck") + ) + if key == "k_crust": out_file = op.abspath( - self._gen_outfilename(self.inputs.out_k_crust, 'pck')) + self._gen_outfilename(self.inputs.out_k_crust, "pck") + ) nx.write_gpickle(ntwk_measures[key], out_file) kntwks.append(out_file) gpickled.extend(kntwks) out_pickled_extra_measures = op.abspath( - self._gen_outfilename(self.inputs.out_pickled_extra_measures, - 'pck')) + 
self._gen_outfilename(self.inputs.out_pickled_extra_measures, "pck") + ) dict_measures = compute_dict_measures(ntwk) - iflogger.info('Saving extra measure file to %s in Pickle format', - op.abspath(out_pickled_extra_measures)) - with open(out_pickled_extra_measures, 'w') as fo: + iflogger.info( + "Saving extra measure file to %s in Pickle format", + op.abspath(out_pickled_extra_measures), + ) + with open(out_pickled_extra_measures, "wb") as fo: # pickle.dump needs a binary file handle on Python 3 pickle.dump(dict_measures, fo) - iflogger.info('Saving MATLAB measures as %s', matlab) + iflogger.info("Saving MATLAB measures as %s", matlab) # Loops through the measures which return a dictionary, # converts the keys and values to a Numpy array, @@ -535,67 +559,72 @@ def _run_interface(self, runtime): values = np.array(dict_measures[key][keyd]) nparrayvalues = np.append(nparrayvalues, values) nparray = np.vstack((nparraykeys, nparrayvalues)) - out_file = op.abspath(self._gen_outfilename(key, 'mat')) + out_file = op.abspath(self._gen_outfilename(key, "mat")) npdict = {} npdict[key] = nparray - sio.savemat(out_file, npdict, oned_as='column') + sio.savemat(out_file, npdict, oned_as="column") dicts.append(out_file) return runtime def _list_outputs(self): outputs = self.output_spec().get() outputs["k_core"] = op.abspath( - self._gen_outfilename(self.inputs.out_k_core, 'pck')) + self._gen_outfilename(self.inputs.out_k_core, "pck") + ) outputs["k_shell"] = op.abspath( - self._gen_outfilename(self.inputs.out_k_shell, 'pck')) + self._gen_outfilename(self.inputs.out_k_shell, "pck") + ) outputs["k_crust"] = op.abspath( - self._gen_outfilename(self.inputs.out_k_crust, 'pck')) + self._gen_outfilename(self.inputs.out_k_crust, "pck") + ) outputs["gpickled_network_files"] = gpickled outputs["k_networks"] = kntwks outputs["node_measure_networks"] = nodentwks outputs["edge_measure_networks"] = edgentwks outputs["matlab_dict_measures"] = dicts outputs["global_measures_matlab"] = op.abspath( - self._gen_outfilename('globalmetrics', 'mat')) + self._gen_outfilename("globalmetrics", "mat") + ) outputs["node_measures_matlab"] = op.abspath( - self._gen_outfilename('nodemetrics', 'mat')) + self._gen_outfilename("nodemetrics", "mat") + ) outputs["edge_measures_matlab"] = op.abspath( - self._gen_outfilename('edgemetrics', 'mat')) + self._gen_outfilename("edgemetrics", "mat") + ) outputs["matlab_matrix_files"] = [ - outputs["global_measures_matlab"], outputs["node_measures_matlab"], - outputs["edge_measures_matlab"] + outputs["global_measures_matlab"], + outputs["node_measures_matlab"], + outputs["edge_measures_matlab"], ] outputs["pickled_extra_measures"] = op.abspath( - self._gen_outfilename(self.inputs.out_pickled_extra_measures, - 'pck')) + self._gen_outfilename(self.inputs.out_pickled_extra_measures, "pck") + ) return outputs def _gen_outfilename(self, name, ext): - return name + '.' + ext + return name + "." + ext class AverageNetworksInputSpec(BaseInterfaceInputSpec): in_files = InputMultiPath( - File(exists=True), - mandatory=True, - desc='Networks for a group of subjects') + File(exists=True), mandatory=True, desc="Networks for a group of subjects" + ) resolution_network_file = File( exists=True, - desc= - 'Parcellation files from Connectome Mapping Toolkit. This is not necessary' - ', but if included, the interface will output the statistical maps as networkx graphs.' + desc="Parcellation files from Connectome Mapping Toolkit. 
This is not necessary" + ", but if included, the interface will output the statistical maps as networkx graphs.", ) - group_id = traits.Str('group1', usedefault=True, desc='ID for group') - out_gpickled_groupavg = File( - desc='Average network saved as a NetworkX .pck') - out_gexf_groupavg = File(desc='Average network saved as a .gexf file') + group_id = traits.Str("group1", usedefault=True, desc="ID for group") + out_gpickled_groupavg = File(desc="Average network saved as a NetworkX .pck") + out_gexf_groupavg = File(desc="Average network saved as a .gexf file") class AverageNetworksOutputSpec(TraitedSpec): - gpickled_groupavg = File(desc='Average network saved as a NetworkX .pck') - gexf_groupavg = File(desc='Average network saved as a .gexf file') + gpickled_groupavg = File(desc="Average network saved as a NetworkX .pck") + gexf_groupavg = File(desc="Average network saved as a .gexf file") matlab_groupavgs = OutputMultiPath( - File(desc='Average network saved as a .gexf file')) + File(desc="Average network saved as a .gexf file") + ) class AverageNetworks(BaseInterface): @@ -614,6 +643,7 @@ class AverageNetworks(BaseInterface): >>> avg.run() # doctest: +SKIP """ + input_spec = AverageNetworksInputSpec output_spec = AverageNetworksOutputSpec @@ -625,29 +655,28 @@ def _run_interface(self, runtime): global matlab_network_list network_name, matlab_network_list = average_networks( - self.inputs.in_files, ntwk_res_file, self.inputs.group_id) + self.inputs.in_files, ntwk_res_file, self.inputs.group_id + ) return runtime def _list_outputs(self): outputs = self.output_spec().get() if not isdefined(self.inputs.out_gpickled_groupavg): outputs["gpickled_groupavg"] = op.abspath( - self._gen_outfilename(self.inputs.group_id + '_average', - 'pck')) + self._gen_outfilename(self.inputs.group_id + "_average", "pck") + ) else: - outputs["gpickled_groupavg"] = op.abspath( - self.inputs.out_gpickled_groupavg) + outputs["gpickled_groupavg"] = op.abspath(self.inputs.out_gpickled_groupavg) if not isdefined(self.inputs.out_gexf_groupavg): outputs["gexf_groupavg"] = op.abspath( - self._gen_outfilename(self.inputs.group_id + '_average', - 'gexf')) + self._gen_outfilename(self.inputs.group_id + "_average", "gexf") + ) else: - outputs["gexf_groupavg"] = op.abspath( - self.inputs.out_gexf_groupavg) + outputs["gexf_groupavg"] = op.abspath(self.inputs.out_gexf_groupavg) outputs["matlab_groupavgs"] = matlab_network_list return outputs def _gen_outfilename(self, name, ext): - return name + '.' + ext + return name + "." + ext diff --git a/nipype/interfaces/cmtk/parcellation.py b/nipype/interfaces/cmtk/parcellation.py index a80c4e895f..60ef0445b3 100644 --- a/nipype/interfaces/cmtk/parcellation.py +++ b/nipype/interfaces/cmtk/parcellation.py @@ -10,151 +10,321 @@ import networkx as nx from ... 
import logging -from ..base import (BaseInterface, LibraryBaseInterface, - BaseInterfaceInputSpec, traits, File, - TraitedSpec, Directory, isdefined) +from ..base import ( + BaseInterface, + LibraryBaseInterface, + BaseInterfaceInputSpec, + traits, + File, + TraitedSpec, + Directory, + isdefined, +) from .base import have_cmp -iflogger = logging.getLogger('nipype.interface') + +iflogger = logging.getLogger("nipype.interface") def create_annot_label(subject_id, subjects_dir, fs_dir, parcellation_name): import cmp from cmp.util import runCmd + iflogger.info("Create the cortical labels necessary for our ROIs") iflogger.info("=================================================") - fs_label_dir = op.join(op.join(subjects_dir, subject_id), 'label') + fs_label_dir = op.join(op.join(subjects_dir, subject_id), "label") output_dir = op.abspath(op.curdir) paths = [] cmp_config = cmp.configuration.PipelineConfiguration() cmp_config.parcellation_scheme = "Lausanne2008" - for hemi in ['lh', 'rh']: - spath = cmp_config._get_lausanne_parcellation('Lausanne2008')[ - parcellation_name]['fs_label_subdir_name'] % hemi + for hemi in ["lh", "rh"]: + spath = ( + cmp_config._get_lausanne_parcellation("Lausanne2008")[parcellation_name][ + "fs_label_subdir_name" + ] + % hemi + ) paths.append(spath) for p in paths: try: - os.makedirs(op.join('.', p)) + os.makedirs(op.join(".", p)) except: pass - if '33' in parcellation_name: + if "33" in parcellation_name: comp = [ - ('rh', 'myatlas_36_rh.gcs', 'rh.myaparc_36.annot', - 'regenerated_rh_36', 'myaparc_36'), - ('rh', 'myatlas_60_rh.gcs', 'rh.myaparc_60.annot', - 'regenerated_rh_60', 'myaparc_60'), - ('lh', 'myatlas_36_lh.gcs', 'lh.myaparc_36.annot', - 'regenerated_lh_36', 'myaparc_36'), - ('lh', 'myatlas_60_lh.gcs', 'lh.myaparc_60.annot', - 'regenerated_lh_60', 'myaparc_60'), + ( + "rh", + "myatlas_36_rh.gcs", + "rh.myaparc_36.annot", + "regenerated_rh_36", + "myaparc_36", + ), + ( + "rh", + "myatlas_60_rh.gcs", + "rh.myaparc_60.annot", + "regenerated_rh_60", + "myaparc_60", + ), + ( + "lh", + "myatlas_36_lh.gcs", + "lh.myaparc_36.annot", + "regenerated_lh_36", + "myaparc_36", + ), + ( + "lh", + "myatlas_60_lh.gcs", + "lh.myaparc_60.annot", + "regenerated_lh_60", + "myaparc_60", + ), ] - elif '60' in parcellation_name: + elif "60" in parcellation_name: comp = [ - ('rh', 'myatlas_60_rh.gcs', 'rh.myaparc_60.annot', - 'regenerated_rh_60', 'myaparc_60'), - ('lh', 'myatlas_60_lh.gcs', 'lh.myaparc_60.annot', - 'regenerated_lh_60', 'myaparc_60'), + ( + "rh", + "myatlas_60_rh.gcs", + "rh.myaparc_60.annot", + "regenerated_rh_60", + "myaparc_60", + ), + ( + "lh", + "myatlas_60_lh.gcs", + "lh.myaparc_60.annot", + "regenerated_lh_60", + "myaparc_60", + ), ] - elif '125' in parcellation_name: + elif "125" in parcellation_name: comp = [ - ('rh', 'myatlas_125_rh.gcs', 'rh.myaparc_125.annot', - 'regenerated_rh_125', 'myaparc_125'), - ('rh', 'myatlas_60_rh.gcs', 'rh.myaparc_60.annot', - 'regenerated_rh_60', 'myaparc_60'), - ('lh', 'myatlas_125_lh.gcs', 'lh.myaparc_125.annot', - 'regenerated_lh_125', 'myaparc_125'), - ('lh', 'myatlas_60_lh.gcs', 'lh.myaparc_60.annot', - 'regenerated_lh_60', 'myaparc_60'), + ( + "rh", + "myatlas_125_rh.gcs", + "rh.myaparc_125.annot", + "regenerated_rh_125", + "myaparc_125", + ), + ( + "rh", + "myatlas_60_rh.gcs", + "rh.myaparc_60.annot", + "regenerated_rh_60", + "myaparc_60", + ), + ( + "lh", + "myatlas_125_lh.gcs", + "lh.myaparc_125.annot", + "regenerated_lh_125", + "myaparc_125", + ), + ( + "lh", + "myatlas_60_lh.gcs", + "lh.myaparc_60.annot", + 
"regenerated_lh_60", + "myaparc_60", + ), ] - elif '250' in parcellation_name: + elif "250" in parcellation_name: comp = [ - ('rh', 'myatlas_250_rh.gcs', 'rh.myaparc_250.annot', - 'regenerated_rh_250', 'myaparc_250'), - ('rh', 'myatlas_60_rh.gcs', 'rh.myaparc_60.annot', - 'regenerated_rh_60', 'myaparc_60'), - ('lh', 'myatlas_250_lh.gcs', 'lh.myaparc_250.annot', - 'regenerated_lh_250', 'myaparc_250'), - ('lh', 'myatlas_60_lh.gcs', 'lh.myaparc_60.annot', - 'regenerated_lh_60', 'myaparc_60'), + ( + "rh", + "myatlas_250_rh.gcs", + "rh.myaparc_250.annot", + "regenerated_rh_250", + "myaparc_250", + ), + ( + "rh", + "myatlas_60_rh.gcs", + "rh.myaparc_60.annot", + "regenerated_rh_60", + "myaparc_60", + ), + ( + "lh", + "myatlas_250_lh.gcs", + "lh.myaparc_250.annot", + "regenerated_lh_250", + "myaparc_250", + ), + ( + "lh", + "myatlas_60_lh.gcs", + "lh.myaparc_60.annot", + "regenerated_lh_60", + "myaparc_60", + ), ] else: comp = [ - ('rh', 'myatlas_36_rh.gcs', 'rh.myaparc_36.annot', - 'regenerated_rh_36', 'myaparc_36'), - ('rh', 'myatlasP1_16_rh.gcs', 'rh.myaparcP1_16.annot', - 'regenerated_rh_500', 'myaparcP1_16'), - ('rh', 'myatlasP17_28_rh.gcs', 'rh.myaparcP17_28.annot', - 'regenerated_rh_500', 'myaparcP17_28'), - ('rh', 'myatlasP29_36_rh.gcs', 'rh.myaparcP29_36.annot', - 'regenerated_rh_500', 'myaparcP29_36'), - ('rh', 'myatlas_60_rh.gcs', 'rh.myaparc_60.annot', - 'regenerated_rh_60', 'myaparc_60'), - ('rh', 'myatlas_125_rh.gcs', 'rh.myaparc_125.annot', - 'regenerated_rh_125', 'myaparc_125'), - ('rh', 'myatlas_250_rh.gcs', 'rh.myaparc_250.annot', - 'regenerated_rh_250', 'myaparc_250'), - ('lh', 'myatlas_36_lh.gcs', 'lh.myaparc_36.annot', - 'regenerated_lh_36', 'myaparc_36'), - ('lh', 'myatlasP1_16_lh.gcs', 'lh.myaparcP1_16.annot', - 'regenerated_lh_500', 'myaparcP1_16'), - ('lh', 'myatlasP17_28_lh.gcs', 'lh.myaparcP17_28.annot', - 'regenerated_lh_500', 'myaparcP17_28'), - ('lh', 'myatlasP29_36_lh.gcs', 'lh.myaparcP29_36.annot', - 'regenerated_lh_500', 'myaparcP29_36'), - ('lh', 'myatlas_60_lh.gcs', 'lh.myaparc_60.annot', - 'regenerated_lh_60', 'myaparc_60'), - ('lh', 'myatlas_125_lh.gcs', 'lh.myaparc_125.annot', - 'regenerated_lh_125', 'myaparc_125'), - ('lh', 'myatlas_250_lh.gcs', 'lh.myaparc_250.annot', - 'regenerated_lh_250', 'myaparc_250'), + ( + "rh", + "myatlas_36_rh.gcs", + "rh.myaparc_36.annot", + "regenerated_rh_36", + "myaparc_36", + ), + ( + "rh", + "myatlasP1_16_rh.gcs", + "rh.myaparcP1_16.annot", + "regenerated_rh_500", + "myaparcP1_16", + ), + ( + "rh", + "myatlasP17_28_rh.gcs", + "rh.myaparcP17_28.annot", + "regenerated_rh_500", + "myaparcP17_28", + ), + ( + "rh", + "myatlasP29_36_rh.gcs", + "rh.myaparcP29_36.annot", + "regenerated_rh_500", + "myaparcP29_36", + ), + ( + "rh", + "myatlas_60_rh.gcs", + "rh.myaparc_60.annot", + "regenerated_rh_60", + "myaparc_60", + ), + ( + "rh", + "myatlas_125_rh.gcs", + "rh.myaparc_125.annot", + "regenerated_rh_125", + "myaparc_125", + ), + ( + "rh", + "myatlas_250_rh.gcs", + "rh.myaparc_250.annot", + "regenerated_rh_250", + "myaparc_250", + ), + ( + "lh", + "myatlas_36_lh.gcs", + "lh.myaparc_36.annot", + "regenerated_lh_36", + "myaparc_36", + ), + ( + "lh", + "myatlasP1_16_lh.gcs", + "lh.myaparcP1_16.annot", + "regenerated_lh_500", + "myaparcP1_16", + ), + ( + "lh", + "myatlasP17_28_lh.gcs", + "lh.myaparcP17_28.annot", + "regenerated_lh_500", + "myaparcP17_28", + ), + ( + "lh", + "myatlasP29_36_lh.gcs", + "lh.myaparcP29_36.annot", + "regenerated_lh_500", + "myaparcP29_36", + ), + ( + "lh", + "myatlas_60_lh.gcs", + "lh.myaparc_60.annot", + 
"regenerated_lh_60", + "myaparc_60", + ), + ( + "lh", + "myatlas_125_lh.gcs", + "lh.myaparc_125.annot", + "regenerated_lh_125", + "myaparc_125", + ), + ( + "lh", + "myatlas_250_lh.gcs", + "lh.myaparc_250.annot", + "regenerated_lh_250", + "myaparc_250", + ), ] log = cmp_config.get_logger() for out in comp: mris_cmd = 'mris_ca_label %s %s "%s/surf/%s.sphere.reg" "%s" "%s" ' % ( - subject_id, out[0], op.join(subjects_dir, subject_id), out[0], + subject_id, + out[0], + op.join(subjects_dir, subject_id), + out[0], cmp_config.get_lausanne_atlas(out[1]), - op.join(fs_label_dir, out[2])) + op.join(fs_label_dir, out[2]), + ) runCmd(mris_cmd, log) - iflogger.info('-----------') + iflogger.info("-----------") annot = '--annotation "%s"' % out[4] mri_an_cmd = 'mri_annotation2label --subject %s --hemi %s --outdir "%s" %s' % ( - subject_id, out[0], op.join(output_dir, out[3]), annot) + subject_id, + out[0], + op.join(output_dir, out[3]), + annot, + ) iflogger.info(mri_an_cmd) runCmd(mri_an_cmd, log) - iflogger.info('-----------') - iflogger.info(os.environ['SUBJECTS_DIR']) + iflogger.info("-----------") + iflogger.info(os.environ["SUBJECTS_DIR"]) # extract cc and unknown to add to tractography mask, we do not want this as a region of interest # in FS 5.0, unknown and corpuscallosum are not available for the 35 scale (why?), # but for the other scales only, take the ones from _60 - rhun = op.join(output_dir, 'rh.unknown.label') - lhun = op.join(output_dir, 'lh.unknown.label') - rhco = op.join(output_dir, 'rh.corpuscallosum.label') - lhco = op.join(output_dir, 'lh.corpuscallosum.label') - shutil.copy( - op.join(output_dir, 'regenerated_rh_60', 'rh.unknown.label'), rhun) - shutil.copy( - op.join(output_dir, 'regenerated_lh_60', 'lh.unknown.label'), lhun) + rhun = op.join(output_dir, "rh.unknown.label") + lhun = op.join(output_dir, "lh.unknown.label") + rhco = op.join(output_dir, "rh.corpuscallosum.label") + lhco = op.join(output_dir, "lh.corpuscallosum.label") + shutil.copy(op.join(output_dir, "regenerated_rh_60", "rh.unknown.label"), rhun) + shutil.copy(op.join(output_dir, "regenerated_lh_60", "lh.unknown.label"), lhun) shutil.copy( - op.join(output_dir, 'regenerated_rh_60', 'rh.corpuscallosum.label'), - rhco) + op.join(output_dir, "regenerated_rh_60", "rh.corpuscallosum.label"), rhco + ) shutil.copy( - op.join(output_dir, 'regenerated_lh_60', 'lh.corpuscallosum.label'), - lhco) + op.join(output_dir, "regenerated_lh_60", "lh.corpuscallosum.label"), lhco + ) - mri_cmd = """mri_label2vol --label "%s" --label "%s" --label "%s" --label "%s" --temp "%s" --o "%s" --identity """ % ( - rhun, lhun, rhco, lhco, - op.join(op.join(subjects_dir, subject_id), 'mri', 'orig.mgz'), - op.join(fs_label_dir, 'cc_unknown.nii.gz')) + mri_cmd = ( + """mri_label2vol --label "%s" --label "%s" --label "%s" --label "%s" --temp "%s" --o "%s" --identity """ + % ( + rhun, + lhun, + rhco, + lhco, + op.join(op.join(subjects_dir, subject_id), "mri", "orig.mgz"), + op.join(fs_label_dir, "cc_unknown.nii.gz"), + ) + ) runCmd(mri_cmd, log) - runCmd('mris_volmask %s' % subject_id, log) + runCmd("mris_volmask %s" % subject_id, log) mri_cmd = 'mri_convert -i "%s/mri/ribbon.mgz" -o "%s/mri/ribbon.nii.gz"' % ( - op.join(subjects_dir, subject_id), op.join(subjects_dir, subject_id)) + op.join(subjects_dir, subject_id), + op.join(subjects_dir, subject_id), + ) runCmd(mri_cmd, log) mri_cmd = 'mri_convert -i "%s/mri/aseg.mgz" -o "%s/mri/aseg.nii.gz"' % ( - op.join(subjects_dir, subject_id), op.join(subjects_dir, subject_id)) + op.join(subjects_dir, 
subject_id), + op.join(subjects_dir, subject_id), + ) runCmd(mri_cmd, log) iflogger.info("[ DONE ]") @@ -165,16 +335,16 @@ def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation): from networks. Iteratively create volume. """ import cmp from cmp.util import runCmd + iflogger.info("Create the ROIs:") output_dir = op.abspath(op.curdir) fs_dir = op.join(subjects_dir, subject_id) cmp_config = cmp.configuration.PipelineConfiguration() cmp_config.parcellation_scheme = "Lausanne2008" log = cmp_config.get_logger() - parval = cmp_config._get_lausanne_parcellation('Lausanne2008')[ - parcellation_name] - pgpath = parval['node_information_graphml'] - aseg = nb.load(op.join(fs_dir, 'mri', 'aseg.nii.gz')) + parval = cmp_config._get_lausanne_parcellation("Lausanne2008")[parcellation_name] + pgpath = parval["node_information_graphml"] + aseg = nb.load(op.join(fs_dir, "mri", "aseg.nii.gz")) asegd = aseg.get_data() # identify cortical voxels, right (3) and left (42) hemispheres @@ -189,7 +359,7 @@ def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation): shape = (25, 25, 25) center = np.array(shape) // 2 # dist: distances from the center of the neighbourhood - dist = np.zeros(shape, dtype='float32') + dist = np.zeros(shape, dtype="float32") for x in range(shape[0]): for y in range(shape[1]): for z in range(shape[2]): @@ -198,8 +368,8 @@ def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation): iflogger.info("Working on parcellation: ") iflogger.info( - cmp_config._get_lausanne_parcellation('Lausanne2008')[ - parcellation_name]) + cmp_config._get_lausanne_parcellation("Lausanne2008")[parcellation_name] + ) iflogger.info("========================") pg = nx.read_graphml(pgpath) # each node represents a brain region @@ -211,52 +381,53 @@ def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation): count = count + 1 iflogger.info(brv) iflogger.info(brk) - if brv['dn_hemisphere'] == 'left': - hemi = 'lh' - elif brv['dn_hemisphere'] == 'right': - hemi = 'rh' - if brv['dn_region'] == 'subcortical': + if brv["dn_hemisphere"] == "left": + hemi = "lh" + elif brv["dn_hemisphere"] == "right": + hemi = "rh" + if brv["dn_region"] == "subcortical": iflogger.info(brv) - iflogger.info('---------------------') - iflogger.info('Work on brain region: %s', brv['dn_region']) - iflogger.info('Freesurfer Name: %s', brv['dn_fsname']) - iflogger.info('Region %s of %s', count, pg.number_of_nodes()) - iflogger.info('---------------------') + iflogger.info("---------------------") + iflogger.info("Work on brain region: %s", brv["dn_region"]) + iflogger.info("Freesurfer Name: %s", brv["dn_fsname"]) + iflogger.info("Region %s of %s", count, pg.number_of_nodes()) + iflogger.info("---------------------") # if it is subcortical, retrieve roi from aseg - idx = np.where(asegd == int(brv['dn_fs_aseg_val'])) - rois[idx] = int(brv['dn_correspondence_id']) + idx = np.where(asegd == int(brv["dn_fs_aseg_val"])) + rois[idx] = int(brv["dn_correspondence_id"]) - elif brv['dn_region'] == 'cortical': + elif brv["dn_region"] == "cortical": iflogger.info(brv) - iflogger.info('---------------------') - iflogger.info('Work on brain region: %s', brv['dn_region']) - iflogger.info('Freesurfer Name: %s', brv['dn_fsname']) - iflogger.info('Region %s of %s', count, pg.number_of_nodes()) - iflogger.info('---------------------') - - labelpath = op.join(output_dir, - parval['fs_label_subdir_name'] % hemi) + iflogger.info("---------------------") + iflogger.info("Work on brain region: 
%s", brv["dn_region"]) + iflogger.info("Freesurfer Name: %s", brv["dn_fsname"]) + iflogger.info("Region %s of %s", count, pg.number_of_nodes()) + iflogger.info("---------------------") + + labelpath = op.join(output_dir, parval["fs_label_subdir_name"] % hemi) # construct .label file name - fname = '%s.%s.label' % (hemi, brv['dn_fsname']) + fname = "%s.%s.label" % (hemi, brv["dn_fsname"]) # execute fs mri_label2vol to generate volume roi from the label file # store it in temporary file to be overwritten for each region mri_cmd = 'mri_label2vol --label "%s" --temp "%s" --o "%s" --identity' % ( - op.join(labelpath, fname), op.join(fs_dir, 'mri', 'orig.mgz'), - op.join(output_dir, 'tmp.nii.gz')) + op.join(labelpath, fname), + op.join(fs_dir, "mri", "orig.mgz"), + op.join(output_dir, "tmp.nii.gz"), + ) runCmd(mri_cmd, log) - tmp = nb.load(op.join(output_dir, 'tmp.nii.gz')) + tmp = nb.load(op.join(output_dir, "tmp.nii.gz")) tmpd = tmp.get_data() # find voxel and set them to intensityvalue in rois idx = np.where(tmpd == 1) - rois[idx] = int(brv['dn_correspondence_id']) + rois[idx] = int(brv["dn_correspondence_id"]) # store volume eg in ROI_scale33.nii.gz - out_roi = op.abspath('ROI_%s.nii.gz' % parcellation_name) + out_roi = op.abspath("ROI_%s.nii.gz" % parcellation_name) # update the header hdr = aseg.header @@ -274,22 +445,20 @@ def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation): # loop throughout all the voxels belonging to the aseg GM volume for j in range(xx.size): if rois[xx[j], yy[j], zz[j]] == 0: - local = extract( - rois, shape, position=(xx[j], yy[j], zz[j]), fill=0) + local = extract(rois, shape, position=(xx[j], yy[j], zz[j]), fill=0) mask = local.copy() mask[np.nonzero(local > 0)] = 1 thisdist = np.multiply(dist, mask) thisdist[np.nonzero(thisdist == 0)] = np.amax(thisdist) - value = np.int_( - local[np.nonzero(thisdist == np.amin(thisdist))]) + value = np.int_(local[np.nonzero(thisdist == np.amin(thisdist))]) if value.size > 1: counts = np.bincount(value) value = np.argmax(counts) rois[xx[j], yy[j], zz[j]] = value # store volume eg in ROIv_scale33.nii.gz - out_roi = op.abspath('ROIv_%s.nii.gz' % parcellation_name) - iflogger.info('Save output image to %s', out_roi) + out_roi = op.abspath("ROIv_%s.nii.gz" % parcellation_name) + iflogger.info("Save output image to %s", out_roi) img = nb.Nifti1Image(rois, aseg.affine, hdr2) nb.save(img, out_roi) @@ -299,14 +468,16 @@ def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation): def create_wm_mask(subject_id, subjects_dir, fs_dir, parcellation_name): import cmp import scipy.ndimage.morphology as nd + iflogger.info("Create white matter mask") fs_dir = op.join(subjects_dir, subject_id) cmp_config = cmp.configuration.PipelineConfiguration() cmp_config.parcellation_scheme = "Lausanne2008" - pgpath = cmp_config._get_lausanne_parcellation('Lausanne2008')[ - parcellation_name]['node_information_graphml'] + pgpath = cmp_config._get_lausanne_parcellation("Lausanne2008")[parcellation_name][ + "node_information_graphml" + ] # load ribbon as basis for white matter mask - fsmask = nb.load(op.join(fs_dir, 'mri', 'ribbon.nii.gz')) + fsmask = nb.load(op.join(fs_dir, "mri", "ribbon.nii.gz")) fsmaskd = fsmask.get_data() wmmask = np.zeros(fsmaskd.shape) @@ -318,7 +489,7 @@ def create_wm_mask(subject_id, subjects_dir, fs_dir, parcellation_name): wmmask[idx_rh] = 1 # remove subcortical nuclei from white matter mask - aseg = nb.load(op.join(fs_dir, 'mri', 'aseg.nii.gz')) + aseg = nb.load(op.join(fs_dir, "mri", 
"aseg.nii.gz")) asegd = aseg.get_data() # need binary erosion function @@ -340,21 +511,36 @@ def create_wm_mask(subject_id, subjects_dir, fs_dir, parcellation_name): # lateral ventricles, thalamus proper and caudate # the latter two removed for better erosion, but put back afterwards - idx = np.where((asegd == 4) | (asegd == 43) | (asegd == 11) | (asegd == 50) - | (asegd == 31) | (asegd == 63) | (asegd == 10) - | (asegd == 49)) + idx = np.where( + (asegd == 4) + | (asegd == 43) + | (asegd == 11) + | (asegd == 50) + | (asegd == 31) + | (asegd == 63) + | (asegd == 10) + | (asegd == 49) + ) csfA[idx] = 1 csfA = imerode(imerode(csfA, se1), se) # thalmus proper and cuadate are put back because they are not lateral ventricles - idx = np.where((asegd == 11) | (asegd == 50) | (asegd == 10) - | (asegd == 49)) + idx = np.where((asegd == 11) | (asegd == 50) | (asegd == 10) | (asegd == 49)) csfA[idx] = 0 # REST CSF, IE 3RD AND 4TH VENTRICULE AND EXTRACEREBRAL CSF - idx = np.where((asegd == 5) | (asegd == 14) | (asegd == 15) | (asegd == 24) - | (asegd == 44) | (asegd == 72) | (asegd == 75) - | (asegd == 76) | (asegd == 213) | (asegd == 221)) + idx = np.where( + (asegd == 5) + | (asegd == 14) + | (asegd == 15) + | (asegd == 24) + | (asegd == 44) + | (asegd == 72) + | (asegd == 75) + | (asegd == 76) + | (asegd == 213) + | (asegd == 221) + ) # 43 ??, 4?? 213?, 221? # more to discuss. for i in [5, 14, 15, 24, 44, 72, 75, 76, 213, 221]: @@ -389,75 +575,88 @@ def create_wm_mask(subject_id, subjects_dir, fs_dir, parcellation_name): remaining[idx] = 1 # now remove all the structures from the white matter - idx = np.where((csfA != 0) | (csfB != 0) | (gr_ncl != 0) - | (remaining != 0)) + idx = np.where((csfA != 0) | (csfB != 0) | (gr_ncl != 0) | (remaining != 0)) wmmask[idx] = 0 iflogger.info( "Removing lateral ventricles and eroded grey nuclei and brainstem from white matter mask" ) # ADD voxels from 'cc_unknown.nii.gz' dataset - ccun = nb.load(op.join(fs_dir, 'label', 'cc_unknown.nii.gz')) + ccun = nb.load(op.join(fs_dir, "label", "cc_unknown.nii.gz")) ccund = ccun.get_data() idx = np.where(ccund != 0) iflogger.info("Add corpus callosum and unknown to wm mask") wmmask[idx] = 1 # check if we should subtract the cortical rois from this parcellation - iflogger.info('Loading ROI_%s.nii.gz to subtract cortical ROIs from white ' - 'matter mask', parcellation_name) - roi = nb.load(op.join(op.curdir, 'ROI_%s.nii.gz' % parcellation_name)) + iflogger.info( + "Loading ROI_%s.nii.gz to subtract cortical ROIs from white " "matter mask", + parcellation_name, + ) + roi = nb.load(op.join(op.curdir, "ROI_%s.nii.gz" % parcellation_name)) roid = roi.get_data() assert roid.shape[0] == wmmask.shape[0] pg = nx.read_graphml(pgpath) for brk, brv in pg.nodes(data=True): - if brv['dn_region'] == 'cortical': - iflogger.info('Subtracting region %s with intensity value %s', - brv['dn_region'], brv['dn_correspondence_id']) - idx = np.where(roid == int(brv['dn_correspondence_id'])) + if brv["dn_region"] == "cortical": + iflogger.info( + "Subtracting region %s with intensity value %s", + brv["dn_region"], + brv["dn_correspondence_id"], + ) + idx = np.where(roid == int(brv["dn_correspondence_id"])) wmmask[idx] = 0 # output white matter mask. 
crop and move it afterwards - wm_out = op.join(fs_dir, 'mri', 'fsmask_1mm.nii.gz') + wm_out = op.join(fs_dir, "mri", "fsmask_1mm.nii.gz") img = nb.Nifti1Image(wmmask, fsmask.affine, fsmask.header) - iflogger.info('Save white matter mask: %s', wm_out) + iflogger.info("Save white matter mask: %s", wm_out) nb.save(img, wm_out) -def crop_and_move_datasets(subject_id, subjects_dir, fs_dir, parcellation_name, - out_roi_file, dilation): +def crop_and_move_datasets( + subject_id, subjects_dir, fs_dir, parcellation_name, out_roi_file, dilation +): from cmp.util import runCmd + fs_dir = op.join(subjects_dir, subject_id) cmp_config = cmp.configuration.PipelineConfiguration() cmp_config.parcellation_scheme = "Lausanne2008" log = cmp_config.get_logger() output_dir = op.abspath(op.curdir) - iflogger.info('Cropping and moving datasets to %s', output_dir) - ds = [(op.join(fs_dir, 'mri', 'aseg.nii.gz'), - op.abspath('aseg.nii.gz')), (op.join(fs_dir, 'mri', - 'ribbon.nii.gz'), - op.abspath('ribbon.nii.gz')), - (op.join(fs_dir, 'mri', 'fsmask_1mm.nii.gz'), - op.abspath('fsmask_1mm.nii.gz')), (op.join(fs_dir, 'label', - 'cc_unknown.nii.gz'), - op.abspath('cc_unknown.nii.gz'))] - - ds.append((op.abspath('ROI_%s.nii.gz' % parcellation_name), - op.abspath('ROI_HR_th.nii.gz'))) + iflogger.info("Cropping and moving datasets to %s", output_dir) + ds = [ + (op.join(fs_dir, "mri", "aseg.nii.gz"), op.abspath("aseg.nii.gz")), + (op.join(fs_dir, "mri", "ribbon.nii.gz"), op.abspath("ribbon.nii.gz")), + (op.join(fs_dir, "mri", "fsmask_1mm.nii.gz"), op.abspath("fsmask_1mm.nii.gz")), + ( + op.join(fs_dir, "label", "cc_unknown.nii.gz"), + op.abspath("cc_unknown.nii.gz"), + ), + ] + + ds.append( + ( + op.abspath("ROI_%s.nii.gz" % parcellation_name), + op.abspath("ROI_HR_th.nii.gz"), + ) + ) if dilation is True: - ds.append((op.abspath('ROIv_%s.nii.gz' % parcellation_name), - op.abspath('ROIv_HR_th.nii.gz'))) - orig = op.join(fs_dir, 'mri', 'orig', '001.mgz') + ds.append( + ( + op.abspath("ROIv_%s.nii.gz" % parcellation_name), + op.abspath("ROIv_HR_th.nii.gz"), + ) + ) + orig = op.join(fs_dir, "mri", "orig", "001.mgz") for d in ds: - iflogger.info('Processing %s:', d[0]) + iflogger.info("Processing %s:", d[0]) if not op.exists(d[0]): - raise Exception('File %s does not exist.' % d[0]) + raise Exception("File %s does not exist." 
% d[0]) # reslice to original volume because the roi creation with freesurfer # changed to 256x256x256 resolution - mri_cmd = 'mri_convert -rl "%s" -rt nearest "%s" -nc "%s"' % (orig, - d[0], - d[1]) + mri_cmd = 'mri_convert -rl "%s" -rt nearest "%s" -nc "%s"' % (orig, d[0], d[1]) runCmd(mri_cmd, log) @@ -473,68 +672,71 @@ def extract(Z, shape, position, fill): ------- R: the neighbourhood of the specified point in Z """ - R = np.ones(shape, dtype=Z.dtype) * \ - fill # initialize output block to the fill value - P = np.array(list(position)).astype( - int) # position coordinates(numpy array) - Rs = np.array(list(R.shape)).astype( - int) # output block dimensions (numpy array) - Zs = np.array(list(Z.shape)).astype( - int) # original volume dimensions (numpy array) + R = ( + np.ones(shape, dtype=Z.dtype) * fill + ) # initialize output block to the fill value + P = np.array(list(position)).astype(int) # position coordinates (numpy array) + Rs = np.array(list(R.shape)).astype(int) # output block dimensions (numpy array) + Zs = np.array(list(Z.shape)).astype(int) # original volume dimensions (numpy array) R_start = np.zeros(len(shape)).astype(int) R_stop = np.array(list(shape)).astype(int) - Z_start = (P - Rs // 2) + Z_start = P - Rs // 2 Z_start_cor = (np.maximum(Z_start, 0)).tolist() # handle borders R_start = R_start + (Z_start_cor - Z_start) Z_stop = (P + Rs // 2) + Rs % 2 Z_stop_cor = (np.minimum(Z_stop, Zs)).tolist() # handle borders R_stop = R_stop - (Z_stop - Z_stop_cor) - R[R_start[0]:R_stop[0], R_start[1]:R_stop[1], R_start[2]:R_stop[ - 2]] = Z[Z_start_cor[0]:Z_stop_cor[0], Z_start_cor[1]:Z_stop_cor[1], - Z_start_cor[2]:Z_stop_cor[2]] + R[R_start[0] : R_stop[0], R_start[1] : R_stop[1], R_start[2] : R_stop[2]] = Z[ + Z_start_cor[0] : Z_stop_cor[0], + Z_start_cor[1] : Z_stop_cor[1], + Z_start_cor[2] : Z_stop_cor[2], + ] return R class ParcellateInputSpec(BaseInterfaceInputSpec): - subject_id = traits.String(mandatory=True, desc='Subject ID') + subject_id = traits.String(mandatory=True, desc="Subject ID") parcellation_name = traits.Enum( - 'scale500', ['scale33', 'scale60', 'scale125', 'scale250', 'scale500'], - usedefault=True) + "scale500", + ["scale33", "scale60", "scale125", "scale250", "scale500"], + usedefault=True, + ) - freesurfer_dir = Directory(exists=True, desc='Freesurfer main directory') - subjects_dir = Directory(exists=True, desc='Freesurfer subjects directory') + freesurfer_dir = Directory(exists=True, desc="Freesurfer main directory") + subjects_dir = Directory(exists=True, desc="Freesurfer subjects directory") out_roi_file = File( - genfile=True, desc='Region of Interest file for connectivity mapping') + genfile=True, desc="Region of Interest file for connectivity mapping" + ) dilation = traits.Bool( False, usedefault=True, - desc='Dilate cortical parcels? Useful for fMRI connectivity') + desc="Dilate cortical parcels? 
Useful for fMRI connectivity", + ) class ParcellateOutputSpec(TraitedSpec): roi_file = File( - exists=True, desc='Region of Interest file for connectivity mapping') - roiv_file = File( - desc='Region of Interest file for fMRI connectivity mapping') - white_matter_mask_file = File(exists=True, desc='White matter mask file') + exists=True, desc="Region of Interest file for connectivity mapping" + ) + roiv_file = File(desc="Region of Interest file for fMRI connectivity mapping") + white_matter_mask_file = File(exists=True, desc="White matter mask file") cc_unknown_file = File( - desc='Image file with regions labelled as unknown cortical structures', - exists=True) - ribbon_file = File( - desc='Image file detailing the cortical ribbon', exists=True) + desc="Image file with regions labelled as unknown cortical structures", + exists=True, + ) + ribbon_file = File(desc="Image file detailing the cortical ribbon", exists=True) aseg_file = File( - desc= - 'Automated segmentation file converted from Freesurfer "subjects" directory', - exists=True) + desc='Automated segmentation file converted from Freesurfer "subjects" directory', + exists=True, + ) roi_file_in_structural_space = File( - desc= - 'ROI image resliced to the dimensions of the original structural image', - exists=True) + desc="ROI image resliced to the dimensions of the original structural image", + exists=True, + ) dilated_roi_file_in_structural_space = File( - desc= - 'dilated ROI image resliced to the dimensions of the original structural image' + desc="dilated ROI image resliced to the dimensions of the original structural image" ) @@ -560,53 +762,66 @@ class Parcellate(LibraryBaseInterface): input_spec = ParcellateInputSpec output_spec = ParcellateOutputSpec - _pkg = 'cmp' - imports = ('scipy', ) + _pkg = "cmp" + imports = ("scipy",) def _run_interface(self, runtime): if self.inputs.subjects_dir: - os.environ.update({'SUBJECTS_DIR': self.inputs.subjects_dir}) + os.environ.update({"SUBJECTS_DIR": self.inputs.subjects_dir}) if not os.path.exists( - op.join(self.inputs.subjects_dir, self.inputs.subject_id)): + op.join(self.inputs.subjects_dir, self.inputs.subject_id) + ): raise Exception iflogger.info("ROI_HR_th.nii.gz / fsmask_1mm.nii.gz CREATION") iflogger.info("=============================================") - create_annot_label(self.inputs.subject_id, self.inputs.subjects_dir, - self.inputs.freesurfer_dir, - self.inputs.parcellation_name) - create_roi(self.inputs.subject_id, self.inputs.subjects_dir, - self.inputs.freesurfer_dir, self.inputs.parcellation_name, - self.inputs.dilation) - create_wm_mask(self.inputs.subject_id, self.inputs.subjects_dir, - self.inputs.freesurfer_dir, - self.inputs.parcellation_name) + create_annot_label( + self.inputs.subject_id, + self.inputs.subjects_dir, + self.inputs.freesurfer_dir, + self.inputs.parcellation_name, + ) + create_roi( + self.inputs.subject_id, + self.inputs.subjects_dir, + self.inputs.freesurfer_dir, + self.inputs.parcellation_name, + self.inputs.dilation, + ) + create_wm_mask( + self.inputs.subject_id, + self.inputs.subjects_dir, + self.inputs.freesurfer_dir, + self.inputs.parcellation_name, + ) crop_and_move_datasets( - self.inputs.subject_id, self.inputs.subjects_dir, - self.inputs.freesurfer_dir, self.inputs.parcellation_name, - self.inputs.out_roi_file, self.inputs.dilation) + self.inputs.subject_id, + self.inputs.subjects_dir, + self.inputs.freesurfer_dir, + self.inputs.parcellation_name, + self.inputs.out_roi_file, + self.inputs.dilation, + ) return runtime def 
_list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.out_roi_file): - outputs['roi_file'] = op.abspath(self.inputs.out_roi_file) + outputs["roi_file"] = op.abspath(self.inputs.out_roi_file) else: - outputs['roi_file'] = op.abspath( - self._gen_outfilename('nii.gz', 'ROI')) + outputs["roi_file"] = op.abspath(self._gen_outfilename("nii.gz", "ROI")) if self.inputs.dilation is True: - outputs['roiv_file'] = op.abspath( - self._gen_outfilename('nii.gz', 'ROIv')) - outputs['white_matter_mask_file'] = op.abspath('fsmask_1mm.nii.gz') - outputs['cc_unknown_file'] = op.abspath('cc_unknown.nii.gz') - outputs['ribbon_file'] = op.abspath('ribbon.nii.gz') - outputs['aseg_file'] = op.abspath('aseg.nii.gz') - outputs['roi_file_in_structural_space'] = op.abspath( - 'ROI_HR_th.nii.gz') + outputs["roiv_file"] = op.abspath(self._gen_outfilename("nii.gz", "ROIv")) + outputs["white_matter_mask_file"] = op.abspath("fsmask_1mm.nii.gz") + outputs["cc_unknown_file"] = op.abspath("cc_unknown.nii.gz") + outputs["ribbon_file"] = op.abspath("ribbon.nii.gz") + outputs["aseg_file"] = op.abspath("aseg.nii.gz") + outputs["roi_file_in_structural_space"] = op.abspath("ROI_HR_th.nii.gz") if self.inputs.dilation is True: - outputs['dilated_roi_file_in_structural_space'] = op.abspath( - 'ROIv_HR_th.nii.gz') + outputs["dilated_roi_file_in_structural_space"] = op.abspath( + "ROIv_HR_th.nii.gz" + ) return outputs - def _gen_outfilename(self, ext, prefix='ROI'): - return prefix + '_' + self.inputs.parcellation_name + '.' + ext + def _gen_outfilename(self, ext, prefix="ROI"): + return prefix + "_" + self.inputs.parcellation_name + "." + ext diff --git a/nipype/interfaces/cmtk/tests/test_auto_AverageNetworks.py b/nipype/interfaces/cmtk/tests/test_auto_AverageNetworks.py index 5ed036dabd..41d3f6ecce 100644 --- a/nipype/interfaces/cmtk/tests/test_auto_AverageNetworks.py +++ b/nipype/interfaces/cmtk/tests/test_auto_AverageNetworks.py @@ -4,21 +4,23 @@ def test_AverageNetworks_inputs(): input_map = dict( - group_id=dict(usedefault=True, ), - in_files=dict(mandatory=True, ), - out_gexf_groupavg=dict(extensions=None, ), - out_gpickled_groupavg=dict(extensions=None, ), - resolution_network_file=dict(extensions=None, ), + group_id=dict(usedefault=True,), + in_files=dict(mandatory=True,), + out_gexf_groupavg=dict(extensions=None,), + out_gpickled_groupavg=dict(extensions=None,), + resolution_network_file=dict(extensions=None,), ) inputs = AverageNetworks.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AverageNetworks_outputs(): output_map = dict( - gexf_groupavg=dict(extensions=None, ), - gpickled_groupavg=dict(extensions=None, ), + gexf_groupavg=dict(extensions=None,), + gpickled_groupavg=dict(extensions=None,), matlab_groupavgs=dict(), ) outputs = AverageNetworks.output_spec() diff --git a/nipype/interfaces/cmtk/tests/test_auto_CFFConverter.py b/nipype/interfaces/cmtk/tests/test_auto_CFFConverter.py index 7a3f837709..43240defab 100644 --- a/nipype/interfaces/cmtk/tests/test_auto_CFFConverter.py +++ b/nipype/interfaces/cmtk/tests/test_auto_CFFConverter.py @@ -6,7 +6,7 @@ def test_CFFConverter_inputs(): input_map = dict( creator=dict(), data_files=dict(), - description=dict(usedefault=True, ), + description=dict(usedefault=True,), email=dict(), gifti_labels=dict(), gifti_surfaces=dict(), @@ -14,16 +14,13 @@ def test_CFFConverter_inputs(): graphml_networks=dict(), license=dict(), 
nifti_volumes=dict(), - out_file=dict( - extensions=None, - usedefault=True, - ), + out_file=dict(extensions=None, usedefault=True,), publisher=dict(), references=dict(), relation=dict(), rights=dict(), script_files=dict(), - species=dict(usedefault=True, ), + species=dict(usedefault=True,), timeseries_files=dict(), title=dict(), tract_files=dict(), @@ -33,8 +30,10 @@ def test_CFFConverter_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CFFConverter_outputs(): - output_map = dict(connectome_file=dict(extensions=None, ), ) + output_map = dict(connectome_file=dict(extensions=None,),) outputs = CFFConverter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/cmtk/tests/test_auto_CreateMatrix.py b/nipype/interfaces/cmtk/tests/test_auto_CreateMatrix.py index 4939b9301a..3e68292557 100644 --- a/nipype/interfaces/cmtk/tests/test_auto_CreateMatrix.py +++ b/nipype/interfaces/cmtk/tests/test_auto_CreateMatrix.py @@ -4,73 +4,45 @@ def test_CreateMatrix_inputs(): input_map = dict( - count_region_intersections=dict(usedefault=True, ), - out_endpoint_array_name=dict( - extensions=None, - genfile=True, - ), - out_fiber_length_std_matrix_mat_file=dict( - extensions=None, - genfile=True, - ), - out_intersection_matrix_mat_file=dict( - extensions=None, - genfile=True, - ), - out_matrix_file=dict( - extensions=None, - genfile=True, - ), - out_matrix_mat_file=dict( - extensions=None, - usedefault=True, - ), - out_mean_fiber_length_matrix_mat_file=dict( - extensions=None, - genfile=True, - ), - out_median_fiber_length_matrix_mat_file=dict( - extensions=None, - genfile=True, - ), - resolution_network_file=dict( - extensions=None, - mandatory=True, - ), - roi_file=dict( - extensions=None, - mandatory=True, - ), - tract_file=dict( - extensions=None, - mandatory=True, - ), + count_region_intersections=dict(usedefault=True,), + out_endpoint_array_name=dict(extensions=None, genfile=True,), + out_fiber_length_std_matrix_mat_file=dict(extensions=None, genfile=True,), + out_intersection_matrix_mat_file=dict(extensions=None, genfile=True,), + out_matrix_file=dict(extensions=None, genfile=True,), + out_matrix_mat_file=dict(extensions=None, usedefault=True,), + out_mean_fiber_length_matrix_mat_file=dict(extensions=None, genfile=True,), + out_median_fiber_length_matrix_mat_file=dict(extensions=None, genfile=True,), + resolution_network_file=dict(extensions=None, mandatory=True,), + roi_file=dict(extensions=None, mandatory=True,), + tract_file=dict(extensions=None, mandatory=True,), ) inputs = CreateMatrix.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CreateMatrix_outputs(): output_map = dict( - endpoint_file=dict(extensions=None, ), - endpoint_file_mm=dict(extensions=None, ), - fiber_label_file=dict(extensions=None, ), - fiber_labels_noorphans=dict(extensions=None, ), - fiber_length_file=dict(extensions=None, ), - fiber_length_std_matrix_mat_file=dict(extensions=None, ), + endpoint_file=dict(extensions=None,), + endpoint_file_mm=dict(extensions=None,), + fiber_label_file=dict(extensions=None,), + fiber_labels_noorphans=dict(extensions=None,), + fiber_length_file=dict(extensions=None,), + fiber_length_std_matrix_mat_file=dict(extensions=None,), filtered_tractographies=dict(), - filtered_tractography=dict(extensions=None, ), - 
filtered_tractography_by_intersections=dict(extensions=None, ), - intersection_matrix_file=dict(extensions=None, ), - intersection_matrix_mat_file=dict(extensions=None, ), + filtered_tractography=dict(extensions=None,), + filtered_tractography_by_intersections=dict(extensions=None,), + intersection_matrix_file=dict(extensions=None,), + intersection_matrix_mat_file=dict(extensions=None,), matlab_matrix_files=dict(), - matrix_file=dict(extensions=None, ), + matrix_file=dict(extensions=None,), matrix_files=dict(), - matrix_mat_file=dict(extensions=None, ), - mean_fiber_length_matrix_mat_file=dict(extensions=None, ), - median_fiber_length_matrix_mat_file=dict(extensions=None, ), - stats_file=dict(extensions=None, ), + matrix_mat_file=dict(extensions=None,), + mean_fiber_length_matrix_mat_file=dict(extensions=None,), + median_fiber_length_matrix_mat_file=dict(extensions=None,), + stats_file=dict(extensions=None,), ) outputs = CreateMatrix.output_spec() diff --git a/nipype/interfaces/cmtk/tests/test_auto_CreateNodes.py b/nipype/interfaces/cmtk/tests/test_auto_CreateNodes.py index 95b810459c..95023590d2 100644 --- a/nipype/interfaces/cmtk/tests/test_auto_CreateNodes.py +++ b/nipype/interfaces/cmtk/tests/test_auto_CreateNodes.py @@ -4,26 +4,19 @@ def test_CreateNodes_inputs(): input_map = dict( - out_filename=dict( - extensions=None, - usedefault=True, - ), - resolution_network_file=dict( - extensions=None, - mandatory=True, - ), - roi_file=dict( - extensions=None, - mandatory=True, - ), + out_filename=dict(extensions=None, usedefault=True,), + resolution_network_file=dict(extensions=None, mandatory=True,), + roi_file=dict(extensions=None, mandatory=True,), ) inputs = CreateNodes.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CreateNodes_outputs(): - output_map = dict(node_network=dict(extensions=None, ), ) + output_map = dict(node_network=dict(extensions=None,),) outputs = CreateNodes.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/cmtk/tests/test_auto_MergeCNetworks.py b/nipype/interfaces/cmtk/tests/test_auto_MergeCNetworks.py index 3ba3588882..30aae80243 100644 --- a/nipype/interfaces/cmtk/tests/test_auto_MergeCNetworks.py +++ b/nipype/interfaces/cmtk/tests/test_auto_MergeCNetworks.py @@ -4,19 +4,18 @@ def test_MergeCNetworks_inputs(): input_map = dict( - in_files=dict(mandatory=True, ), - out_file=dict( - extensions=None, - usedefault=True, - ), + in_files=dict(mandatory=True,), + out_file=dict(extensions=None, usedefault=True,), ) inputs = MergeCNetworks.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MergeCNetworks_outputs(): - output_map = dict(connectome_file=dict(extensions=None, ), ) + output_map = dict(connectome_file=dict(extensions=None,),) outputs = MergeCNetworks.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/cmtk/tests/test_auto_NetworkBasedStatistic.py b/nipype/interfaces/cmtk/tests/test_auto_NetworkBasedStatistic.py index fe805cb134..af1c68fca2 100644 --- a/nipype/interfaces/cmtk/tests/test_auto_NetworkBasedStatistic.py +++ b/nipype/interfaces/cmtk/tests/test_auto_NetworkBasedStatistic.py @@ -4,25 +4,27 @@ def test_NetworkBasedStatistic_inputs(): input_map = dict( - edge_key=dict(usedefault=True, ), - in_group1=dict(mandatory=True, ), - 
in_group2=dict(mandatory=True, ), - node_position_network=dict(extensions=None, ), - number_of_permutations=dict(usedefault=True, ), - out_nbs_network=dict(extensions=None, ), - out_nbs_pval_network=dict(extensions=None, ), - t_tail=dict(usedefault=True, ), - threshold=dict(usedefault=True, ), + edge_key=dict(usedefault=True,), + in_group1=dict(mandatory=True,), + in_group2=dict(mandatory=True,), + node_position_network=dict(extensions=None,), + number_of_permutations=dict(usedefault=True,), + out_nbs_network=dict(extensions=None,), + out_nbs_pval_network=dict(extensions=None,), + t_tail=dict(usedefault=True,), + threshold=dict(usedefault=True,), ) inputs = NetworkBasedStatistic.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_NetworkBasedStatistic_outputs(): output_map = dict( - nbs_network=dict(extensions=None, ), - nbs_pval_network=dict(extensions=None, ), + nbs_network=dict(extensions=None,), + nbs_pval_network=dict(extensions=None,), network_files=dict(), ) outputs = NetworkBasedStatistic.output_spec() diff --git a/nipype/interfaces/cmtk/tests/test_auto_NetworkXMetrics.py b/nipype/interfaces/cmtk/tests/test_auto_NetworkXMetrics.py index 638fe596f5..820b447885 100644 --- a/nipype/interfaces/cmtk/tests/test_auto_NetworkXMetrics.py +++ b/nipype/interfaces/cmtk/tests/test_auto_NetworkXMetrics.py @@ -4,61 +4,39 @@ def test_NetworkXMetrics_inputs(): input_map = dict( - compute_clique_related_measures=dict(usedefault=True, ), - in_file=dict( - extensions=None, - mandatory=True, - ), - out_edge_metrics_matlab=dict( - extensions=None, - genfile=True, - ), - out_global_metrics_matlab=dict( - extensions=None, - genfile=True, - ), - out_k_core=dict( - extensions=None, - usedefault=True, - ), - out_k_crust=dict( - extensions=None, - usedefault=True, - ), - out_k_shell=dict( - extensions=None, - usedefault=True, - ), - out_node_metrics_matlab=dict( - extensions=None, - genfile=True, - ), - out_pickled_extra_measures=dict( - extensions=None, - usedefault=True, - ), - treat_as_weighted_graph=dict(usedefault=True, ), + compute_clique_related_measures=dict(usedefault=True,), + in_file=dict(extensions=None, mandatory=True,), + out_edge_metrics_matlab=dict(extensions=None, genfile=True,), + out_global_metrics_matlab=dict(extensions=None, genfile=True,), + out_k_core=dict(extensions=None, usedefault=True,), + out_k_crust=dict(extensions=None, usedefault=True,), + out_k_shell=dict(extensions=None, usedefault=True,), + out_node_metrics_matlab=dict(extensions=None, genfile=True,), + out_pickled_extra_measures=dict(extensions=None, usedefault=True,), + treat_as_weighted_graph=dict(usedefault=True,), ) inputs = NetworkXMetrics.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_NetworkXMetrics_outputs(): output_map = dict( edge_measure_networks=dict(), - edge_measures_matlab=dict(extensions=None, ), - global_measures_matlab=dict(extensions=None, ), + edge_measures_matlab=dict(extensions=None,), + global_measures_matlab=dict(extensions=None,), gpickled_network_files=dict(), - k_core=dict(extensions=None, ), - k_crust=dict(extensions=None, ), + k_core=dict(extensions=None,), + k_crust=dict(extensions=None,), k_networks=dict(), - k_shell=dict(extensions=None, ), + k_shell=dict(extensions=None,), matlab_dict_measures=dict(), matlab_matrix_files=dict(), 
node_measure_networks=dict(), - node_measures_matlab=dict(extensions=None, ), - pickled_extra_measures=dict(extensions=None, ), + node_measures_matlab=dict(extensions=None,), + pickled_extra_measures=dict(extensions=None,), ) outputs = NetworkXMetrics.output_spec() diff --git a/nipype/interfaces/cmtk/tests/test_auto_Parcellate.py b/nipype/interfaces/cmtk/tests/test_auto_Parcellate.py index f4d8eb8141..8c380c5704 100644 --- a/nipype/interfaces/cmtk/tests/test_auto_Parcellate.py +++ b/nipype/interfaces/cmtk/tests/test_auto_Parcellate.py @@ -4,14 +4,11 @@ def test_Parcellate_inputs(): input_map = dict( - dilation=dict(usedefault=True, ), + dilation=dict(usedefault=True,), freesurfer_dir=dict(), - out_roi_file=dict( - extensions=None, - genfile=True, - ), - parcellation_name=dict(usedefault=True, ), - subject_id=dict(mandatory=True, ), + out_roi_file=dict(extensions=None, genfile=True,), + parcellation_name=dict(usedefault=True,), + subject_id=dict(mandatory=True,), subjects_dir=dict(), ) inputs = Parcellate.input_spec() @@ -19,16 +16,18 @@ def test_Parcellate_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Parcellate_outputs(): output_map = dict( - aseg_file=dict(extensions=None, ), - cc_unknown_file=dict(extensions=None, ), - dilated_roi_file_in_structural_space=dict(extensions=None, ), - ribbon_file=dict(extensions=None, ), - roi_file=dict(extensions=None, ), - roi_file_in_structural_space=dict(extensions=None, ), - roiv_file=dict(extensions=None, ), - white_matter_mask_file=dict(extensions=None, ), + aseg_file=dict(extensions=None,), + cc_unknown_file=dict(extensions=None,), + dilated_roi_file_in_structural_space=dict(extensions=None,), + ribbon_file=dict(extensions=None,), + roi_file=dict(extensions=None,), + roi_file_in_structural_space=dict(extensions=None,), + roiv_file=dict(extensions=None,), + white_matter_mask_file=dict(extensions=None,), ) outputs = Parcellate.output_spec() diff --git a/nipype/interfaces/cmtk/tests/test_auto_ROIGen.py b/nipype/interfaces/cmtk/tests/test_auto_ROIGen.py index da70979685..2191f940ac 100644 --- a/nipype/interfaces/cmtk/tests/test_auto_ROIGen.py +++ b/nipype/interfaces/cmtk/tests/test_auto_ROIGen.py @@ -4,34 +4,23 @@ def test_ROIGen_inputs(): input_map = dict( - LUT_file=dict( - extensions=None, - xor=['use_freesurfer_LUT'], - ), - aparc_aseg_file=dict( - extensions=None, - mandatory=True, - ), - freesurfer_dir=dict(requires=['use_freesurfer_LUT'], ), - out_dict_file=dict( - extensions=None, - genfile=True, - ), - out_roi_file=dict( - extensions=None, - genfile=True, - ), - use_freesurfer_LUT=dict(xor=['LUT_file'], ), + LUT_file=dict(extensions=None, xor=["use_freesurfer_LUT"],), + aparc_aseg_file=dict(extensions=None, mandatory=True,), + freesurfer_dir=dict(requires=["use_freesurfer_LUT"],), + out_dict_file=dict(extensions=None, genfile=True,), + out_roi_file=dict(extensions=None, genfile=True,), + use_freesurfer_LUT=dict(xor=["LUT_file"],), ) inputs = ROIGen.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ROIGen_outputs(): output_map = dict( - dict_file=dict(extensions=None, ), - roi_file=dict(extensions=None, ), + dict_file=dict(extensions=None,), roi_file=dict(extensions=None,), ) outputs = ROIGen.output_spec() diff --git a/nipype/interfaces/cmtk/tests/test_nbs.py b/nipype/interfaces/cmtk/tests/test_nbs.py 
index a03b00de0f..46da939f1a 100644 --- a/nipype/interfaces/cmtk/tests/test_nbs.py +++ b/nipype/interfaces/cmtk/tests/test_nbs.py @@ -6,7 +6,7 @@ have_cv = True try: - package_check('cviewer') + package_check("cviewer") except Exception as e: have_cv = False @@ -18,15 +18,14 @@ def creating_graphs(tmpdir): for idx, name in enumerate(graphnames): graph = np.random.rand(10, 10) G = nx.from_numpy_matrix(graph) - out_file = tmpdir.strpath + graphnames[idx] + '.pck' + out_file = tmpdir.strpath + graphnames[idx] + ".pck" # Save as pck file nx.write_gpickle(G, out_file) graphlist.append(out_file) return graphlist -@pytest.mark.skipif( - have_cv, reason="tests for import error, cviewer available") +@pytest.mark.skipif(have_cv, reason="tests for import error, cviewer available") def test_importerror(creating_graphs, tmpdir): tmpdir.chdir() graphlist = creating_graphs diff --git a/nipype/interfaces/dcm2nii.py b/nipype/interfaces/dcm2nii.py index 0e7c120df8..87d9b7b3df 100644 --- a/nipype/interfaces/dcm2nii.py +++ b/nipype/interfaces/dcm2nii.py @@ -8,19 +8,28 @@ from glob import iglob from ..utils.filemanip import split_filename -from .base import (CommandLine, CommandLineInputSpec, InputMultiPath, traits, - TraitedSpec, OutputMultiPath, isdefined, File, Directory, - PackageInfo) +from .base import ( + CommandLine, + CommandLineInputSpec, + InputMultiPath, + traits, + TraitedSpec, + OutputMultiPath, + isdefined, + File, + Directory, + PackageInfo, +) class Info(PackageInfo): """Handle dcm2niix version information""" - version_cmd = 'dcm2niix' + version_cmd = "dcm2niix" @staticmethod def parse_version(raw_info): - m = re.search(r'version (\S+)', raw_info) + m = re.search(r"version (\S+)", raw_info) return m.groups()[0] if m else None @@ -31,64 +40,63 @@ class Dcm2niiInputSpec(CommandLineInputSpec): position=-1, copyfile=False, mandatory=True, - xor=['source_dir']) + xor=["source_dir"], + ) source_dir = Directory( - exists=True, - argstr="%s", - position=-1, - mandatory=True, - xor=['source_names']) + exists=True, argstr="%s", position=-1, mandatory=True, xor=["source_names"] + ) anonymize = traits.Bool( - True, - argstr='-a', - usedefault=True, - desc="Remove identifying information") + True, argstr="-a", usedefault=True, desc="Remove identifying information" + ) config_file = File( exists=True, argstr="-b %s", genfile=True, - desc="Load settings from specified inifile") + desc="Load settings from specified inifile", + ) collapse_folders = traits.Bool( - True, argstr='-c', usedefault=True, desc="Collapse input folders") + True, argstr="-c", usedefault=True, desc="Collapse input folders" + ) date_in_filename = traits.Bool( - True, argstr='-d', usedefault=True, desc="Date in filename") + True, argstr="-d", usedefault=True, desc="Date in filename" + ) events_in_filename = traits.Bool( - True, - argstr='-e', - usedefault=True, - desc="Events (series/acq) in filename") + True, argstr="-e", usedefault=True, desc="Events (series/acq) in filename" + ) source_in_filename = traits.Bool( - False, argstr='-f', usedefault=True, desc="Source filename") + False, argstr="-f", usedefault=True, desc="Source filename" + ) gzip_output = traits.Bool( - False, argstr='-g', usedefault=True, desc="Gzip output (.gz)") + False, argstr="-g", usedefault=True, desc="Gzip output (.gz)" + ) id_in_filename = traits.Bool( - False, argstr='-i', usedefault=True, desc="ID in filename") + False, argstr="-i", usedefault=True, desc="ID in filename" + ) nii_output = traits.Bool( True, - argstr='-n', + argstr="-n", usedefault=True, - 
desc="Save as .nii - if no, create .hdr/.img pair") + desc="Save as .nii - if no, create .hdr/.img pair", + ) output_dir = Directory( exists=True, - argstr='-o %s', + argstr="-o %s", genfile=True, - desc="Output dir - if unspecified, source directory is used") + desc="Output dir - if unspecified, source directory is used", + ) protocol_in_filename = traits.Bool( - True, argstr='-p', usedefault=True, desc="Protocol in filename") - reorient = traits.Bool( - argstr='-r', desc="Reorient image to nearest orthogonal") + True, argstr="-p", usedefault=True, desc="Protocol in filename" + ) + reorient = traits.Bool(argstr="-r", desc="Reorient image to nearest orthogonal") spm_analyze = traits.Bool( - argstr='-s', xor=['nii_output'], desc="SPM2/Analyze not SPM5/NIfTI") + argstr="-s", xor=["nii_output"], desc="SPM2/Analyze not SPM5/NIfTI" + ) convert_all_pars = traits.Bool( - True, - argstr='-v', - usedefault=True, - desc="Convert every image in directory") + True, argstr="-v", usedefault=True, desc="Convert every image in directory" + ) reorient_and_crop = traits.Bool( - False, - argstr='-x', - usedefault=True, - desc="Reorient and crop 3D images") + False, argstr="-x", usedefault=True, desc="Reorient and crop 3D images" + ) class Dcm2niiOutputSpec(TraitedSpec): @@ -116,34 +124,46 @@ class Dcm2nii(CommandLine): input_spec = Dcm2niiInputSpec output_spec = Dcm2niiOutputSpec - _cmd = 'dcm2nii' + _cmd = "dcm2nii" def _format_arg(self, opt, spec, val): if opt in [ - 'anonymize', 'collapse_folders', 'date_in_filename', - 'events_in_filename', 'source_in_filename', 'gzip_output', - 'id_in_filename', 'nii_output', 'protocol_in_filename', - 'reorient', 'spm_analyze', 'convert_all_pars', - 'reorient_and_crop' + "anonymize", + "collapse_folders", + "date_in_filename", + "events_in_filename", + "source_in_filename", + "gzip_output", + "id_in_filename", + "nii_output", + "protocol_in_filename", + "reorient", + "spm_analyze", + "convert_all_pars", + "reorient_and_crop", ]: spec = deepcopy(spec) if val: - spec.argstr += ' y' + spec.argstr += " y" else: - spec.argstr += ' n' + spec.argstr += " n" val = True - if opt == 'source_names': + if opt == "source_names": return spec.argstr % val[0] return super(Dcm2nii, self)._format_arg(opt, spec, val) def _run_interface(self, runtime): self._config_created = False new_runtime = super(Dcm2nii, self)._run_interface(runtime) - (self.output_files, self.reoriented_files, - self.reoriented_and_cropped_files, self.bvecs, - self.bvals) = self._parse_stdout(new_runtime.stdout) + ( + self.output_files, + self.reoriented_files, + self.reoriented_and_cropped_files, + self.bvecs, + self.bvals, + ) = self._parse_stdout(new_runtime.stdout) if self._config_created: - os.remove('config.ini') + os.remove("config.ini") return new_runtime def _parse_stdout(self, stdout): @@ -158,12 +178,11 @@ def _parse_stdout(self, stdout): if not skip: out_file = None if line.startswith("Saving "): - out_file = line[len("Saving "):] + out_file = line[len("Saving ") :] elif line.startswith("GZip..."): # for gzipped output files are not absolute - fname = line[len("GZip..."):] - if len(files) and os.path.basename( - files[-1]) == fname[:-3]: + fname = line[len("GZip...") :] + if len(files) and os.path.basename(files[-1]) == fname[:-3]: # we are seeing a previously reported conversion # as being saved in gzipped form -- remove the # obsolete, uncompressed file @@ -171,7 +190,7 @@ def _parse_stdout(self, stdout): if isdefined(self.inputs.output_dir): output_dir = self.inputs.output_dir else: - output_dir = 
self._gen_filename('output_dir') + output_dir = self._gen_filename("output_dir") out_file = os.path.abspath(os.path.join(output_dir, fname)) elif line.startswith("Number of diffusion directions "): if last_added_file: @@ -183,15 +202,15 @@ def _parse_stdout(self, stdout): # just above for l in (bvecs, bvals): l[-1] = os.path.join( - os.path.dirname(l[-1]), - 'x%s' % (os.path.basename(l[-1]), )) - elif re.search('.*->(.*)', line): - val = re.search('.*->(.*)', line) + os.path.dirname(l[-1]), "x%s" % (os.path.basename(l[-1]),) + ) + elif re.search(".*->(.*)", line): + val = re.search(".*->(.*)", line) val = val.groups()[0] if isdefined(self.inputs.output_dir): output_dir = self.inputs.output_dir else: - output_dir = self._gen_filename('output_dir') + output_dir = self._gen_filename("output_dir") val = os.path.join(output_dir, val) if os.path.exists(val): out_file = val @@ -203,18 +222,22 @@ def _parse_stdout(self, stdout): continue if line.startswith("Reorienting as "): - reoriented_files.append(line[len("Reorienting as "):]) + reoriented_files.append(line[len("Reorienting as ") :]) skip = True continue elif line.startswith("Cropping NIfTI/Analyze image "): base, filename = os.path.split( - line[len("Cropping NIfTI/Analyze image "):]) + line[len("Cropping NIfTI/Analyze image ") :] + ) filename = "c" + filename - if os.path.exists(os.path.join( - base, filename)) or self.inputs.reorient_and_crop: + if ( + os.path.exists(os.path.join(base, filename)) + or self.inputs.reorient_and_crop + ): # if reorient&crop is true but the file doesn't exist, this errors when setting outputs reoriented_and_cropped_files.append( - os.path.join(base, filename)) + os.path.join(base, filename) + ) skip = True continue @@ -223,18 +246,17 @@ def _parse_stdout(self, stdout): def _list_outputs(self): outputs = self.output_spec().get() - outputs['converted_files'] = self.output_files - outputs['reoriented_files'] = self.reoriented_files - outputs[ - 'reoriented_and_cropped_files'] = self.reoriented_and_cropped_files - outputs['bvecs'] = self.bvecs - outputs['bvals'] = self.bvals + outputs["converted_files"] = self.output_files + outputs["reoriented_files"] = self.reoriented_files + outputs["reoriented_and_cropped_files"] = self.reoriented_and_cropped_files + outputs["bvecs"] = self.bvecs + outputs["bvals"] = self.bvals return outputs def _gen_filename(self, name): - if name == 'output_dir': + if name == "output_dir": return os.getcwd() - elif name == 'config_file': + elif name == "config_file": self._config_created = True config_file = "config.ini" with open(config_file, "w") as f: @@ -251,103 +273,103 @@ class Dcm2niixInputSpec(CommandLineInputSpec): position=-1, copyfile=False, mandatory=True, - desc=('A set of filenames to be converted. Note that the current ' - 'version (1.0.20180328) of dcm2niix converts any files in the ' - 'directory. To only convert specific files they should be in an ' - 'isolated directory'), - xor=['source_dir']) + desc=( + "A set of filenames to be converted. Note that the current " + "version (1.0.20180328) of dcm2niix converts any files in the " + "directory. 
To only convert specific files they should be in an " + "isolated directory" + ), + xor=["source_dir"], + ) source_dir = Directory( exists=True, argstr="%s", position=-1, mandatory=True, - desc='A directory containing dicom files to be converted', - xor=['source_names']) + desc="A directory containing dicom files to be converted", + xor=["source_names"], + ) out_filename = traits.Str( argstr="-f %s", desc="Output filename template (" - "%a=antenna (coil) number, " - "%c=comments, " - "%d=description, " - "%e=echo number, " - "%f=folder name, " - "%i=ID of patient, " - "%j=seriesInstanceUID, " - "%k=studyInstanceUID, " - "%m=manufacturer, " - "%n=name of patient, " - "%p=protocol, " - "%s=series number, " - "%t=time, " - "%u=acquisition number, " - "%v=vendor, " - "%x=study ID; " - "%z=sequence name)") + "%a=antenna (coil) number, " + "%c=comments, " + "%d=description, " + "%e=echo number, " + "%f=folder name, " + "%i=ID of patient, " + "%j=seriesInstanceUID, " + "%k=studyInstanceUID, " + "%m=manufacturer, " + "%n=name of patient, " + "%p=protocol, " + "%s=series number, " + "%t=time, " + "%u=acquisition number, " + "%v=vendor, " + "%x=study ID; " + "%z=sequence name)", + ) output_dir = Directory( - ".", - usedefault=True, - exists=True, - argstr='-o %s', - desc="Output directory") + ".", usedefault=True, exists=True, argstr="-o %s", desc="Output directory" + ) bids_format = traits.Bool( - True, - argstr='-b', - usedefault=True, - desc="Create a BIDS sidecar file") + True, argstr="-b", usedefault=True, desc="Create a BIDS sidecar file" + ) anon_bids = traits.Bool( - argstr='-ba', - requires=["bids_format"], - desc="Anonymize BIDS") + argstr="-ba", requires=["bids_format"], desc="Anonymize BIDS" + ) compress = traits.Enum( - 'y', 'i', 'n', '3', - argstr='-z %s', + "y", + "i", + "n", + "3", + argstr="-z %s", usedefault=True, - desc="Gzip compress images - [y=pigz, i=internal, n=no, 3=no,3D]") + desc="Gzip compress images - [y=pigz, i=internal, n=no, 3=no,3D]", + ) merge_imgs = traits.Bool( - False, - argstr='-m', - usedefault=True, - desc="merge 2D slices from same series") + False, argstr="-m", usedefault=True, desc="merge 2D slices from same series" + ) single_file = traits.Bool( - False, - argstr='-s', - usedefault=True, - desc="Single file mode") - verbose = traits.Bool( - False, - argstr='-v', - usedefault=True, - desc="Verbose output") + False, argstr="-s", usedefault=True, desc="Single file mode" + ) + verbose = traits.Bool(False, argstr="-v", usedefault=True, desc="Verbose output") crop = traits.Bool( - False, - argstr='-x', - usedefault=True, - desc="Crop 3D T1 acquisitions") + False, argstr="-x", usedefault=True, desc="Crop 3D T1 acquisitions" + ) has_private = traits.Bool( False, - argstr='-t', + argstr="-t", usedefault=True, - desc="Text notes including private patient details") + desc="Text notes including private patient details", + ) compression = traits.Enum( - 1, 2, 3, 4, 5, 6, 7, 8, 9, - argstr='-%d', - desc="Gz compression level (1=fastest, 9=smallest)") - comment = traits.Str( - argstr='-c %s', - desc="Comment stored as NIfTI aux_file") + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + argstr="-%d", + desc="Gz compression level (1=fastest, 9=smallest)", + ) + comment = traits.Str(argstr="-c %s", desc="Comment stored as NIfTI aux_file") ignore_deriv = traits.Bool( - argstr='-i', - desc="Ignore derived, localizer and 2D images") + argstr="-i", desc="Ignore derived, localizer and 2D images" + ) series_numbers = InputMultiPath( traits.Str(), - argstr='-n %s...', - 
desc="Selectively convert by series number - can be used up to 16 times") + argstr="-n %s...", + desc="Selectively convert by series number - can be used up to 16 times", + ) philips_float = traits.Bool( - argstr='-p', - desc="Philips precise float (not display) scaling") - to_nrrd = traits.Bool( - argstr="-e", - desc="Export as NRRD instead of NIfTI") + argstr="-p", desc="Philips precise float (not display) scaling" + ) + to_nrrd = traits.Bool(argstr="-e", desc="Export as NRRD instead of NIfTI") class Dcm2niixOutputSpec(TraitedSpec): @@ -388,7 +410,7 @@ class Dcm2niix(CommandLine): input_spec = Dcm2niixInputSpec output_spec = Dcm2niixOutputSpec - _cmd = 'dcm2niix' + _cmd = "dcm2niix" @property def version(self): @@ -396,25 +418,33 @@ def version(self): def _format_arg(self, opt, spec, val): bools = [ - 'bids_format', 'merge_imgs', 'single_file', 'verbose', 'crop', - 'has_private', 'anon_bids', 'ignore_deriv', 'philips_float', - 'to_nrrd', + "bids_format", + "merge_imgs", + "single_file", + "verbose", + "crop", + "has_private", + "anon_bids", + "ignore_deriv", + "philips_float", + "to_nrrd", ] if opt in bools: spec = deepcopy(spec) if val: - spec.argstr += ' y' + spec.argstr += " y" else: - spec.argstr += ' n' + spec.argstr += " n" val = True - if opt == 'source_names': - return spec.argstr % (os.path.dirname(val[0]) or '.') + if opt == "source_names": + return spec.argstr % (os.path.dirname(val[0]) or ".") return super(Dcm2niix, self)._format_arg(opt, spec, val) def _run_interface(self, runtime): # may use return code 1 despite conversion runtime = super(Dcm2niix, self)._run_interface( - runtime, correct_return_codes=(0, 1, )) + runtime, correct_return_codes=(0, 1,) + ) self._parse_files(self._parse_stdout(runtime.stdout)) return runtime @@ -422,7 +452,7 @@ def _parse_stdout(self, stdout): filenames = [] for line in stdout.split("\n"): if line.startswith("Convert "): # output - fname = str(re.search(r'\S+/\S+', line).group(0)) + fname = str(re.search(r"\S+/\S+", line).group(0)) filenames.append(os.path.abspath(fname)) return filenames @@ -438,10 +468,10 @@ def _parse_files(self, filenames): # search for relevant files, and sort accordingly for fl in search_files(filename, outtypes): if ( - fl.endswith(".nii") or - fl.endswith(".gz") or - fl.endswith(".nrrd") or - fl.endswith(".nhdr") + fl.endswith(".nii") + or fl.endswith(".gz") + or fl.endswith(".nrrd") + or fl.endswith(".nhdr") ): outfiles.append(fl) elif fl.endswith(".bval"): @@ -457,12 +487,13 @@ def _parse_files(self, filenames): def _list_outputs(self): outputs = self.output_spec().get() - outputs['converted_files'] = self.output_files - outputs['bvecs'] = self.bvecs - outputs['bvals'] = self.bvals - outputs['bids'] = self.bids + outputs["converted_files"] = self.output_files + outputs["bvecs"] = self.bvecs + outputs["bvals"] = self.bvals + outputs["bids"] = self.bids return outputs + # https://stackoverflow.com/a/4829130 def search_files(prefix, outtypes): return it.chain.from_iterable(iglob(prefix + outtype) for outtype in outtypes) diff --git a/nipype/interfaces/dcmstack.py b/nipype/interfaces/dcmstack.py index 711d84920f..d7223468c8 100644 --- a/nipype/interfaces/dcmstack.py +++ b/nipype/interfaces/dcmstack.py @@ -11,8 +11,17 @@ import nibabel as nb import imghdr -from .base import (TraitedSpec, DynamicTraitedSpec, InputMultiPath, File, - Directory, traits, BaseInterface, isdefined, Undefined) +from .base import ( + TraitedSpec, + DynamicTraitedSpec, + InputMultiPath, + File, + Directory, + traits, + BaseInterface, + isdefined, 
+    Undefined,
+)
 from ..utils import NUMPY_MMAP

 have_dcmstack = True
@@ -27,28 +36,28 @@ def sanitize_path_comp(path_comp):
     result = []
     for char in path_comp:
-        if char not in string.letters + string.digits + '-_.':
-            result.append('_')
+        if char not in string.letters + string.digits + "-_.":
+            result.append("_")
         else:
             result.append(char)
-    return ''.join(result)
+    return "".join(result)


 class NiftiGeneratorBaseInputSpec(TraitedSpec):
-    out_format = traits.Str(desc="String which can be formatted with "
-                            "meta data to create the output filename(s)")
-    out_ext = traits.Str(
-        '.nii.gz', usedefault=True, desc="Determines output file type")
-    out_path = Directory(
-        desc='output path, current working directory if not set')
+    out_format = traits.Str(
+        desc="String which can be formatted with "
+        "meta data to create the output filename(s)"
+    )
+    out_ext = traits.Str(".nii.gz", usedefault=True, desc="Determines output file type")
+    out_path = Directory(desc="output path, current working directory if not set")


 class NiftiGeneratorBase(BaseInterface):
-    '''Base class for interfaces that produce Nifti files, potentially with
-    embedded meta data.'''
+    """Base class for interfaces that produce Nifti files, potentially with
+    embedded meta data."""

     def _get_out_path(self, meta, idx=None):
-        '''Return the output path for the gernerated Nifti.'''
+        """Return the output path for the generated Nifti."""
         if self.inputs.out_format:
             out_fmt = self.inputs.out_format
         else:
@@ -56,16 +65,16 @@ def _get_out_path(self, meta, idx=None):
             # with the provided meta data.
             out_fmt = []
             if idx is not None:
-                out_fmt.append('%03d' % idx)
-            if 'SeriesNumber' in meta:
-                out_fmt.append('%(SeriesNumber)03d')
-            if 'ProtocolName' in meta:
-                out_fmt.append('%(ProtocolName)s')
-            elif 'SeriesDescription' in meta:
-                out_fmt.append('%(SeriesDescription)s')
+                out_fmt.append("%03d" % idx)
+            if "SeriesNumber" in meta:
+                out_fmt.append("%(SeriesNumber)03d")
+            if "ProtocolName" in meta:
+                out_fmt.append("%(ProtocolName)s")
+            elif "SeriesDescription" in meta:
+                out_fmt.append("%(SeriesDescription)s")
             else:
-                out_fmt.append('sequence')
-            out_fmt = '-'.join(out_fmt)
+                out_fmt.append("sequence")
+            out_fmt = "-".join(out_fmt)
         out_fn = (out_fmt % meta) + self.inputs.out_ext
         out_fn = sanitize_path_comp(out_fn)
@@ -90,16 +99,18 @@ class DcmStackInputSpec(NiftiGeneratorBaseInputSpec):
         InputMultiPath(File(exists=True)),
         Directory(exists=True),
         traits.Str(),
-        mandatory=True)
+        mandatory=True,
+    )
     embed_meta = traits.Bool(desc="Embed DICOM meta data into result")
-    exclude_regexes = traits.List(desc="Meta data to exclude, suplementing "
-                                  "any default exclude filters")
-    include_regexes = traits.List(desc="Meta data to include, overriding any "
-                                  "exclude filters")
+    exclude_regexes = traits.List(
+        desc="Meta data to exclude, supplementing " "any default exclude filters"
+    )
+    include_regexes = traits.List(
+        desc="Meta data to include, overriding any " "exclude filters"
+    )
     force_read = traits.Bool(
-        True,
-        usedefault=True,
-        desc=('Force reading files without DICM marker'))
+        True, usedefault=True, desc=("Force reading files without DICM marker")
+    )


 class DcmStackOutputSpec(TraitedSpec):
@@ -107,7 +118,7 @@ class DcmStackOutputSpec(TraitedSpec):


 class DcmStack(NiftiGeneratorBase):
-    '''Create one Nifti file from a set of DICOM files. Can optionally embed
+    """Create one Nifti file from a set of DICOM files. Can optionally embed
     meta data.

     Example
@@ -119,14 +130,15 @@ class DcmStack(NiftiGeneratorBase):
     >>> stacker.run() # doctest: +SKIP
     >>> result.outputs.out_file # doctest: +SKIP
     '/path/to/cwd/sequence.nii.gz'
-    '''
+    """
+
     input_spec = DcmStackInputSpec
     output_spec = DcmStackOutputSpec

     def _get_filelist(self, trait_input):
         if isinstance(trait_input, (str, bytes)):
             if op.isdir(trait_input):
-                return glob(op.join(trait_input, '*.dcm'))
+                return glob(op.join(trait_input, "*.dcm"))
             else:
                 return glob(trait_input)
@@ -140,18 +152,17 @@ def _run_interface(self, runtime):
         exclude_regexes = dcmstack.default_key_excl_res
         if isdefined(self.inputs.exclude_regexes):
             exclude_regexes += self.inputs.exclude_regexes
-        meta_filter = dcmstack.make_key_regex_filter(exclude_regexes,
-                                                     include_regexes)
+        meta_filter = dcmstack.make_key_regex_filter(exclude_regexes, include_regexes)
         stack = dcmstack.DicomStack(meta_filter=meta_filter)
         for src_path in src_paths:
             if not imghdr.what(src_path) == "gif":
-                src_dcm = dicom.read_file(
-                    src_path, force=self.inputs.force_read)
+                src_dcm = dicom.read_file(src_path, force=self.inputs.force_read)
                 stack.add_dcm(src_dcm)
         nii = stack.to_nifti(embed_meta=True)
         nw = NiftiWrapper(nii)
-        self.out_path = \
-            self._get_out_path(nw.meta_ext.get_class_dict(('global', 'const')))
+        self.out_path = self._get_out_path(
+            nw.meta_ext.get_class_dict(("global", "const"))
+        )
         if not self.inputs.embed_meta:
             nw.remove_extension()
         nb.save(nii, self.out_path)
@@ -168,8 +179,9 @@ class GroupAndStackOutputSpec(TraitedSpec):


 class GroupAndStack(DcmStack):
-    '''Create (potentially) multiple Nifti files for a set of DICOM files.
-    '''
+    """Create (potentially) multiple Nifti files for a set of DICOM files.
+    """
+
     input_spec = DcmStackInputSpec
     output_spec = GroupAndStackOutputSpec
@@ -180,7 +192,7 @@ def _run_interface(self, runtime):
         self.out_list = []
         for key, stack in list(stacks.items()):
             nw = NiftiWrapper(stack.to_nifti(embed_meta=True))
-            const_meta = nw.meta_ext.get_class_dict(('global', 'const'))
+            const_meta = nw.meta_ext.get_class_dict(("global", "const"))
             out_path = self._get_out_path(const_meta)
             if not self.inputs.embed_meta:
                 nw.remove_extension()
@@ -196,19 +208,22 @@ def _list_outputs(self):


 class LookupMetaInputSpec(TraitedSpec):
-    in_file = File(mandatory=True, exists=True, desc='The input Nifti file')
+    in_file = File(mandatory=True, exists=True, desc="The input Nifti file")
     meta_keys = traits.Either(
         traits.List(),
         traits.Dict(),
         mandatory=True,
-        desc=("List of meta data keys to lookup, or a "
-              "dict where keys specify the meta data "
-              "keys to lookup and the values specify "
-              "the output names"))
+        desc=(
+            "List of meta data keys to lookup, or a "
+            "dict where keys specify the meta data "
+            "keys to lookup and the values specify "
+            "the output names"
+        ),
+    )


 class LookupMeta(BaseInterface):
-    '''Lookup meta data values from a Nifti with embedded meta data.
+    """Lookup meta data values from a Nifti with embedded meta data.

     Example
     -------

     >>> from nipype.interfaces import dcmstack
     >>> lookup = dcmstack.LookupMeta()
     >>> lookup.inputs.in_file = 'functional.nii'
     >>> lookup.inputs.meta_keys = {'RepetitionTime': 'TR', 'EchoTime': 'TE'}
     >>> result = lookup.run() # doctest: +SKIP
     >>> result.outputs.TR # doctest: +SKIP
     9500.0
     >>> result.outputs.TE # doctest: +SKIP
     95.0
-    '''
+    """
+
     input_spec = LookupMetaInputSpec
     output_spec = DynamicTraitedSpec
@@ -267,11 +283,14 @@ def _list_outputs(self):
 class CopyMetaInputSpec(TraitedSpec):
     src_file = File(mandatory=True, exists=True)
     dest_file = File(mandatory=True, exists=True)
-    include_classes = traits.List(desc="List of specific meta data "
-                                  "classifications to include. If not "
-                                  "specified include everything.")
-    exclude_classes = traits.List(desc="List of meta data "
-                                  "classifications to exclude")
+    include_classes = traits.List(
+        desc="List of specific meta data "
+        "classifications to include. If not "
+        "specified include everything."
+    )
+    exclude_classes = traits.List(
+        desc="List of meta data " "classifications to exclude"
+    )


 class CopyMetaOutputSpec(TraitedSpec):
@@ -279,8 +298,9 @@ class CopyMetaOutputSpec(TraitedSpec):


 class CopyMeta(BaseInterface):
-    '''Copy meta data from one Nifti file to another. Useful for preserving
-    meta data after some processing steps.'''
+    """Copy meta data from one Nifti file to another. Useful for preserving
+    meta data after some processing steps."""
+
     input_spec = CopyMetaInputSpec
     output_spec = CopyMetaOutputSpec
@@ -291,14 +311,9 @@ def _run_interface(self, runtime):
         dest = NiftiWrapper(dest_nii, make_empty=True)
         classes = src.meta_ext.get_valid_classes()
         if self.inputs.include_classes:
-            classes = [
-                cls for cls in classes if cls in self.inputs.include_classes
-            ]
+            classes = [cls for cls in classes if cls in self.inputs.include_classes]
         if self.inputs.exclude_classes:
-            classes = [
-                cls for cls in classes
-                if cls not in self.inputs.exclude_classes
-            ]
+            classes = [cls for cls in classes if cls not in self.inputs.exclude_classes]

         for cls in classes:
             src_dict = src.meta_ext.get_class_dict(cls)
@@ -309,15 +324,14 @@ def _run_interface(self, runtime):
         dest.meta_ext.slice_dim = src.meta_ext.slice_dim
         dest.meta_ext.shape = src.meta_ext.shape

-        self.out_path = op.join(os.getcwd(), op.basename(
-            self.inputs.dest_file))
+        self.out_path = op.join(os.getcwd(), op.basename(self.inputs.dest_file))
         dest.to_filename(self.out_path)

         return runtime

     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['dest_file'] = self.out_path
+        outputs["dest_file"] = self.out_path
         return outputs
@@ -326,11 +340,13 @@ class MergeNiftiInputSpec(NiftiGeneratorBaseInputSpec):
     sort_order = traits.Either(
         traits.Str(),
         traits.List(),
-        desc="One or more meta data keys to "
-        "sort files by.")
-    merge_dim = traits.Int(desc="Dimension to merge along. If not "
-                           "specified, the last singular or "
-                           "non-existant dimension is used.")
+        desc="One or more meta data keys to " "sort files by.",
+    )
+    merge_dim = traits.Int(
+        desc="Dimension to merge along. If not "
+        "specified, the last singular or "
+        "non-existent dimension is used."
+    )


 class MergeNiftiOutputSpec(TraitedSpec):
@@ -346,8 +362,9 @@ def key_func(src_nii):


 class MergeNifti(NiftiGeneratorBase):
-    '''Merge multiple Nifti files into one. Merges together meta data
-    extensions as well.'''
+    """Merge multiple Nifti files into one. Merges together meta data
+    extensions as well."""
+
     input_spec = MergeNiftiInputSpec
     output_spec = MergeNiftiOutputSpec
@@ -364,21 +381,23 @@ def _run_interface(self, runtime):
         else:
             merge_dim = self.inputs.merge_dim
         merged = NiftiWrapper.from_sequence(nws, merge_dim)
-        const_meta = merged.meta_ext.get_class_dict(('global', 'const'))
+        const_meta = merged.meta_ext.get_class_dict(("global", "const"))
         self.out_path = self._get_out_path(const_meta)
         nb.save(merged.nii_img, self.out_path)

         return runtime

     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['out_file'] = self.out_path
+        outputs["out_file"] = self.out_path
         return outputs


 class SplitNiftiInputSpec(NiftiGeneratorBaseInputSpec):
     in_file = File(exists=True, mandatory=True, desc="Nifti file to split")
-    split_dim = traits.Int(desc="Dimension to split along. 
If not " - "specified, the last dimension is used.") + split_dim = traits.Int( + desc="Dimension to split along. If not " + "specified, the last dimension is used." + ) class SplitNiftiOutputSpec(TraitedSpec): @@ -386,10 +405,11 @@ class SplitNiftiOutputSpec(TraitedSpec): class SplitNifti(NiftiGeneratorBase): - ''' + """ Split one Nifti file into many along the specified dimension. Each result has an updated meta data extension as well. - ''' + """ + input_spec = SplitNiftiInputSpec output_spec = SplitNiftiOutputSpec @@ -403,7 +423,7 @@ def _run_interface(self, runtime): else: split_dim = self.inputs.split_dim for split_idx, split_nw in enumerate(nw.split(split_dim)): - const_meta = split_nw.meta_ext.get_class_dict(('global', 'const')) + const_meta = split_nw.meta_ext.get_class_dict(("global", "const")) out_path = self._get_out_path(const_meta, idx=split_idx) nb.save(split_nw.nii_img, out_path) self.out_list.append(out_path) @@ -412,5 +432,5 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['out_list'] = self.out_list + outputs["out_list"] = self.out_list return outputs diff --git a/nipype/interfaces/diffusion_toolkit/base.py b/nipype/interfaces/diffusion_toolkit/base.py index bac8e781d1..02c7deceb1 100644 --- a/nipype/interfaces/diffusion_toolkit/base.py +++ b/nipype/interfaces/diffusion_toolkit/base.py @@ -16,7 +16,7 @@ import re from ..base import CommandLine -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" class Info(object): @@ -45,13 +45,12 @@ def version(): Version number as string or None if FSL not found """ - clout = CommandLine( - command='dti_recon', terminal_output='allatonce').run() + clout = CommandLine(command="dti_recon", terminal_output="allatonce").run() if clout.runtime.returncode is not 0: return None dtirecon = clout.runtime.stdout - result = re.search('dti_recon (.*)\n', dtirecon) + result = re.search("dti_recon (.*)\n", dtirecon) version = result.group(0).split()[1] return version diff --git a/nipype/interfaces/diffusion_toolkit/dti.py b/nipype/interfaces/diffusion_toolkit/dti.py index 793641a5d7..765ef6d8b9 100644 --- a/nipype/interfaces/diffusion_toolkit/dti.py +++ b/nipype/interfaces/diffusion_toolkit/dti.py @@ -7,37 +7,41 @@ import re from ...utils.filemanip import fname_presuffix, split_filename, copyfile -from ..base import (TraitedSpec, File, traits, CommandLine, - CommandLineInputSpec, isdefined) +from ..base import ( + TraitedSpec, + File, + traits, + CommandLine, + CommandLineInputSpec, + isdefined, +) -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" class DTIReconInputSpec(CommandLineInputSpec): DWI = File( - desc='Input diffusion volume', - argstr='%s', + desc="Input diffusion volume", + argstr="%s", exists=True, mandatory=True, - position=1) + position=1, + ) out_prefix = traits.Str( - "dti", - desc='Output file prefix', - argstr='%s', - usedefault=True, - position=2) + "dti", desc="Output file prefix", argstr="%s", usedefault=True, position=2 + ) output_type = traits.Enum( - 'nii', - 'analyze', - 'ni1', - 'nii.gz', - argstr='-ot %s', - desc='output file type', - usedefault=True) - bvecs = File( - exists=True, desc='b vectors file', argstr='-gm %s', mandatory=True) - bvals = File(exists=True, desc='b values file', mandatory=True) - n_averages = traits.Int(desc='Number of averages', argstr='-nex %s') + "nii", + "analyze", + "ni1", + "nii.gz", + argstr="-ot %s", + desc="output file type", + usedefault=True, + ) + bvecs = File(exists=True, desc="b vectors 
file", argstr="-gm %s", mandatory=True) + bvals = File(exists=True, desc="b values file", mandatory=True) + n_averages = traits.Int(desc="Number of averages", argstr="-nex %s") image_orientation_vectors = traits.List( traits.Float(), minlen=6, @@ -49,18 +53,20 @@ class DTIReconInputSpec(CommandLineInputSpec): one automatically. this information will be used to determine image orientation, as well as to adjust gradient vectors with oblique angle when""", - argstr="-iop %f") + argstr="-iop %f", + ) oblique_correction = traits.Bool( desc="""when oblique angle(s) applied, some SIEMENS dti protocols do not adjust gradient accordingly, thus it requires adjustment for correct diffusion tensor calculation""", - argstr="-oc") + argstr="-oc", + ) b0_threshold = traits.Float( - desc= - """program will use b0 image with the given threshold to mask out high + desc="""program will use b0 image with the given threshold to mask out high background of fa/adc maps. by default it will calculate threshold automatically. but if it failed, you need to set it manually.""", - argstr="-b0_th") + argstr="-b0_th", + ) class DTIReconOutputSpec(TraitedSpec): @@ -85,28 +91,27 @@ class DTIRecon(CommandLine): input_spec = DTIReconInputSpec output_spec = DTIReconOutputSpec - _cmd = 'dti_recon' + _cmd = "dti_recon" def _create_gradient_matrix(self, bvecs_file, bvals_file): - _gradient_matrix_file = 'gradient_matrix.txt' + _gradient_matrix_file = "gradient_matrix.txt" with open(bvals_file) as fbvals: - bvals = [val for val in re.split(r'\s+', fbvals.readline().strip())] + bvals = [val for val in re.split(r"\s+", fbvals.readline().strip())] with open(bvecs_file) as fbvecs: - bvecs_x = fbvecs.readline().split() - bvecs_y = fbvecs.readline().split() - bvecs_z = fbvecs.readline().split() + bvecs_x = fbvecs.readline().split() + bvecs_y = fbvecs.readline().split() + bvecs_z = fbvecs.readline().split() - with open(_gradient_matrix_file, 'w') as gradient_matrix_f: + with open(_gradient_matrix_file, "w") as gradient_matrix_f: for i in range(len(bvals)): - gradient_matrix_f.write("%s, %s, %s, %s\n" % - (bvecs_x[i], bvecs_y[i], bvecs_z[i], - bvals[i])) + gradient_matrix_f.write( + "%s, %s, %s, %s\n" % (bvecs_x[i], bvecs_y[i], bvecs_z[i], bvals[i]) + ) return _gradient_matrix_file def _format_arg(self, name, spec, value): if name == "bvecs": - new_val = self._create_gradient_matrix(self.inputs.bvecs, - self.inputs.bvals) + new_val = self._create_gradient_matrix(self.inputs.bvecs, self.inputs.bvals) return super(DTIRecon, self)._format_arg("bvecs", spec, new_val) return super(DTIRecon, self)._format_arg(name, spec, value) @@ -115,42 +120,42 @@ def _list_outputs(self): output_type = self.inputs.output_type outputs = self.output_spec().get() - outputs['ADC'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_adc.' + output_type)) - outputs['B0'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_b0.' + output_type)) - outputs['L1'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_e1.' + output_type)) - outputs['L2'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_e2.' + output_type)) - outputs['L3'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_e3.' + output_type)) - outputs['exp'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_exp.' + output_type)) - outputs['FA'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_fa.' 
+ output_type)) - outputs['FA_color'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_fa_color.' + output_type)) - outputs['tensor'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_tensor.' + output_type)) - outputs['V1'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_v1.' + output_type)) - outputs['V2'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_v2.' + output_type)) - outputs['V3'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_v3.' + output_type)) + outputs["ADC"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_adc." + output_type) + ) + outputs["B0"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_b0." + output_type) + ) + outputs["L1"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_e1." + output_type) + ) + outputs["L2"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_e2." + output_type) + ) + outputs["L3"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_e3." + output_type) + ) + outputs["exp"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_exp." + output_type) + ) + outputs["FA"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_fa." + output_type) + ) + outputs["FA_color"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_fa_color." + output_type) + ) + outputs["tensor"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_tensor." + output_type) + ) + outputs["V1"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_v1." + output_type) + ) + outputs["V2"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_v2." + output_type) + ) + outputs["V3"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_v3." + output_type) + ) return outputs @@ -158,89 +163,87 @@ def _list_outputs(self): class DTITrackerInputSpec(CommandLineInputSpec): tensor_file = File(exists=True, desc="reconstructed tensor file") input_type = traits.Enum( - 'nii', - 'analyze', - 'ni1', - 'nii.gz', + "nii", + "analyze", + "ni1", + "nii.gz", desc="""input and output file type. accepted values are: analyze -> analyze format 7.5 ni1 -> nifti format saved in seperate .hdr and .img file nii -> nifti format with one .nii file nii.gz -> nifti format with compression default type is 'nii'""", - argstr="-it %s") + argstr="-it %s", + ) tracking_method = traits.Enum( - 'fact', - 'rk2', - 'tl', - 'sl', + "fact", + "rk2", + "tl", + "sl", desc="""fact -> use FACT method for tracking. this is the default method. rk2 -> use 2nd order runge-kutta method for tracking. tl -> use tensorline method for tracking. sl -> use interpolated streamline method with fixed step-length""", - argstr="-%s") + argstr="-%s", + ) step_length = traits.Float( desc="""set step length, in the unit of minimum voxel size. default value is 0.5 for interpolated streamline method and 0.1 for other methods""", - argstr="-l %f") + argstr="-l %f", + ) angle_threshold = traits.Float( - desc="set angle threshold. default value is 35 degree", - argstr="-at %f") + desc="set angle threshold. default value is 35 degree", argstr="-at %f" + ) angle_threshold_weight = traits.Float( - desc= - "set angle threshold weighting factor. weighting will be be applied \ + desc="set angle threshold weighting factor. 
weighting will be be applied \ on top of the angle_threshold", - argstr="-atw %f") + argstr="-atw %f", + ) random_seed = traits.Int( desc="use random location in a voxel instead of the center of the voxel \ to seed. can also define number of seed per voxel. default is 1", - argstr="-rseed %d") - invert_x = traits.Bool( - desc="invert x component of the vector", argstr="-ix") - invert_y = traits.Bool( - desc="invert y component of the vector", argstr="-iy") - invert_z = traits.Bool( - desc="invert z component of the vector", argstr="-iz") - swap_xy = traits.Bool( - desc="swap x & y vectors while tracking", argstr="-sxy") - swap_yz = traits.Bool( - desc="swap y & z vectors while tracking", argstr="-syz") - swap_zx = traits.Bool( - desc="swap x & z vectors while tracking", argstr="-szx") + argstr="-rseed %d", + ) + invert_x = traits.Bool(desc="invert x component of the vector", argstr="-ix") + invert_y = traits.Bool(desc="invert y component of the vector", argstr="-iy") + invert_z = traits.Bool(desc="invert z component of the vector", argstr="-iz") + swap_xy = traits.Bool(desc="swap x & y vectors while tracking", argstr="-sxy") + swap_yz = traits.Bool(desc="swap y & z vectors while tracking", argstr="-syz") + swap_zx = traits.Bool(desc="swap x & z vectors while tracking", argstr="-szx") mask1_file = File( - desc="first mask image", mandatory=True, argstr="-m %s", position=2) + desc="first mask image", mandatory=True, argstr="-m %s", position=2 + ) mask1_threshold = traits.Float( - desc= - "threshold value for the first mask image, if not given, the program will \ + desc="threshold value for the first mask image, if not given, the program will \ try automatically find the threshold", - position=3) + position=3, + ) mask2_file = File(desc="second mask image", argstr="-m2 %s", position=4) mask2_threshold = traits.Float( - desc= - "threshold value for the second mask image, if not given, the program will \ + desc="threshold value for the second mask image, if not given, the program will \ try automatically find the threshold", - position=5) + position=5, + ) input_data_prefix = traits.Str( "dti", desc="for internal naming use only", position=0, argstr="%s", - usedefault=True) + usedefault=True, + ) output_file = File( - "tracks.trk", - "file containing tracks", - argstr="%s", - position=1, - usedefault=True) + "tracks.trk", "file containing tracks", argstr="%s", position=1, usedefault=True + ) output_mask = File( - desc="output a binary mask file in analyze format", argstr="-om %s") + desc="output a binary mask file in analyze format", argstr="-om %s" + ) primary_vector = traits.Enum( - 'v2', - 'v3', - desc= - "which vector to use for fibre tracking: v2 or v3. If not set use v1", - argstr="-%s") + "v2", + "v3", + desc="which vector to use for fibre tracking: v2 or v3. 
If not set use v1", + argstr="-%s", + ) class DTITrackerOutputSpec(TraitedSpec): @@ -252,21 +255,22 @@ class DTITracker(CommandLine): input_spec = DTITrackerInputSpec output_spec = DTITrackerOutputSpec - _cmd = 'dti_tracker' + _cmd = "dti_tracker" def _run_interface(self, runtime): _, _, ext = split_filename(self.inputs.tensor_file) copyfile( self.inputs.tensor_file, os.path.abspath(self.inputs.input_data_prefix + "_tensor" + ext), - copy=False) + copy=False, + ) return super(DTITracker, self)._run_interface(runtime) def _list_outputs(self): outputs = self.output_spec().get() - outputs['track_file'] = os.path.abspath(self.inputs.output_file) + outputs["track_file"] = os.path.abspath(self.inputs.output_file) if isdefined(self.inputs.output_mask) and self.inputs.output_mask: - outputs['mask_file'] = os.path.abspath(self.inputs.output_mask) + outputs["mask_file"] = os.path.abspath(self.inputs.output_mask) return outputs diff --git a/nipype/interfaces/diffusion_toolkit/odf.py b/nipype/interfaces/diffusion_toolkit/odf.py index 9c2d6d2505..705a4a5d33 100644 --- a/nipype/interfaces/diffusion_toolkit/odf.py +++ b/nipype/interfaces/diffusion_toolkit/odf.py @@ -7,54 +7,59 @@ import re from ...utils.filemanip import fname_presuffix, split_filename, copyfile -from ..base import (TraitedSpec, File, traits, CommandLine, - CommandLineInputSpec, isdefined) +from ..base import ( + TraitedSpec, + File, + traits, + CommandLine, + CommandLineInputSpec, + isdefined, +) -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" class HARDIMatInputSpec(CommandLineInputSpec): bvecs = File( - exists=True, - desc='b vectors file', - argstr='%s', - position=1, - mandatory=True) - bvals = File(exists=True, desc='b values file', mandatory=True) + exists=True, desc="b vectors file", argstr="%s", position=1, mandatory=True + ) + bvals = File(exists=True, desc="b values file", mandatory=True) out_file = File( "recon_mat.dat", - desc='output matrix file', - argstr='%s', + desc="output matrix file", + argstr="%s", usedefault=True, - position=2) + position=2, + ) order = traits.Int( - argstr='-order %s', - desc= - """maximum order of spherical harmonics. must be even number. default - is 4""") + argstr="-order %s", + desc="""maximum order of spherical harmonics. must be even number. default + is 4""", + ) odf_file = File( exists=True, - argstr='-odf %s', - desc= - """filename that contains the reconstruction points on a HEMI-sphere. - use the pre-set 181 points by default""") + argstr="-odf %s", + desc="""filename that contains the reconstruction points on a HEMI-sphere. + use the pre-set 181 points by default""", + ) reference_file = File( exists=True, - argstr='-ref %s', - desc= - """provide a dicom or nifti image as the reference for the program to + argstr="-ref %s", + desc="""provide a dicom or nifti image as the reference for the program to figure out the image orientation information. if no such info was found in the given image header, the next 5 options -info, etc., will be used if provided. if image orientation info can be found in the given reference, all other 5 image orientation options will - be IGNORED""") + be IGNORED""", + ) image_info = File( exists=True, - argstr='-info %s', + argstr="-info %s", desc="""specify image information file. the image info file is generated from original dicom image by diff_unpack program and contains image orientation and other information needed for reconstruction and - tracking. by default will look into the image folder for .info file""") + tracking. 
by default will look into the image folder for .info file""", + ) image_orientation_vectors = traits.List( traits.Float(), minlen=6, @@ -66,112 +71,107 @@ class HARDIMatInputSpec(CommandLineInputSpec): one automatically. this information will be used to determine image orientation, as well as to adjust gradient vectors with oblique angle when""", - argstr="-iop %f") + argstr="-iop %f", + ) oblique_correction = traits.Bool( desc="""when oblique angle(s) applied, some SIEMENS dti protocols do not adjust gradient accordingly, thus it requires adjustment for correct diffusion tensor calculation""", - argstr="-oc") + argstr="-oc", + ) class HARDIMatOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='output matrix file') + out_file = File(exists=True, desc="output matrix file") class HARDIMat(CommandLine): """Use hardi_mat to calculate a reconstruction matrix from a gradient table """ + input_spec = HARDIMatInputSpec output_spec = HARDIMatOutputSpec - _cmd = 'hardi_mat' + _cmd = "hardi_mat" def _create_gradient_matrix(self, bvecs_file, bvals_file): - _gradient_matrix_file = 'gradient_matrix.txt' - bvals = [ - val for val in re.split('\s+', - open(bvals_file).readline().strip()) - ] + _gradient_matrix_file = "gradient_matrix.txt" + bvals = [val for val in re.split("\s+", open(bvals_file).readline().strip())] bvecs_f = open(bvecs_file) - bvecs_x = [val for val in re.split('\s+', bvecs_f.readline().strip())] - bvecs_y = [val for val in re.split('\s+', bvecs_f.readline().strip())] - bvecs_z = [val for val in re.split('\s+', bvecs_f.readline().strip())] + bvecs_x = [val for val in re.split("\s+", bvecs_f.readline().strip())] + bvecs_y = [val for val in re.split("\s+", bvecs_f.readline().strip())] + bvecs_z = [val for val in re.split("\s+", bvecs_f.readline().strip())] bvecs_f.close() - gradient_matrix_f = open(_gradient_matrix_file, 'w') + gradient_matrix_f = open(_gradient_matrix_file, "w") for i in range(len(bvals)): if int(bvals[i]) == 0: continue - gradient_matrix_f.write("%s %s %s\n" % (bvecs_x[i], bvecs_y[i], - bvecs_z[i])) + gradient_matrix_f.write("%s %s %s\n" % (bvecs_x[i], bvecs_y[i], bvecs_z[i])) gradient_matrix_f.close() return _gradient_matrix_file def _format_arg(self, name, spec, value): if name == "bvecs": - new_val = self._create_gradient_matrix(self.inputs.bvecs, - self.inputs.bvals) + new_val = self._create_gradient_matrix(self.inputs.bvecs, self.inputs.bvals) return super(HARDIMat, self)._format_arg("bvecs", spec, new_val) return super(HARDIMat, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class ODFReconInputSpec(CommandLineInputSpec): DWI = File( - desc='Input raw data', - argstr='%s', - exists=True, - mandatory=True, - position=1) + desc="Input raw data", argstr="%s", exists=True, mandatory=True, position=1 + ) n_directions = traits.Int( - desc='Number of directions', argstr='%s', mandatory=True, position=2) + desc="Number of directions", argstr="%s", mandatory=True, position=2 + ) n_output_directions = traits.Int( - desc='Number of output directions', - argstr='%s', - mandatory=True, - position=3) + desc="Number of output directions", argstr="%s", mandatory=True, position=3 + ) out_prefix = traits.Str( - "odf", - desc='Output file prefix', - argstr='%s', - usedefault=True, - position=4) + "odf", desc="Output file prefix", argstr="%s", usedefault=True, position=4 + ) matrix = File( 
- argstr='-mat %s', + argstr="-mat %s", exists=True, desc="""use given file as reconstruction matrix.""", - mandatory=True) + mandatory=True, + ) n_b0 = traits.Int( - argstr='-b0 %s', + argstr="-b0 %s", desc="""number of b0 scans. by default the program gets this information from the number of directions and number of volumes in the raw data. useful when dealing with incomplete raw data set or only using part of raw data set to reconstruct""", - mandatory=True) + mandatory=True, + ) output_type = traits.Enum( - 'nii', - 'analyze', - 'ni1', - 'nii.gz', - argstr='-ot %s', - desc='output file type', - usedefault=True) + "nii", + "analyze", + "ni1", + "nii.gz", + argstr="-ot %s", + desc="output file type", + usedefault=True, + ) sharpness = traits.Float( desc="""smooth or sharpen the raw data. factor > 0 is smoothing. factor < 0 is sharpening. default value is 0 NOTE: this option applies to DSI study only""", - argstr='-s %f') + argstr="-s %f", + ) filter = traits.Bool( - desc="""apply a filter (e.g. high pass) to the raw image""", - argstr='-f') + desc="""apply a filter (e.g. high pass) to the raw image""", argstr="-f" + ) subtract_background = traits.Bool( - desc="""subtract the background value before reconstruction""", - argstr='-bg') - dsi = traits.Bool(desc="""indicates that the data is dsi""", argstr='-dsi') - output_entropy = traits.Bool(desc="""output entropy map""", argstr='-oe') + desc="""subtract the background value before reconstruction""", argstr="-bg" + ) + dsi = traits.Bool(desc="""indicates that the data is dsi""", argstr="-dsi") + output_entropy = traits.Bool(desc="""output entropy map""", argstr="-oe") image_orientation_vectors = traits.List( traits.Float(), minlen=6, @@ -183,12 +183,14 @@ class ODFReconInputSpec(CommandLineInputSpec): one automatically. this information will be used to determine image orientation, as well as to adjust gradient vectors with oblique angle when""", - argstr="-iop %f") + argstr="-iop %f", + ) oblique_correction = traits.Bool( desc="""when oblique angle(s) applied, some SIEMENS dti protocols do not adjust gradient accordingly, thus it requires adjustment for correct diffusion tensor calculation""", - argstr="-oc") + argstr="-oc", + ) class ODFReconOutputSpec(TraitedSpec): @@ -206,29 +208,29 @@ class ODFRecon(CommandLine): input_spec = ODFReconInputSpec output_spec = ODFReconOutputSpec - _cmd = 'odf_recon' + _cmd = "odf_recon" def _list_outputs(self): out_prefix = self.inputs.out_prefix output_type = self.inputs.output_type outputs = self.output_spec().get() - outputs['B0'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_b0.' + output_type)) - outputs['DWI'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_dwi.' + output_type)) - outputs['max'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_max.' + output_type)) - outputs['ODF'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_odf.' + output_type)) + outputs["B0"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_b0." + output_type) + ) + outputs["DWI"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_dwi." + output_type) + ) + outputs["max"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_max." + output_type) + ) + outputs["ODF"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_odf." 
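# Every _list_outputs in this module derives its paths the same way:
# fname_presuffix("", prefix=out_prefix, suffix=...) simply glues the
# interface's out_prefix onto a per-output suffix and the caller wraps it in
# os.path.abspath. A dependency-free reduction (the "odf"/"nii" defaults are
# taken from the specs above):
import os

def recon_output(out_prefix, tag, output_type):
    # mirrors fname_presuffix("", prefix=out_prefix, suffix="_<tag>.<type>")
    return os.path.abspath("%s_%s.%s" % (out_prefix, tag, output_type))

print(recon_output("odf", "b0", "nii"))   # .../odf_b0.nii
print(recon_output("odf", "odf", "nii"))  # .../odf_odf.nii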
+ output_type) + ) if isdefined(self.inputs.output_entropy): - outputs['entropy'] = os.path.abspath( - fname_presuffix( - "", prefix=out_prefix, suffix='_entropy.' + output_type)) + outputs["entropy"] = os.path.abspath( + fname_presuffix("", prefix=out_prefix, suffix="_entropy." + output_type) + ) return outputs @@ -237,77 +239,73 @@ class ODFTrackerInputSpec(CommandLineInputSpec): max = File(exists=True, mandatory=True) ODF = File(exists=True, mandatory=True) input_data_prefix = traits.Str( - "odf", - desc='recon data prefix', - argstr='%s', - usedefault=True, - position=0) + "odf", desc="recon data prefix", argstr="%s", usedefault=True, position=0 + ) out_file = File( - "tracks.trk", - desc='output track file', - argstr='%s', - usedefault=True, - position=1) + "tracks.trk", desc="output track file", argstr="%s", usedefault=True, position=1 + ) input_output_type = traits.Enum( - 'nii', - 'analyze', - 'ni1', - 'nii.gz', - argstr='-it %s', - desc='input and output file type', - usedefault=True) + "nii", + "analyze", + "ni1", + "nii.gz", + argstr="-it %s", + desc="input and output file type", + usedefault=True, + ) runge_kutta2 = traits.Bool( - argstr='-rk2', + argstr="-rk2", desc="""use 2nd order runge-kutta method for tracking. - default tracking method is non-interpolate streamline""") + default tracking method is non-interpolate streamline""", + ) step_length = traits.Float( - argstr='-l %f', + argstr="-l %f", desc="""set step length, in the unit of minimum voxel size. - default value is 0.1.""") + default value is 0.1.""", + ) angle_threshold = traits.Float( - argstr='-at %f', + argstr="-at %f", desc="""set angle threshold. default value is 35 degree for - default tracking method and 25 for rk2""") + default tracking method and 25 for rk2""", + ) random_seed = traits.Int( - argstr='-rseed %s', + argstr="-rseed %s", desc="""use random location in a voxel instead of the center of the voxel - to seed. can also define number of seed per voxel. default is 1""") - invert_x = traits.Bool( - argstr='-ix', desc='invert x component of the vector') - invert_y = traits.Bool( - argstr='-iy', desc='invert y component of the vector') - invert_z = traits.Bool( - argstr='-iz', desc='invert z component of the vector') - swap_xy = traits.Bool( - argstr='-sxy', desc='swap x and y vectors while tracking') - swap_yz = traits.Bool( - argstr='-syz', desc='swap y and z vectors while tracking') - swap_zx = traits.Bool( - argstr='-szx', desc='swap x and z vectors while tracking') - disc = traits.Bool(argstr='-disc', desc='use disc tracking') + to seed. can also define number of seed per voxel. 
default is 1""", + ) + invert_x = traits.Bool(argstr="-ix", desc="invert x component of the vector") + invert_y = traits.Bool(argstr="-iy", desc="invert y component of the vector") + invert_z = traits.Bool(argstr="-iz", desc="invert z component of the vector") + swap_xy = traits.Bool(argstr="-sxy", desc="swap x and y vectors while tracking") + swap_yz = traits.Bool(argstr="-syz", desc="swap y and z vectors while tracking") + swap_zx = traits.Bool(argstr="-szx", desc="swap x and z vectors while tracking") + disc = traits.Bool(argstr="-disc", desc="use disc tracking") mask1_file = File( - desc="first mask image", mandatory=True, argstr="-m %s", position=2) + desc="first mask image", mandatory=True, argstr="-m %s", position=2 + ) mask1_threshold = traits.Float( - desc= - "threshold value for the first mask image, if not given, the program will \ + desc="threshold value for the first mask image, if not given, the program will \ try automatically find the threshold", - position=3) + position=3, + ) mask2_file = File(desc="second mask image", argstr="-m2 %s", position=4) mask2_threshold = traits.Float( - desc= - "threshold value for the second mask image, if not given, the program will \ + desc="threshold value for the second mask image, if not given, the program will \ try automatically find the threshold", - position=5) + position=5, + ) limit = traits.Int( - argstr='-limit %d', + argstr="-limit %d", desc="""in some special case, such as heart data, some track may go into infinite circle and take long time to stop. this option allows - setting a limit for the longest tracking steps (voxels)""") + setting a limit for the longest tracking steps (voxels)""", + ) dsi = traits.Bool( - argstr='-dsi', + argstr="-dsi", desc=""" specify the input odf data is dsi. because dsi recon uses fixed pre-calculated matrix, some special orientation patch needs to - be applied to keep dti/dsi/q-ball consistent.""") + be applied to keep dti/dsi/q-ball consistent.""", + ) image_orientation_vectors = traits.List( traits.Float(), minlen=6, @@ -319,24 +317,23 @@ class ODFTrackerInputSpec(CommandLineInputSpec): one automatically. this information will be used to determine image orientation, as well as to adjust gradient vectors with oblique angle when""", - argstr="-iop %f") + argstr="-iop %f", + ) slice_order = traits.Int( - argstr='-sorder %d', - desc= - 'set the slice order. 1 means normal, -1 means reversed. default value is 1' + argstr="-sorder %d", + desc="set the slice order. 1 means normal, -1 means reversed. default value is 1", ) voxel_order = traits.Enum( - 'RAS', - 'RPS', - 'RAI', - 'RPI', - 'LAI', - 'LAS', - 'LPS', - 'LPI', - argstr='-vorder %s', - desc= - """specify the voxel order in RL/AP/IS (human brain) reference. must be + "RAS", + "RPS", + "RAI", + "RPI", + "LAI", + "LAS", + "LPS", + "LPI", + argstr="-vorder %s", + desc="""specify the voxel order in RL/AP/IS (human brain) reference. must be 3 letters with no space in between. for example, RAS means the voxel row is from L->R, the column is from P->A and the slice order is from I->S. @@ -346,11 +343,12 @@ class ODFTrackerInputSpec(CommandLineInputSpec): sagittal image is PIL. 
this information also is NOT needed for tracking but will be saved in the track file and is essential for track display to map onto - the right coordinates""") + the right coordinates""", + ) class ODFTrackerOutputSpec(TraitedSpec): - track_file = File(exists=True, desc='output track file') + track_file = File(exists=True, desc="output track file") class ODFTracker(CommandLine): @@ -360,24 +358,26 @@ class ODFTracker(CommandLine): input_spec = ODFTrackerInputSpec output_spec = ODFTrackerOutputSpec - _cmd = 'odf_tracker' + _cmd = "odf_tracker" def _run_interface(self, runtime): _, _, ext = split_filename(self.inputs.max) copyfile( self.inputs.max, os.path.abspath(self.inputs.input_data_prefix + "_max" + ext), - copy=False) + copy=False, + ) _, _, ext = split_filename(self.inputs.ODF) copyfile( self.inputs.ODF, os.path.abspath(self.inputs.input_data_prefix + "_odf" + ext), - copy=False) + copy=False, + ) return super(ODFTracker, self)._run_interface(runtime) def _list_outputs(self): outputs = self.output_spec().get() - outputs['track_file'] = os.path.abspath(self.inputs.out_file) + outputs["track_file"] = os.path.abspath(self.inputs.out_file) return outputs diff --git a/nipype/interfaces/diffusion_toolkit/postproc.py b/nipype/interfaces/diffusion_toolkit/postproc.py index 19cafe8d7d..534b747a0d 100644 --- a/nipype/interfaces/diffusion_toolkit/postproc.py +++ b/nipype/interfaces/diffusion_toolkit/postproc.py @@ -5,10 +5,16 @@ """ import os -from ..base import (TraitedSpec, File, traits, CommandLine, InputMultiPath, - CommandLineInputSpec) +from ..base import ( + TraitedSpec, + File, + traits, + CommandLine, + InputMultiPath, + CommandLineInputSpec, +) -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" class SplineFilterInputSpec(CommandLineInputSpec): @@ -17,18 +23,21 @@ class SplineFilterInputSpec(CommandLineInputSpec): desc="file containing tracks to be filtered", position=0, argstr="%s", - mandatory=True) + mandatory=True, + ) step_length = traits.Float( desc="in the unit of minimum voxel size", position=1, argstr="%f", - mandatory=True) + mandatory=True, + ) output_file = File( "spline_tracks.trk", desc="target file for smoothed tracks", position=2, argstr="%s", - usedefault=True) + usedefault=True, + ) class SplineFilterOutputSpec(TraitedSpec): @@ -53,6 +62,7 @@ class SplineFilter(CommandLine): >>> filt.inputs.step_length = 0.5 >>> filt.run() # doctest: +SKIP """ + input_spec = SplineFilterInputSpec output_spec = SplineFilterOutputSpec @@ -60,8 +70,7 @@ class SplineFilter(CommandLine): def _list_outputs(self): outputs = self.output_spec().get() - outputs['smoothed_track_file'] = os.path.abspath( - self.inputs.output_file) + outputs["smoothed_track_file"] = os.path.abspath(self.inputs.output_file) return outputs @@ -71,13 +80,15 @@ class TrackMergeInputSpec(CommandLineInputSpec): desc="file containing tracks to be filtered", position=0, argstr="%s...", - mandatory=True) + mandatory=True, + ) output_file = File( "merged_tracks.trk", desc="target file for merged tracks", position=-1, argstr="%s", - usedefault=True) + usedefault=True, + ) class TrackMergeOutputSpec(TraitedSpec): @@ -104,6 +115,7 @@ class TrackMerge(CommandLine): >>> mrg.inputs.track_files = ['track1.trk','track2.trk'] >>> mrg.run() # doctest: +SKIP """ + input_spec = TrackMergeInputSpec output_spec = TrackMergeOutputSpec @@ -111,5 +123,5 @@ class TrackMerge(CommandLine): def _list_outputs(self): outputs = self.output_spec().get() - outputs['track_file'] = os.path.abspath(self.inputs.output_file) + 
outputs["track_file"] = os.path.abspath(self.inputs.output_file) return outputs diff --git a/nipype/interfaces/diffusion_toolkit/tests/test_auto_DTIRecon.py b/nipype/interfaces/diffusion_toolkit/tests/test_auto_DTIRecon.py index 8623872a7e..91e164f35a 100644 --- a/nipype/interfaces/diffusion_toolkit/tests/test_auto_DTIRecon.py +++ b/nipype/interfaces/diffusion_toolkit/tests/test_auto_DTIRecon.py @@ -4,59 +4,39 @@ def test_DTIRecon_inputs(): input_map = dict( - DWI=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=1, - ), - args=dict(argstr='%s', ), - b0_threshold=dict(argstr='-b0_th', ), - bvals=dict( - extensions=None, - mandatory=True, - ), - bvecs=dict( - argstr='-gm %s', - extensions=None, - mandatory=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - image_orientation_vectors=dict(argstr='-iop %f', ), - n_averages=dict(argstr='-nex %s', ), - oblique_correction=dict(argstr='-oc', ), - out_prefix=dict( - argstr='%s', - position=2, - usedefault=True, - ), - output_type=dict( - argstr='-ot %s', - usedefault=True, - ), + DWI=dict(argstr="%s", extensions=None, mandatory=True, position=1,), + args=dict(argstr="%s",), + b0_threshold=dict(argstr="-b0_th",), + bvals=dict(extensions=None, mandatory=True,), + bvecs=dict(argstr="-gm %s", extensions=None, mandatory=True,), + environ=dict(nohash=True, usedefault=True,), + image_orientation_vectors=dict(argstr="-iop %f",), + n_averages=dict(argstr="-nex %s",), + oblique_correction=dict(argstr="-oc",), + out_prefix=dict(argstr="%s", position=2, usedefault=True,), + output_type=dict(argstr="-ot %s", usedefault=True,), ) inputs = DTIRecon.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DTIRecon_outputs(): output_map = dict( - ADC=dict(extensions=None, ), - B0=dict(extensions=None, ), - FA=dict(extensions=None, ), - FA_color=dict(extensions=None, ), - L1=dict(extensions=None, ), - L2=dict(extensions=None, ), - L3=dict(extensions=None, ), - V1=dict(extensions=None, ), - V2=dict(extensions=None, ), - V3=dict(extensions=None, ), - exp=dict(extensions=None, ), - tensor=dict(extensions=None, ), + ADC=dict(extensions=None,), + B0=dict(extensions=None,), + FA=dict(extensions=None,), + FA_color=dict(extensions=None,), + L1=dict(extensions=None,), + L2=dict(extensions=None,), + L3=dict(extensions=None,), + V1=dict(extensions=None,), + V2=dict(extensions=None,), + V3=dict(extensions=None,), + exp=dict(extensions=None,), + tensor=dict(extensions=None,), ) outputs = DTIRecon.output_spec() diff --git a/nipype/interfaces/diffusion_toolkit/tests/test_auto_DTITracker.py b/nipype/interfaces/diffusion_toolkit/tests/test_auto_DTITracker.py index 2f4f3417ef..d1fd3bd1e8 100644 --- a/nipype/interfaces/diffusion_toolkit/tests/test_auto_DTITracker.py +++ b/nipype/interfaces/diffusion_toolkit/tests/test_auto_DTITracker.py @@ -4,63 +4,40 @@ def test_DTITracker_inputs(): input_map = dict( - angle_threshold=dict(argstr='-at %f', ), - angle_threshold_weight=dict(argstr='-atw %f', ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - input_data_prefix=dict( - argstr='%s', - position=0, - usedefault=True, - ), - input_type=dict(argstr='-it %s', ), - invert_x=dict(argstr='-ix', ), - invert_y=dict(argstr='-iy', ), - invert_z=dict(argstr='-iz', ), - mask1_file=dict( - argstr='-m %s', - extensions=None, - mandatory=True, - position=2, - ), - mask1_threshold=dict(position=3, ), - mask2_file=dict( - 
argstr='-m2 %s', - extensions=None, - position=4, - ), - mask2_threshold=dict(position=5, ), - output_file=dict( - argstr='%s', - extensions=None, - position=1, - usedefault=True, - ), - output_mask=dict( - argstr='-om %s', - extensions=None, - ), - primary_vector=dict(argstr='-%s', ), - random_seed=dict(argstr='-rseed %d', ), - step_length=dict(argstr='-l %f', ), - swap_xy=dict(argstr='-sxy', ), - swap_yz=dict(argstr='-syz', ), - swap_zx=dict(argstr='-szx', ), - tensor_file=dict(extensions=None, ), - tracking_method=dict(argstr='-%s', ), + angle_threshold=dict(argstr="-at %f",), + angle_threshold_weight=dict(argstr="-atw %f",), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + input_data_prefix=dict(argstr="%s", position=0, usedefault=True,), + input_type=dict(argstr="-it %s",), + invert_x=dict(argstr="-ix",), + invert_y=dict(argstr="-iy",), + invert_z=dict(argstr="-iz",), + mask1_file=dict(argstr="-m %s", extensions=None, mandatory=True, position=2,), + mask1_threshold=dict(position=3,), + mask2_file=dict(argstr="-m2 %s", extensions=None, position=4,), + mask2_threshold=dict(position=5,), + output_file=dict(argstr="%s", extensions=None, position=1, usedefault=True,), + output_mask=dict(argstr="-om %s", extensions=None,), + primary_vector=dict(argstr="-%s",), + random_seed=dict(argstr="-rseed %d",), + step_length=dict(argstr="-l %f",), + swap_xy=dict(argstr="-sxy",), + swap_yz=dict(argstr="-syz",), + swap_zx=dict(argstr="-szx",), + tensor_file=dict(extensions=None,), + tracking_method=dict(argstr="-%s",), ) inputs = DTITracker.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DTITracker_outputs(): output_map = dict( - mask_file=dict(extensions=None, ), - track_file=dict(extensions=None, ), + mask_file=dict(extensions=None,), track_file=dict(extensions=None,), ) outputs = DTITracker.output_spec() diff --git a/nipype/interfaces/diffusion_toolkit/tests/test_auto_HARDIMat.py b/nipype/interfaces/diffusion_toolkit/tests/test_auto_HARDIMat.py index d043890f9d..6cddb31fdc 100644 --- a/nipype/interfaces/diffusion_toolkit/tests/test_auto_HARDIMat.py +++ b/nipype/interfaces/diffusion_toolkit/tests/test_auto_HARDIMat.py @@ -4,50 +4,27 @@ def test_HARDIMat_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bvals=dict( - extensions=None, - mandatory=True, - ), - bvecs=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=1, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - image_info=dict( - argstr='-info %s', - extensions=None, - ), - image_orientation_vectors=dict(argstr='-iop %f', ), - oblique_correction=dict(argstr='-oc', ), - odf_file=dict( - argstr='-odf %s', - extensions=None, - ), - order=dict(argstr='-order %s', ), - out_file=dict( - argstr='%s', - extensions=None, - position=2, - usedefault=True, - ), - reference_file=dict( - argstr='-ref %s', - extensions=None, - ), + args=dict(argstr="%s",), + bvals=dict(extensions=None, mandatory=True,), + bvecs=dict(argstr="%s", extensions=None, mandatory=True, position=1,), + environ=dict(nohash=True, usedefault=True,), + image_info=dict(argstr="-info %s", extensions=None,), + image_orientation_vectors=dict(argstr="-iop %f",), + oblique_correction=dict(argstr="-oc",), + odf_file=dict(argstr="-odf %s", extensions=None,), + order=dict(argstr="-order %s",), + out_file=dict(argstr="%s", extensions=None, position=2, usedefault=True,), + reference_file=dict(argstr="-ref %s", 
extensions=None,), ) inputs = HARDIMat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_HARDIMat_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = HARDIMat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/diffusion_toolkit/tests/test_auto_ODFRecon.py b/nipype/interfaces/diffusion_toolkit/tests/test_auto_ODFRecon.py index e1f6ad47b6..0e8132a7f6 100644 --- a/nipype/interfaces/diffusion_toolkit/tests/test_auto_ODFRecon.py +++ b/nipype/interfaces/diffusion_toolkit/tests/test_auto_ODFRecon.py @@ -4,65 +4,37 @@ def test_ODFRecon_inputs(): input_map = dict( - DWI=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=1, - ), - args=dict(argstr='%s', ), - dsi=dict(argstr='-dsi', ), - environ=dict( - nohash=True, - usedefault=True, - ), - filter=dict(argstr='-f', ), - image_orientation_vectors=dict(argstr='-iop %f', ), - matrix=dict( - argstr='-mat %s', - extensions=None, - mandatory=True, - ), - n_b0=dict( - argstr='-b0 %s', - mandatory=True, - ), - n_directions=dict( - argstr='%s', - mandatory=True, - position=2, - ), - n_output_directions=dict( - argstr='%s', - mandatory=True, - position=3, - ), - oblique_correction=dict(argstr='-oc', ), - out_prefix=dict( - argstr='%s', - position=4, - usedefault=True, - ), - output_entropy=dict(argstr='-oe', ), - output_type=dict( - argstr='-ot %s', - usedefault=True, - ), - sharpness=dict(argstr='-s %f', ), - subtract_background=dict(argstr='-bg', ), + DWI=dict(argstr="%s", extensions=None, mandatory=True, position=1,), + args=dict(argstr="%s",), + dsi=dict(argstr="-dsi",), + environ=dict(nohash=True, usedefault=True,), + filter=dict(argstr="-f",), + image_orientation_vectors=dict(argstr="-iop %f",), + matrix=dict(argstr="-mat %s", extensions=None, mandatory=True,), + n_b0=dict(argstr="-b0 %s", mandatory=True,), + n_directions=dict(argstr="%s", mandatory=True, position=2,), + n_output_directions=dict(argstr="%s", mandatory=True, position=3,), + oblique_correction=dict(argstr="-oc",), + out_prefix=dict(argstr="%s", position=4, usedefault=True,), + output_entropy=dict(argstr="-oe",), + output_type=dict(argstr="-ot %s", usedefault=True,), + sharpness=dict(argstr="-s %f",), + subtract_background=dict(argstr="-bg",), ) inputs = ODFRecon.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ODFRecon_outputs(): output_map = dict( - B0=dict(extensions=None, ), - DWI=dict(extensions=None, ), - ODF=dict(extensions=None, ), - entropy=dict(extensions=None, ), - max=dict(extensions=None, ), + B0=dict(extensions=None,), + DWI=dict(extensions=None,), + ODF=dict(extensions=None,), + entropy=dict(extensions=None,), + max=dict(extensions=None,), ) outputs = ODFRecon.output_spec() diff --git a/nipype/interfaces/diffusion_toolkit/tests/test_auto_ODFTracker.py b/nipype/interfaces/diffusion_toolkit/tests/test_auto_ODFTracker.py index 805c28831e..233aea3e3a 100644 --- a/nipype/interfaces/diffusion_toolkit/tests/test_auto_ODFTracker.py +++ b/nipype/interfaces/diffusion_toolkit/tests/test_auto_ODFTracker.py @@ -4,71 +4,43 @@ def test_ODFTracker_inputs(): input_map = dict( - ODF=dict( - extensions=None, - mandatory=True, - ), - angle_threshold=dict(argstr='-at %f', ), - args=dict(argstr='%s', ), - 
disc=dict(argstr='-disc', ), - dsi=dict(argstr='-dsi', ), - environ=dict( - nohash=True, - usedefault=True, - ), - image_orientation_vectors=dict(argstr='-iop %f', ), - input_data_prefix=dict( - argstr='%s', - position=0, - usedefault=True, - ), - input_output_type=dict( - argstr='-it %s', - usedefault=True, - ), - invert_x=dict(argstr='-ix', ), - invert_y=dict(argstr='-iy', ), - invert_z=dict(argstr='-iz', ), - limit=dict(argstr='-limit %d', ), - mask1_file=dict( - argstr='-m %s', - extensions=None, - mandatory=True, - position=2, - ), - mask1_threshold=dict(position=3, ), - mask2_file=dict( - argstr='-m2 %s', - extensions=None, - position=4, - ), - mask2_threshold=dict(position=5, ), - max=dict( - extensions=None, - mandatory=True, - ), - out_file=dict( - argstr='%s', - extensions=None, - position=1, - usedefault=True, - ), - random_seed=dict(argstr='-rseed %s', ), - runge_kutta2=dict(argstr='-rk2', ), - slice_order=dict(argstr='-sorder %d', ), - step_length=dict(argstr='-l %f', ), - swap_xy=dict(argstr='-sxy', ), - swap_yz=dict(argstr='-syz', ), - swap_zx=dict(argstr='-szx', ), - voxel_order=dict(argstr='-vorder %s', ), + ODF=dict(extensions=None, mandatory=True,), + angle_threshold=dict(argstr="-at %f",), + args=dict(argstr="%s",), + disc=dict(argstr="-disc",), + dsi=dict(argstr="-dsi",), + environ=dict(nohash=True, usedefault=True,), + image_orientation_vectors=dict(argstr="-iop %f",), + input_data_prefix=dict(argstr="%s", position=0, usedefault=True,), + input_output_type=dict(argstr="-it %s", usedefault=True,), + invert_x=dict(argstr="-ix",), + invert_y=dict(argstr="-iy",), + invert_z=dict(argstr="-iz",), + limit=dict(argstr="-limit %d",), + mask1_file=dict(argstr="-m %s", extensions=None, mandatory=True, position=2,), + mask1_threshold=dict(position=3,), + mask2_file=dict(argstr="-m2 %s", extensions=None, position=4,), + mask2_threshold=dict(position=5,), + max=dict(extensions=None, mandatory=True,), + out_file=dict(argstr="%s", extensions=None, position=1, usedefault=True,), + random_seed=dict(argstr="-rseed %s",), + runge_kutta2=dict(argstr="-rk2",), + slice_order=dict(argstr="-sorder %d",), + step_length=dict(argstr="-l %f",), + swap_xy=dict(argstr="-sxy",), + swap_yz=dict(argstr="-syz",), + swap_zx=dict(argstr="-szx",), + voxel_order=dict(argstr="-vorder %s",), ) inputs = ODFTracker.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ODFTracker_outputs(): - output_map = dict(track_file=dict(extensions=None, ), ) + output_map = dict(track_file=dict(extensions=None,),) outputs = ODFTracker.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/diffusion_toolkit/tests/test_auto_SplineFilter.py b/nipype/interfaces/diffusion_toolkit/tests/test_auto_SplineFilter.py index 53074aab81..ddff69b5de 100644 --- a/nipype/interfaces/diffusion_toolkit/tests/test_auto_SplineFilter.py +++ b/nipype/interfaces/diffusion_toolkit/tests/test_auto_SplineFilter.py @@ -4,36 +4,21 @@ def test_SplineFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - output_file=dict( - argstr='%s', - extensions=None, - position=2, - usedefault=True, - ), - step_length=dict( - argstr='%f', - mandatory=True, - position=1, - ), - track_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=0, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + 
output_file=dict(argstr="%s", extensions=None, position=2, usedefault=True,), + step_length=dict(argstr="%f", mandatory=True, position=1,), + track_file=dict(argstr="%s", extensions=None, mandatory=True, position=0,), ) inputs = SplineFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SplineFilter_outputs(): - output_map = dict(smoothed_track_file=dict(extensions=None, ), ) + output_map = dict(smoothed_track_file=dict(extensions=None,),) outputs = SplineFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/diffusion_toolkit/tests/test_auto_TrackMerge.py b/nipype/interfaces/diffusion_toolkit/tests/test_auto_TrackMerge.py index 9fea4d1b5e..1c274533f0 100644 --- a/nipype/interfaces/diffusion_toolkit/tests/test_auto_TrackMerge.py +++ b/nipype/interfaces/diffusion_toolkit/tests/test_auto_TrackMerge.py @@ -4,30 +4,20 @@ def test_TrackMerge_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - output_file=dict( - argstr='%s', - extensions=None, - position=-1, - usedefault=True, - ), - track_files=dict( - argstr='%s...', - mandatory=True, - position=0, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + output_file=dict(argstr="%s", extensions=None, position=-1, usedefault=True,), + track_files=dict(argstr="%s...", mandatory=True, position=0,), ) inputs = TrackMerge.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TrackMerge_outputs(): - output_map = dict(track_file=dict(extensions=None, ), ) + output_map = dict(track_file=dict(extensions=None,),) outputs = TrackMerge.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dipy/anisotropic_power.py b/nipype/interfaces/dipy/anisotropic_power.py index 21208326f4..3982ed7106 100644 --- a/nipype/interfaces/dipy/anisotropic_power.py +++ b/nipype/interfaces/dipy/anisotropic_power.py @@ -6,11 +6,11 @@ from ..base import TraitedSpec, File, isdefined from .base import DipyDiffusionInterface, DipyBaseInterfaceInputSpec -IFLOGGER = logging.getLogger('nipype.interface') +IFLOGGER = logging.getLogger("nipype.interface") class APMQballInputSpec(DipyBaseInterfaceInputSpec): - mask_file = File(exists=True, desc='An optional brain mask') + mask_file = File(exists=True, desc="An optional brain mask") class APMQballOutputSpec(TraitedSpec): @@ -31,6 +31,7 @@ class APMQball(DipyDiffusionInterface): >>> apm.inputs.in_bval = 'bvals' >>> apm.run() # doctest: +SKIP """ + input_spec = APMQballInputSpec output_spec = APMQballOutputSpec @@ -50,23 +51,24 @@ def _run_interface(self, runtime): # Fit it model = shm.QballModel(gtab, 8) - sphere = get_sphere('symmetric724') + sphere = get_sphere("symmetric724") peaks = peaks_from_model( model=model, data=data, - relative_peak_threshold=.5, + relative_peak_threshold=0.5, min_separation_angle=25, sphere=sphere, - mask=mask) + mask=mask, + ) apm = shm.anisotropic_power(peaks.shm_coeff) - out_file = self._gen_filename('apm') + out_file = self._gen_filename("apm") nb.Nifti1Image(apm.astype("float32"), affine).to_filename(out_file) - IFLOGGER.info('APM qball image saved as %s', out_file) + IFLOGGER.info("APM qball image saved as %s", out_file) return runtime def _list_outputs(self): outputs = self._outputs().get() - 
outputs['out_file'] = self._gen_filename('apm')
+        outputs["out_file"] = self._gen_filename("apm")
         return outputs
diff --git a/nipype/interfaces/dipy/base.py b/nipype/interfaces/dipy/base.py
index 93546b69ac..1b168b5732 100644
--- a/nipype/interfaces/dipy/base.py
+++ b/nipype/interfaces/dipy/base.py
@@ -5,11 +5,17 @@
 import inspect
 import numpy as np
 from ... import logging
-from ..base import (traits, File, isdefined, LibraryBaseInterface,
-                    BaseInterfaceInputSpec, TraitedSpec)
+from ..base import (
+    traits,
+    File,
+    isdefined,
+    LibraryBaseInterface,
+    BaseInterfaceInputSpec,
+    TraitedSpec,
+)

 # List of workflows to ignore
-SKIP_WORKFLOWS_LIST = ['Workflow', 'CombinedWorkflow']
+SKIP_WORKFLOWS_LIST = ["Workflow", "CombinedWorkflow"]

 HAVE_DIPY = True
@@ -38,27 +44,30 @@ class DipyBaseInterface(LibraryBaseInterface):
     """
     A base interface for py:mod:`dipy` computations
     """
-    _pkg = 'dipy'
+
+    _pkg = "dipy"


 class DipyBaseInterfaceInputSpec(BaseInterfaceInputSpec):
-    in_file = File(exists=True, mandatory=True, desc=('input diffusion data'))
-    in_bval = File(exists=True, mandatory=True, desc=('input b-values table'))
-    in_bvec = File(exists=True, mandatory=True, desc=('input b-vectors table'))
-    b0_thres = traits.Int(700, usedefault=True, desc=('b0 threshold'))
-    out_prefix = traits.Str(desc=('output prefix for file names'))
+    in_file = File(exists=True, mandatory=True, desc=("input diffusion data"))
+    in_bval = File(exists=True, mandatory=True, desc=("input b-values table"))
+    in_bvec = File(exists=True, mandatory=True, desc=("input b-vectors table"))
+    b0_thres = traits.Int(700, usedefault=True, desc=("b0 threshold"))
+    out_prefix = traits.Str(desc=("output prefix for file names"))


 class DipyDiffusionInterface(DipyBaseInterface):
     """
     A base interface for py:mod:`dipy` computations
     """
+
     input_spec = DipyBaseInterfaceInputSpec

     def _get_gradient_table(self):
         bval = np.loadtxt(self.inputs.in_bval)
         bvec = np.loadtxt(self.inputs.in_bvec).T
         from dipy.core.gradients import gradient_table
+
         gtab = gradient_table(bval, bvec)

         gtab.b0_threshold = self.inputs.b0_thres
@@ -66,7 +75,7 @@ def _get_gradient_table(self):

     def _gen_filename(self, name, ext=None):
         fname, fext = op.splitext(op.basename(self.inputs.in_file))
-        if fext == '.gz':
+        if fext == ".gz":
             fname, fext2 = op.splitext(fname)
             fext = fext2 + fext

@@ -78,7 +87,7 @@ def _gen_filename(self, name, ext=None):
         if ext is None:
             ext = fext

-        return out_prefix + '_' + name + ext
+        return out_prefix + "_" + name + ext


 def convert_to_traits_type(dipy_type, is_file=False):
@@ -108,8 +117,10 @@ def convert_to_traits_type(dipy_type, is_file=False):
     elif "complex" in dipy_type:
         return traits.Complex, is_mandatory
     else:
-        msg = "Error during convert_to_traits_type({0}).".format(dipy_type) + \
-              "Unknown DIPY type."
+        msg = (
+            "Error during convert_to_traits_type({0}).".format(dipy_type)
+            + " Unknown DIPY type."
+        )
         raise IOError(msg)
@@ -136,22 +147,21 @@ def create_interface_specs(class_name, params=None, BaseClass=TraitedSpec):
     for p in params:
         name, dipy_type, desc = p[0], p[1], p[2]
         is_file = bool("files" in name or "out_" in name)
-        traits_type, is_mandatory = convert_to_traits_type(dipy_type,
-                                                           is_file)
+        traits_type, is_mandatory = convert_to_traits_type(dipy_type, is_file)
         # print(name, dipy_type, desc, is_file, traits_type, is_mandatory)
         if BaseClass.__name__ == BaseInterfaceInputSpec.__name__:
             if len(p) > 3:
-                attr[name] = traits_type(p[3], desc=desc[-1],
-                                         usedefault=True,
-                                         mandatory=is_mandatory)
+                attr[name] = traits_type(
+                    p[3], desc=desc[-1], usedefault=True, mandatory=is_mandatory
+                )
             else:
-                attr[name] = traits_type(desc=desc[-1],
-                                         mandatory=is_mandatory)
+                attr[name] = traits_type(desc=desc[-1], mandatory=is_mandatory)
         else:
-            attr[name] = traits_type(p[3], desc=desc[-1], exists=True,
-                                     usedefault=True,)
+            attr[name] = traits_type(
+                p[3], desc=desc[-1], exists=True, usedefault=True,
+            )

-    newclass = type(str(class_name), (BaseClass, ), attr)
+    newclass = type(str(class_name), (BaseClass,), attr)
     return newclass
@@ -180,19 +190,26 @@ def dipy_to_nipype_interface(cls_name, dipy_flow, BaseClass=DipyBaseInterface):
     flow = dipy_flow()
     parser.add_workflow(flow)
     default_values = inspect.getargspec(flow.run).defaults
-    optional_params = [args + (val,) for args, val in zip(parser.optional_parameters, default_values)]
+    optional_params = [
+        args + (val,) for args, val in zip(parser.optional_parameters, default_values)
+    ]
     start = len(parser.optional_parameters) - len(parser.output_parameters)
-    output_parameters = [args + (val,) for args, val in zip(parser.output_parameters, default_values[start:])]
+    output_parameters = [
+        args + (val,)
+        for args, val in zip(parser.output_parameters, default_values[start:])
+    ]
     input_parameters = parser.positional_parameters + optional_params

-    input_spec = create_interface_specs("{}InputSpec".format(cls_name),
-                                        input_parameters,
-                                        BaseClass=BaseInterfaceInputSpec)
+    input_spec = create_interface_specs(
+        "{}InputSpec".format(cls_name),
+        input_parameters,
+        BaseClass=BaseInterfaceInputSpec,
+    )

-    output_spec = create_interface_specs("{}OutputSpec".format(cls_name),
-                                         output_parameters,
-                                         BaseClass=TraitedSpec)
+    output_spec = create_interface_specs(
+        "{}OutputSpec".format(cls_name), output_parameters, BaseClass=TraitedSpec
+    )

     def _run_interface(self, runtime):
         flow = dipy_flow()
@@ -207,11 +224,16 @@ def _list_outputs(self):

         return outputs

-    newclass = type(str(cls_name), (BaseClass, ),
-                    {"input_spec": input_spec,
-                     "output_spec": output_spec,
-                     "_run_interface": _run_interface,
-                     "_list_outputs:": _list_outputs})
+    newclass = type(
+        str(cls_name),
+        (BaseClass,),
+        {
+            "input_spec": input_spec,
+            "output_spec": output_spec,
+            "_run_interface": _run_interface,
+            "_list_outputs": _list_outputs,
+        },
+    )
     return newclass
@@ -235,7 +257,10 @@ def get_dipy_workflows(module):
     >>> get_dipy_workflows(align)  # doctest: +SKIP

     """
-    return [(m, obj) for m, obj in inspect.getmembers(module)
-            if inspect.isclass(obj) and
-            issubclass(obj, module.Workflow) and
-            m not in SKIP_WORKFLOWS_LIST]
+    return [
+        (m, obj)
+        for m, obj in inspect.getmembers(module)
+        if inspect.isclass(obj)
+        and issubclass(obj, module.Workflow)
+        and m not in SKIP_WORKFLOWS_LIST
+    ]
diff --git a/nipype/interfaces/dipy/preprocess.py b/nipype/interfaces/dipy/preprocess.py
index 03686e0258..97d43e9220 100644
--- a/nipype/interfaces/dipy/preprocess.py
+++ b/nipype/interfaces/dipy/preprocess.py
@@ -8,44 +8,54 @@
 from ...utils import NUMPY_MMAP
 from ... import logging
-from ..base import (traits, TraitedSpec, File, isdefined)
-from .base import (HAVE_DIPY, dipy_version, dipy_to_nipype_interface,
-                   get_dipy_workflows, DipyBaseInterface)
-
-IFLOGGER = logging.getLogger('nipype.interface')
-
-if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion('0.15'):
+from ..base import traits, TraitedSpec, File, isdefined
+from .base import (
+    HAVE_DIPY,
+    dipy_version,
+    dipy_to_nipype_interface,
+    get_dipy_workflows,
+    DipyBaseInterface,
+)
+
+IFLOGGER = logging.getLogger("nipype.interface")
+
+if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion("0.15"):
     from dipy.workflows import denoise, mask

     l_wkflw = get_dipy_workflows(denoise) + get_dipy_workflows(mask)

     for name, obj in l_wkflw:
-        new_name = name.replace('Flow', '')
+        new_name = name.replace("Flow", "")
         globals()[new_name] = dipy_to_nipype_interface(new_name, obj)
     del l_wkflw

 else:
-    IFLOGGER.info("We advise you to upgrade DIPY version. This upgrade will"
-                  " open access to more function")
+    IFLOGGER.info(
+        "We advise you to upgrade DIPY version. This upgrade will"
+        " open access to more functions"
+    )


 class ResampleInputSpec(TraitedSpec):
     in_file = File(
-        exists=True,
-        mandatory=True,
-        desc='The input 4D diffusion-weighted image file')
+        exists=True, mandatory=True, desc="The input 4D diffusion-weighted image file"
+    )
     vox_size = traits.Tuple(
         traits.Float,
         traits.Float,
         traits.Float,
-        desc=('specify the new voxel zooms. If no vox_size'
-              ' is set, then isotropic regridding will '
-              'be performed, with spacing equal to the '
-              'smallest current zoom.'))
+        desc=(
+            "specify the new voxel zooms. If no vox_size"
+            " is set, then isotropic regridding will "
+            "be performed, with spacing equal to the "
+            "smallest current zoom."
+        ),
+    )
     interp = traits.Int(
         1,
         mandatory=True,
         usedefault=True,
-        desc=('order of the interpolator (0 = nearest, 1 = linear, etc.'))
+        desc=("order of the interpolator (0 = nearest, 1 = linear, etc.)"),
+    )


 class ResampleOutputSpec(TraitedSpec):
@@ -66,6 +76,7 @@ class Resample(DipyBaseInterface):
     >>> reslice.inputs.in_file = 'diffusion.nii'
     >>> reslice.run() # doctest: +SKIP
     """
+
     input_spec = ResampleInputSpec
     output_spec = ResampleOutputSpec
@@ -78,50 +89,47 @@ def _run_interface(self, runtime):
         out_file = op.abspath(self._gen_outfilename())

         resample_proxy(
-            self.inputs.in_file,
-            order=order,
-            new_zooms=vox_size,
-            out_file=out_file)
+            self.inputs.in_file, order=order, new_zooms=vox_size, out_file=out_file
+        )

-        IFLOGGER.info('Resliced image saved as %s', out_file)
+        IFLOGGER.info("Resliced image saved as %s", out_file)
         return runtime

     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['out_file'] = op.abspath(self._gen_outfilename())
+        outputs["out_file"] = op.abspath(self._gen_outfilename())
         return outputs

     def _gen_outfilename(self):
         fname, fext = op.splitext(op.basename(self.inputs.in_file))
-        if fext == '.gz':
+        if fext == ".gz":
             fname, fext2 = op.splitext(fname)
             fext = fext2 + fext
-        return op.abspath('%s_reslice%s' % (fname, fext))
+        return op.abspath("%s_reslice%s" % (fname, fext))


 class DenoiseInputSpec(TraitedSpec):
     in_file = File(
-        exists=True,
-        mandatory=True,
-        desc='The input 4D diffusion-weighted image file')
-    in_mask = File(exists=True, desc='brain mask')
+        exists=True, mandatory=True, desc="The input 4D diffusion-weighted image file"
+    )
+    in_mask = File(exists=True, desc="brain mask")
     noise_model = traits.Enum(
-        'rician',
-        'gaussian',
+        "rician",
+        "gaussian",
         mandatory=True,
         usedefault=True,
-        desc=('noise distribution model'))
+        desc=("noise distribution model"),
+    )
     signal_mask = File(
-        desc=('mask in which the mean signal '
-              'will be computed'),
-        exists=True)
+        desc=("mask in which the mean signal " "will be computed"), exists=True
+    )
     noise_mask = File(
-        desc=('mask in which the standard deviation of noise '
-              'will be computed'),
-        exists=True)
-    patch_radius = traits.Int(1, usedefault=True, desc='patch radius')
-    block_radius = traits.Int(5, usedefault=True, desc='block_radius')
-    snr = traits.Float(desc='manually set an SNR')
+        desc=("mask in which the standard deviation of noise " "will be computed"),
+        exists=True,
+    )
+    patch_radius = traits.Int(1, usedefault=True, desc="patch radius")
+    block_radius = traits.Int(5, usedefault=True, desc="block_radius")
+    snr = traits.Float(desc="manually set an SNR")


 class DenoiseOutputSpec(TraitedSpec):
@@ -148,23 +156,23 @@ class Denoise(DipyBaseInterface):
     >>> denoise.inputs.in_file = 'diffusion.nii'
     >>> denoise.run() # doctest: +SKIP
     """
+
     input_spec = DenoiseInputSpec
     output_spec = DenoiseOutputSpec

     def _run_interface(self, runtime):
         out_file = op.abspath(self._gen_outfilename())

-        settings = dict(
-            mask=None, rician=(self.inputs.noise_model == 'rician'))
+        settings = dict(mask=None, rician=(self.inputs.noise_model == "rician"))

         if isdefined(self.inputs.in_mask):
-            settings['mask'] = nb.load(self.inputs.in_mask).get_data()
+            settings["mask"] = nb.load(self.inputs.in_mask).get_data()

         if isdefined(self.inputs.patch_radius):
-            settings['patch_radius'] = self.inputs.patch_radius
+            settings["patch_radius"] = self.inputs.patch_radius

         if isdefined(self.inputs.block_radius):
-            settings['block_radius'] = self.inputs.block_radius
+            settings["block_radius"] = self.inputs.block_radius

         snr = None
         if
isdefined(self.inputs.snr): @@ -183,22 +191,22 @@ def _run_interface(self, runtime): snr=snr, smask=signal_mask, nmask=noise_mask, - out_file=out_file) - IFLOGGER.info('Denoised image saved as %s, estimated SNR=%s', out_file, - str(s)) + out_file=out_file, + ) + IFLOGGER.info("Denoised image saved as %s, estimated SNR=%s", out_file, str(s)) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = op.abspath(self._gen_outfilename()) + outputs["out_file"] = op.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): fname, fext = op.splitext(op.basename(self.inputs.in_file)) - if fext == '.gz': + if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext - return op.abspath('%s_denoise%s' % (fname, fext)) + return op.abspath("%s_denoise%s" % (fname, fext)) def resample_proxy(in_file, order=3, new_zooms=None, out_file=None): @@ -209,10 +217,10 @@ def resample_proxy(in_file, order=3, new_zooms=None, out_file=None): if out_file is None: fname, fext = op.splitext(op.basename(in_file)) - if fext == '.gz': + if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext - out_file = op.abspath('./%s_reslice%s' % (fname, fext)) + out_file = op.abspath("./%s_reslice%s" % (fname, fext)) img = nb.load(in_file, mmap=NUMPY_MMAP) hdr = img.header.copy() @@ -222,7 +230,7 @@ def resample_proxy(in_file, order=3, new_zooms=None, out_file=None): if new_zooms is None: minzoom = np.array(im_zooms).min() - new_zooms = tuple(np.ones((3, )) * minzoom) + new_zooms = tuple(np.ones((3,)) * minzoom) if np.all(im_zooms == new_zooms): return in_file @@ -232,18 +240,14 @@ def resample_proxy(in_file, order=3, new_zooms=None, out_file=None): tmp_zooms[:3] = new_zooms[0] hdr.set_zooms(tuple(tmp_zooms)) hdr.set_data_shape(data2.shape) - hdr.set_xyzt_units('mm') - nb.Nifti1Image(data2.astype(hdr.get_data_dtype()), affine2, - hdr).to_filename(out_file) + hdr.set_xyzt_units("mm") + nb.Nifti1Image(data2.astype(hdr.get_data_dtype()), affine2, hdr).to_filename( + out_file + ) return out_file, new_zooms -def nlmeans_proxy(in_file, - settings, - snr=None, - smask=None, - nmask=None, - out_file=None): +def nlmeans_proxy(in_file, settings, snr=None, smask=None, nmask=None, out_file=None): """ Uses non-local means to denoise 4D datasets """ @@ -253,10 +257,10 @@ def nlmeans_proxy(in_file, if out_file is None: fname, fext = op.splitext(op.basename(in_file)) - if fext == '.gz': + if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext - out_file = op.abspath('./%s_denoise%s' % (fname, fext)) + out_file = op.abspath("./%s_denoise%s" % (fname, fext)) img = nb.load(in_file, mmap=NUMPY_MMAP) hdr = img.header @@ -269,25 +273,24 @@ def nlmeans_proxy(in_file, data = np.nan_to_num(data) if data.max() < 1.0e-4: - raise RuntimeError('There is no signal in the image') + raise RuntimeError("There is no signal in the image") df = 1.0 if data.max() < 1000.0: - df = 1000. 
/ data.max() + df = 1000.0 / data.max() data *= df b0 = data[..., 0] if smask is None: smask = np.zeros_like(b0) - smask[b0 > np.percentile(b0, 85.)] = 1 + smask[b0 > np.percentile(b0, 85.0)] = 1 - smask = binary_erosion( - smask.astype(np.uint8), iterations=2).astype(np.uint8) + smask = binary_erosion(smask.astype(np.uint8), iterations=2).astype(np.uint8) if nmask is None: nmask = np.ones_like(b0, dtype=np.uint8) - bmask = settings['mask'] + bmask = settings["mask"] if bmask is None: bmask = np.zeros_like(b0) bmask[b0 > np.percentile(b0[b0 > 0], 10)] = 1 @@ -326,6 +329,5 @@ def nlmeans_proxy(in_file, den = np.squeeze(den) den /= df - nb.Nifti1Image(den.astype(hdr.get_data_dtype()), aff, - hdr).to_filename(out_file) + nb.Nifti1Image(den.astype(hdr.get_data_dtype()), aff, hdr).to_filename(out_file) return out_file, snr diff --git a/nipype/interfaces/dipy/reconstruction.py b/nipype/interfaces/dipy/reconstruction.py index 9351d1e400..ae60aab143 100644 --- a/nipype/interfaces/dipy/reconstruction.py +++ b/nipype/interfaces/dipy/reconstruction.py @@ -11,46 +11,49 @@ from ... import logging from ..base import TraitedSpec, File, traits, isdefined -from .base import (DipyDiffusionInterface, DipyBaseInterfaceInputSpec, - HAVE_DIPY, dipy_version, dipy_to_nipype_interface, - get_dipy_workflows) +from .base import ( + DipyDiffusionInterface, + DipyBaseInterfaceInputSpec, + HAVE_DIPY, + dipy_version, + dipy_to_nipype_interface, + get_dipy_workflows, +) -IFLOGGER = logging.getLogger('nipype.interface') +IFLOGGER = logging.getLogger("nipype.interface") -if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion('0.15'): +if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion("0.15"): from dipy.workflows import reconst l_wkflw = get_dipy_workflows(reconst) for name, obj in l_wkflw: - new_name = name.replace('Flow', '') + new_name = name.replace("Flow", "") globals()[new_name] = dipy_to_nipype_interface(new_name, obj) del l_wkflw else: - IFLOGGER.info("We advise you to upgrade DIPY version. This upgrade will" - " open access to more models") + IFLOGGER.info( + "We advise you to upgrade DIPY version. 
This upgrade will" + " open access to more models" + ) class RESTOREInputSpec(DipyBaseInterfaceInputSpec): - in_mask = File(exists=True, desc=('input mask in which compute tensors')) - noise_mask = File( - exists=True, desc=('input mask in which compute noise variance')) + in_mask = File(exists=True, desc=("input mask in which compute tensors")) + noise_mask = File(exists=True, desc=("input mask in which compute noise variance")) class RESTOREOutputSpec(TraitedSpec): - fa = File(desc='output fractional anisotropy (FA) map computed from ' - 'the fitted DTI') - md = File(desc='output mean diffusivity (MD) map computed from the ' - 'fitted DTI') - rd = File(desc='output radial diffusivity (RD) map computed from ' - 'the fitted DTI') - mode = File(desc=('output mode (MO) map computed from the fitted DTI')) - trace = File( - desc=('output the tensor trace map computed from the ' - 'fitted DTI')) - evals = File(desc=('output the eigenvalues of the fitted DTI')) - evecs = File(desc=('output the eigenvectors of the fitted DTI')) + fa = File( + desc="output fractional anisotropy (FA) map computed from " "the fitted DTI" + ) + md = File(desc="output mean diffusivity (MD) map computed from the " "fitted DTI") + rd = File(desc="output radial diffusivity (RD) map computed from " "the fitted DTI") + mode = File(desc=("output mode (MO) map computed from the fitted DTI")) + trace = File(desc=("output the tensor trace map computed from the " "fitted DTI")) + evals = File(desc=("output the eigenvalues of the fitted DTI")) + evecs = File(desc=("output the eigenvectors of the fitted DTI")) class RESTORE(DipyDiffusionInterface): @@ -77,6 +80,7 @@ class RESTORE(DipyDiffusionInterface): """ + input_spec = RESTOREInputSpec output_spec = RESTOREOutputSpec @@ -104,7 +108,7 @@ def _run_interface(self, runtime): noise_msk = noise_msk.astype(np.uint8) try_b0 = False elif np.all(data[msk == 0, 0] == 0): - IFLOGGER.info('Input data are masked.') + IFLOGGER.info("Input data are masked.") noise_msk = msk.reshape(-1).astype(np.uint8) else: noise_msk = (1 - msk).reshape(-1).astype(np.uint8) @@ -113,8 +117,9 @@ def _run_interface(self, runtime): dsample = data.reshape(-1, data.shape[-1]) if try_b0 and (nb0 > 1): - noise_data = dsample.take( - np.where(gtab.b0s_mask), axis=-1)[noise_msk == 0, ...] + noise_data = dsample.take(np.where(gtab.b0s_mask), axis=-1)[ + noise_msk == 0, ... + ] n = nb0 else: nodiff = np.where(~gtab.b0s_mask) @@ -126,22 +131,25 @@ def _run_interface(self, runtime): # Estimate sigma required by RESTORE mean_std = np.median(noise_data.std(-1)) try: - bias = (1. - np.sqrt(2. / (n - 1)) * (gamma(n / 2.) / gamma( - (n - 1) / 2.))) + bias = 1.0 - np.sqrt(2.0 / (n - 1)) * ( + gamma(n / 2.0) / gamma((n - 1) / 2.0) + ) except: - bias = .0 + bias = 0.0 pass sigma = mean_std * (1 + bias) if sigma == 0: - IFLOGGER.warning('Noise std is 0.0, looks like data was masked and ' - 'noise cannot be estimated correctly. Using default ' - 'tensor model instead of RESTORE.') + IFLOGGER.warning( + "Noise std is 0.0, looks like data was masked and " + "noise cannot be estimated correctly. Using default " + "tensor model instead of RESTORE." 
+ ) dti = TensorModel(gtab) else: - IFLOGGER.info('Performing RESTORE with noise std=%.4f.', sigma) - dti = TensorModel(gtab, fit_method='RESTORE', sigma=sigma) + IFLOGGER.info("Performing RESTORE with noise std=%.4f.", sigma) + dti = TensorModel(gtab, fit_method="RESTORE", sigma=sigma) try: fit_restore = dti.fit(data, msk) @@ -150,13 +158,14 @@ def _run_interface(self, runtime): fit_restore = dti.fit(data, msk) hdr.set_data_dtype(np.float32) - hdr['data_type'] = 16 + hdr["data_type"] = 16 for k in self._outputs().get(): scalar = getattr(fit_restore, k) hdr.set_data_shape(np.shape(scalar)) nb.Nifti1Image(scalar.astype(np.float32), affine, hdr).to_filename( - self._gen_filename(k)) + self._gen_filename(k) + ) return runtime @@ -168,25 +177,25 @@ def _list_outputs(self): class EstimateResponseSHInputSpec(DipyBaseInterfaceInputSpec): - in_evals = File( - exists=True, mandatory=True, desc=('input eigenvalues file')) - in_mask = File( - exists=True, desc=('input mask in which we find single fibers')) - fa_thresh = traits.Float(0.7, usedefault=True, desc=('FA threshold')) + in_evals = File(exists=True, mandatory=True, desc=("input eigenvalues file")) + in_mask = File(exists=True, desc=("input mask in which we find single fibers")) + fa_thresh = traits.Float(0.7, usedefault=True, desc=("FA threshold")) roi_radius = traits.Int( - 10, usedefault=True, desc=('ROI radius to be used in auto_response')) + 10, usedefault=True, desc=("ROI radius to be used in auto_response") + ) auto = traits.Bool( - xor=['recursive'], desc='use the auto_response estimator from dipy') + xor=["recursive"], desc="use the auto_response estimator from dipy" + ) recursive = traits.Bool( - xor=['auto'], desc='use the recursive response estimator from dipy') - response = File( - 'response.txt', usedefault=True, desc=('the output response file')) - out_mask = File('wm_mask.nii.gz', usedefault=True, desc='computed wm mask') + xor=["auto"], desc="use the recursive response estimator from dipy" + ) + response = File("response.txt", usedefault=True, desc=("the output response file")) + out_mask = File("wm_mask.nii.gz", usedefault=True, desc="computed wm mask") class EstimateResponseSHOutputSpec(TraitedSpec): - response = File(exists=True, desc=('the response file')) - out_mask = File(exists=True, desc=('output wm mask')) + response = File(exists=True, desc=("the response file")) + out_mask = File(exists=True, desc=("output wm mask")) class EstimateResponseSH(DipyDiffusionInterface): @@ -209,6 +218,7 @@ class EstimateResponseSH(DipyDiffusionInterface): """ + input_spec = EstimateResponseSHInputSpec output_spec = EstimateResponseSHOutputSpec @@ -242,12 +252,14 @@ def _run_interface(self, runtime): gtab, data, roi_radius=self.inputs.roi_radius, - fa_thr=self.inputs.fa_thresh) + fa_thr=self.inputs.fa_thresh, + ) response = response[0].tolist() + [S0] elif self.inputs.recursive: MD = np.nan_to_num(mean_diffusivity(evals)) * msk - indices = np.logical_or(FA >= 0.4, - (np.logical_and(FA >= 0.15, MD >= 0.0011))) + indices = np.logical_or( + FA >= 0.4, (np.logical_and(FA >= 0.15, MD >= 0.0011)) + ) data = nb.load(self.inputs.in_file).get_data() response = recursive_response( gtab, @@ -259,7 +271,8 @@ def _run_interface(self, runtime): init_trace=0.0021, iter=8, convergence=0.001, - parallel=True) + parallel=True, + ) ratio = abs(response[1] / response[0]) else: lambdas = evals[indices] @@ -269,42 +282,44 @@ def _run_interface(self, runtime): ratio = abs(response[1] / response[0]) if ratio > 0.25: - IFLOGGER.warning('Estimated response is not 
prolate enough. '
-                             'Ratio=%0.3f.', ratio)
-        elif ratio < 1.e-5 or np.any(np.isnan(response)):
-            response = np.array([1.8e-3, 3.6e-4, 3.6e-4, S0])
             IFLOGGER.warning(
-                'Estimated response is not valid, using a default one')
+                "Estimated response is not prolate enough. " "Ratio=%0.3f.", ratio
+            )
+        elif ratio < 1.0e-5 or np.any(np.isnan(response)):
+            response = np.array([1.8e-3, 3.6e-4, 3.6e-4, S0])
+            IFLOGGER.warning("Estimated response is not valid, using a default one")
         else:
-            IFLOGGER.info('Estimated response: %s', str(response[:3]))
+            IFLOGGER.info("Estimated response: %s", str(response[:3]))

         np.savetxt(op.abspath(self.inputs.response), response)

         wm_mask = np.zeros_like(FA)
         wm_mask[indices] = 1
         nb.Nifti1Image(wm_mask.astype(np.uint8), affine, None).to_filename(
-            op.abspath(self.inputs.out_mask))
+            op.abspath(self.inputs.out_mask)
+        )

         return runtime

     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['response'] = op.abspath(self.inputs.response)
-        outputs['out_mask'] = op.abspath(self.inputs.out_mask)
+        outputs["response"] = op.abspath(self.inputs.response)
+        outputs["out_mask"] = op.abspath(self.inputs.out_mask)

         return outputs


 class CSDInputSpec(DipyBaseInterfaceInputSpec):
-    in_mask = File(exists=True, desc=('input mask in which compute tensors'))
-    response = File(exists=True, desc=('single fiber estimated response'))
+    in_mask = File(exists=True, desc=("input mask in which compute tensors"))
+    response = File(exists=True, desc=("single fiber estimated response"))
     sh_order = traits.Int(
-        8, usedefault=True, desc=('maximal shperical harmonics order'))
-    save_fods = traits.Bool(True, usedefault=True, desc=('save fODFs in file'))
-    out_fods = File(desc=('fODFs output file name'))
+        8, usedefault=True, desc=("maximal spherical harmonics order")
+    )
+    save_fods = traits.Bool(True, usedefault=True, desc=("save fODFs in file"))
+    out_fods = File(desc=("fODFs output file name"))


 class CSDOutputSpec(TraitedSpec):
-    model = File(desc='Python pickled object of the CSD model fitted.')
-    out_fods = File(desc=('fODFs output file name'))
+    model = File(desc="Python pickled object of the CSD model fitted.")
+    out_fods = File(desc=("fODFs output file name"))


 class CSD(DipyDiffusionInterface):
@@ -328,12 +343,14 @@ class CSD(DipyDiffusionInterface):
     >>> csd.inputs.in_bvec = 'bvecs'
     >>> res = csd.run() # doctest: +SKIP
     """
+
     input_spec = CSDInputSpec
     output_spec = CSDOutputSpec

     def _run_interface(self, runtime):
         from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel
         from dipy.data import get_sphere
+
         # import marshal as pickle
         import pickle as pickle
         import gzip
@@ -355,30 +372,33 @@ def _run_interface(self, runtime):
         ratio = response[0][1] / response[0][0]

         if abs(ratio - 0.2) > 0.1:
-            IFLOGGER.warning('Estimated response is not prolate enough. '
-                             'Ratio=%0.3f.', ratio)
+            IFLOGGER.warning(
+                "Estimated response is not prolate enough. " "Ratio=%0.3f.", ratio
+            )

         csd_model = ConstrainedSphericalDeconvModel(
-            gtab, response, sh_order=self.inputs.sh_order)
+            gtab, response, sh_order=self.inputs.sh_order
+        )

-        IFLOGGER.info('Fitting CSD model')
+        IFLOGGER.info("Fitting CSD model")
         csd_fit = csd_model.fit(data, msk)

-        f = gzip.open(self._gen_filename('csdmodel', ext='.pklz'), 'wb')
+        f = gzip.open(self._gen_filename("csdmodel", ext=".pklz"), "wb")
         pickle.dump(csd_model, f, -1)
         f.close()

         if self.inputs.save_fods:
-            sphere = get_sphere('symmetric724')
+            sphere = get_sphere("symmetric724")
             fods = csd_fit.odf(sphere)
-            nb.Nifti1Image(fods.astype(np.float32), img.affine,
-                           None).to_filename(self._gen_filename('fods'))
+            nb.Nifti1Image(fods.astype(np.float32), img.affine, None).to_filename(
+                self._gen_filename("fods")
+            )

         return runtime

     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['model'] = self._gen_filename('csdmodel', ext='.pklz')
+        outputs["model"] = self._gen_filename("csdmodel", ext=".pklz")
         if self.inputs.save_fods:
-            outputs['out_fods'] = self._gen_filename('fods')
+            outputs["out_fods"] = self._gen_filename("fods")
         return outputs
diff --git a/nipype/interfaces/dipy/registration.py b/nipype/interfaces/dipy/registration.py
index e2e5c1e7ec..f70c566194 100644
--- a/nipype/interfaces/dipy/registration.py
+++ b/nipype/interfaces/dipy/registration.py
@@ -1,21 +1,21 @@
-
 from distutils.version import LooseVersion

 from ... import logging
-from .base import (HAVE_DIPY, dipy_version, dipy_to_nipype_interface,
-                   get_dipy_workflows)
+from .base import HAVE_DIPY, dipy_version, dipy_to_nipype_interface, get_dipy_workflows

-IFLOGGER = logging.getLogger('nipype.interface')
+IFLOGGER = logging.getLogger("nipype.interface")

-if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion('0.15'):
+if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion("0.15"):
     from dipy.workflows import align

     l_wkflw = get_dipy_workflows(align)

     for name, obj in l_wkflw:
-        new_name = name.replace('Flow', '')
+        new_name = name.replace("Flow", "")
         globals()[new_name] = dipy_to_nipype_interface(new_name, obj)
     del l_wkflw

 else:
-    IFLOGGER.info("We advise you to upgrade DIPY version. This upgrade will"
-                  " open access to more function")
+    IFLOGGER.info(
+        "We advise you to upgrade DIPY version. This upgrade will"
+        " open access to more functions"
+    )
diff --git a/nipype/interfaces/dipy/setup.py b/nipype/interfaces/dipy/setup.py
index 408d7af731..082d88f841 100644
--- a/nipype/interfaces/dipy/setup.py
+++ b/nipype/interfaces/dipy/setup.py
@@ -3,15 +3,16 @@
 # vi: set ft=python sts=4 ts=4 sw=4 et:


-def configuration(parent_package='', top_path=None):
+def configuration(parent_package="", top_path=None):
     from numpy.distutils.misc_util import Configuration

-    config = Configuration('dipy', parent_package, top_path)
+    config = Configuration("dipy", parent_package, top_path)
     # config.add_data_dir('tests')
     return config


-if __name__ == '__main__':
+if __name__ == "__main__":
     from numpy.distutils.core import setup
-    setup(**configuration(top_path='').todict())
+
+    setup(**configuration(top_path="").todict())
diff --git a/nipype/interfaces/dipy/simulate.py b/nipype/interfaces/dipy/simulate.py
index dfab8b2118..d9f0ed1023 100644
--- a/nipype/interfaces/dipy/simulate.py
+++ b/nipype/interfaces/dipy/simulate.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-from multiprocessing import (Pool, cpu_count)
+from multiprocessing import Pool, cpu_count

 import os.path as op

 import numpy as np
@@ -7,73 +7,84 @@
 from ... import logging
 from ...utils import NUMPY_MMAP
-from ..base import (traits, TraitedSpec, BaseInterfaceInputSpec, File,
-                    InputMultiPath, isdefined)
+from ..base import (
+    traits,
+    TraitedSpec,
+    BaseInterfaceInputSpec,
+    File,
+    InputMultiPath,
+    isdefined,
+)
 from .base import DipyBaseInterface

-IFLOGGER = logging.getLogger('nipype.interface')
+
+IFLOGGER = logging.getLogger("nipype.interface")


 class SimulateMultiTensorInputSpec(BaseInterfaceInputSpec):
     in_dirs = InputMultiPath(
-        File(exists=True),
-        mandatory=True,
-        desc='list of fibers (principal directions)')
+        File(exists=True), mandatory=True, desc="list of fibers (principal directions)"
+    )
     in_frac = InputMultiPath(
-        File(exists=True),
-        mandatory=True,
-        desc=('volume fraction of each fiber'))
+        File(exists=True), mandatory=True, desc=("volume fraction of each fiber")
+    )
     in_vfms = InputMultiPath(
         File(exists=True),
         mandatory=True,
-        desc=('volume fractions of isotropic '
-              'compartiments'))
-    in_mask = File(exists=True, desc='mask to simulate data')
+        desc=("volume fractions of isotropic " "compartments"),
+    )
+    in_mask = File(exists=True, desc="mask to simulate data")

     diff_iso = traits.List(
         [3000e-6, 960e-6, 680e-6],
         traits.Float,
         usedefault=True,
-        desc='Diffusivity of isotropic compartments')
+        desc="Diffusivity of isotropic compartments",
+    )
     diff_sf = traits.Tuple(
         (1700e-6, 200e-6, 200e-6),
         traits.Float,
         traits.Float,
         traits.Float,
         usedefault=True,
-        desc='Single fiber tensor')
-
-    n_proc = traits.Int(0, usedefault=True, desc='number of processes')
-    baseline = File(exists=True, mandatory=True, desc='baseline T2 signal')
-    gradients = File(exists=True, desc='gradients file')
-    in_bvec = File(exists=True, desc='input bvecs file')
-    in_bval = File(exists=True, desc='input bvals file')
+        desc="Single fiber tensor",
+    )
+
+    n_proc = traits.Int(0, usedefault=True, desc="number of processes")
+    baseline = File(exists=True, mandatory=True, desc="baseline T2 signal")
+    gradients = File(exists=True, desc="gradients file")
+    in_bvec = File(exists=True, desc="input bvecs file")
+    in_bval = File(exists=True, desc="input bvals file")

     num_dirs = traits.Int(
         32,
         usedefault=True,
-        desc=('number of gradient directions (when table '
-              'is automatically generated)'))
+        desc=(
+            "number of gradient directions (when table " "is automatically generated)"
+        ),
+    )
     bvalues = traits.List(
         traits.Int,
         value=[1000, 3000],
         usedefault=True,
-        desc=('list of b-values (when table '
-              'is automatically generated)'))
+        desc=("list of b-values (when table " "is automatically generated)"),
+    )
     out_file = File(
-        'sim_dwi.nii.gz',
+        "sim_dwi.nii.gz",
         usedefault=True,
-        desc='output file with fractions to be simluated')
+        desc="output file with fractions to be simulated",
+    )
     out_mask = File(
-        'sim_msk.nii.gz', usedefault=True, desc='file with the mask simulated')
-    out_bvec = File('bvec.sim', usedefault=True, desc='simulated b vectors')
-    out_bval = File('bval.sim', usedefault=True, desc='simulated b values')
-    snr = traits.Int(0, usedefault=True, desc='signal-to-noise ratio (dB)')
+        "sim_msk.nii.gz", usedefault=True, desc="file with the mask simulated"
+    )
+    out_bvec = File("bvec.sim", usedefault=True, desc="simulated b vectors")
+    out_bval = File("bval.sim", usedefault=True, desc="simulated b values")
+    snr = traits.Int(0, usedefault=True, desc="signal-to-noise ratio (dB)")


 class SimulateMultiTensorOutputSpec(TraitedSpec):
-    out_file = File(exists=True, desc='simulated DWIs')
-    out_mask = File(exists=True, desc='mask file')
-    out_bvec =
File(exists=True, desc='simulated b vectors') - out_bval = File(exists=True, desc='simulated b values') + out_file = File(exists=True, desc="simulated DWIs") + out_mask = File(exists=True, desc="mask file") + out_bvec = File(exists=True, desc="simulated b vectors") + out_bval = File(exists=True, desc="simulated b values") class SimulateMultiTensor(DipyBaseInterface): @@ -95,6 +106,7 @@ class SimulateMultiTensor(DipyBaseInterface): >>> sim.inputs.in_bval = 'bvals' >>> sim.run() # doctest: +SKIP """ + input_spec = SimulateMultiTensorInputSpec output_spec = SimulateMultiTensorOutputSpec @@ -108,8 +120,7 @@ def _run_interface(self, runtime): bvecs = np.loadtxt(self.inputs.in_bvec).T gtab = gradient_table(bvals, bvecs) else: - gtab = _generate_gradients(self.inputs.num_dirs, - self.inputs.bvalues) + gtab = _generate_gradients(self.inputs.num_dirs, self.inputs.bvalues) ndirs = len(gtab.bvals) np.savetxt(op.abspath(self.inputs.out_bvec), gtab.bvecs.T) np.savetxt(op.abspath(self.inputs.out_bval), gtab.bvals) @@ -123,15 +134,17 @@ def _run_interface(self, runtime): # Check and load sticks and their volume fractions nsticks = len(self.inputs.in_dirs) if len(self.inputs.in_frac) != nsticks: - raise RuntimeError(('Number of sticks and their volume fractions' - ' must match.')) + raise RuntimeError( + ("Number of sticks and their volume fractions" " must match.") + ) # Volume fractions of isotropic compartments nballs = len(self.inputs.in_vfms) vfs = np.squeeze( - nb.concat_images([ - nb.load(f, mmap=NUMPY_MMAP) for f in self.inputs.in_vfms - ]).get_data()) + nb.concat_images( + [nb.load(f, mmap=NUMPY_MMAP) for f in self.inputs.in_vfms] + ).get_data() + ) if nballs == 1: vfs = vfs[..., np.newaxis] total_vf = np.sum(vfs, axis=3) @@ -150,9 +163,10 @@ def _run_interface(self, runtime): # Fiber fractions ffsim = nb.concat_images( - [nb.load(f, mmap=NUMPY_MMAP) for f in self.inputs.in_frac]) + [nb.load(f, mmap=NUMPY_MMAP) for f in self.inputs.in_frac] + ) ffs = np.nan_to_num(np.squeeze(ffsim.get_data())) # fiber fractions - ffs = np.clip(ffs, 0., 1.) + ffs = np.clip(ffs, 0.0, 1.0) if nsticks == 1: ffs = ffs[..., np.newaxis] @@ -172,19 +186,19 @@ def _run_interface(self, runtime): for i in range(vfs.shape[-1]): vfs[..., i] -= total_ff - vfs = np.clip(vfs, 0., 1.) + vfs = np.clip(vfs, 0.0, 1.0) fractions = np.concatenate((ffs, vfs), axis=3) - nb.Nifti1Image(fractions, aff, None).to_filename('fractions.nii.gz') - nb.Nifti1Image(np.sum(fractions, axis=3), aff, - None).to_filename('total_vf.nii.gz') + nb.Nifti1Image(fractions, aff, None).to_filename("fractions.nii.gz") + nb.Nifti1Image(np.sum(fractions, axis=3), aff, None).to_filename( + "total_vf.nii.gz" + ) mhdr = hdr.copy() mhdr.set_data_dtype(np.uint8) - mhdr.set_xyzt_units('mm', 'sec') - nb.Nifti1Image(msk, aff, mhdr).to_filename( - op.abspath(self.inputs.out_mask)) + mhdr.set_xyzt_units("mm", "sec") + nb.Nifti1Image(msk, aff, mhdr).to_filename(op.abspath(self.inputs.out_mask)) # Initialize stack of args fracs = fractions[msk > 0] @@ -206,7 +220,7 @@ def _run_interface(self, runtime): for d in range(nballs): fd = np.random.randn(nvox, 3) w = np.linalg.norm(fd, axis=1) - fd[w < np.finfo(float).eps, ...] = np.array([1., 0., 0.]) + fd[w < np.finfo(float).eps, ...] 
= np.array([1.0, 0.0, 0.0]) w[w < np.finfo(float).eps] = 1.0 fd /= w[..., np.newaxis] dirs = np.hstack((dirs, fd)) @@ -214,26 +228,23 @@ sf_evals = list(self.inputs.diff_sf) ba_evals = list(self.inputs.diff_iso) - mevals = [sf_evals] * nsticks + \ - [[ba_evals[d]] * 3 for d in range(nballs)] + mevals = [sf_evals] * nsticks + [[ba_evals[d]] * 3 for d in range(nballs)] b0 = b0_im.get_data()[msk > 0] args = [] for i in range(nvox): - args.append({ - 'fractions': - fracs[i, ...].tolist(), - 'sticks': - [tuple(dirs[i, j:j + 3]) for j in range(nsticks + nballs)], - 'gradients': - gtab, - 'mevals': - mevals, - 'S0': - b0[i], - 'snr': - self.inputs.snr - }) + args.append( + { + "fractions": fracs[i, ...].tolist(), + "sticks": [ + tuple(dirs[i, j : j + 3]) for j in range(nsticks + nballs) + ], + "gradients": gtab, + "mevals": mevals, + "S0": b0[i], + "snr": self.inputs.snr, + } + ) n_proc = self.inputs.n_proc if n_proc == 0: @@ -246,30 +257,34 @@ # Simulate sticks using dipy IFLOGGER.info( - 'Starting simulation of %d voxels, %d diffusion directions.', - len(args), ndirs) + "Starting simulation of %d voxels, %d diffusion directions.", + len(args), + ndirs, + ) result = np.array(pool.map(_compute_voxel, args)) if np.shape(result)[1] != ndirs: - raise RuntimeError(('Computed directions do not match number' - 'of b-values.')) + raise RuntimeError( + ("Computed directions do not match number " "of b-values.") + ) signal = np.zeros((shape[0], shape[1], shape[2], ndirs)) signal[msk > 0] = result simhdr = hdr.copy() simhdr.set_data_dtype(np.float32) - simhdr.set_xyzt_units('mm', 'sec') + simhdr.set_xyzt_units("mm", "sec") nb.Nifti1Image(signal.astype(np.float32), aff, simhdr).to_filename( - op.abspath(self.inputs.out_file)) + op.abspath(self.inputs.out_file) + ) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) - outputs['out_mask'] = op.abspath(self.inputs.out_mask) - outputs['out_bvec'] = op.abspath(self.inputs.out_bvec) - outputs['out_bval'] = op.abspath(self.inputs.out_bval) + outputs["out_file"] = op.abspath(self.inputs.out_file) + outputs["out_mask"] = op.abspath(self.inputs.out_mask) + outputs["out_bvec"] = op.abspath(self.inputs.out_bvec) + outputs["out_bval"] = op.abspath(self.inputs.out_bval) return outputs @@ -291,24 +306,25 @@ def _compute_voxel(args): """ from dipy.sims.voxel import multi_tensor - ffs = args['fractions'] - gtab = args['gradients'] + ffs = args["fractions"] + gtab = args["gradients"] signal = np.zeros_like(gtab.bvals, dtype=np.float32) # Simulate dwi signal sf_vf = np.sum(ffs) if sf_vf > 0.0: - ffs = ((np.array(ffs) / sf_vf) * 100) - snr = args['snr'] if args['snr'] > 0 else None + ffs = (np.array(ffs) / sf_vf) * 100 + snr = args["snr"] if args["snr"] > 0 else None try: signal, _ = multi_tensor( gtab, - args['mevals'], - S0=args['S0'], - angles=args['sticks'], + args["mevals"], + S0=args["S0"], + angles=args["sticks"], fractions=ffs, - snr=snr) + snr=snr, + ) except Exception: pass @@ -322,7 +338,7 @@ def _generate_gradients(ndirs=64, values=[1000, 3000], nb0s=1): """ import numpy as np - from dipy.core.sphere import (disperse_charges, Sphere, HemiSphere) + from dipy.core.sphere import disperse_charges, Sphere, HemiSphere from dipy.core.gradients import gradient_table theta = np.pi * np.random.rand(ndirs)
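For reference, `_compute_voxel` above delegates the per-voxel simulation to `dipy.sims.voxel.multi_tensor`. A minimal standalone sketch of that call, assuming DIPY is installed (the gradient scheme, S0, angles, and SNR below are illustrative values, not nipype defaults):

import numpy as np
from dipy.core.gradients import gradient_table
from dipy.sims.voxel import multi_tensor

# One b0 plus six unit gradient directions; any valid scheme works here.
bvals = np.array([0] + [1000] * 6)
bvecs = np.vstack(([0, 0, 0], np.eye(3), -np.eye(3)))
gtab = gradient_table(bvals, bvecs)

# Two crossing fibers sharing the single-fiber eigenvalues (cf. diff_sf);
# fractions are percentages summing to 100, as _compute_voxel normalizes them.
mevals = np.array([[1700e-6, 200e-6, 200e-6],
                   [1700e-6, 200e-6, 200e-6]])
signal, _ = multi_tensor(gtab, mevals, S0=100.0,
                         angles=[(0, 0), (90, 0)],
                         fractions=[50, 50], snr=20)

The returned signal has one entry per gradient direction, which is why the interface checks `np.shape(result)[1] != ndirs` above.

diff --git a/nipype/interfaces/dipy/stats.py b/nipype/interfaces/dipy/stats.py index 8f55b3322a..fff0184a56 100644 ---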
a/nipype/interfaces/dipy/stats.py +++ b/nipype/interfaces/dipy/stats.py @@ -1,20 +1,20 @@ - from distutils.version import LooseVersion from ... import logging -from .base import (HAVE_DIPY, dipy_version, dipy_to_nipype_interface, - get_dipy_workflows) +from .base import HAVE_DIPY, dipy_version, dipy_to_nipype_interface, get_dipy_workflows -IFLOGGER = logging.getLogger('nipype.interface') +IFLOGGER = logging.getLogger("nipype.interface") -if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion('0.16'): +if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion("0.16"): from dipy.workflows import stats l_wkflw = get_dipy_workflows(stats) for name, obj in l_wkflw: - new_name = name.replace('Flow', '') + new_name = name.replace("Flow", "") globals()[new_name] = dipy_to_nipype_interface(new_name, obj) del l_wkflw else: - IFLOGGER.info("We advise you to upgrade DIPY version. This upgrade will" - " open access to more function") + IFLOGGER.info( + "We advise you to upgrade DIPY version. This upgrade will" + " open access to more functions" + ) diff --git a/nipype/interfaces/dipy/tensors.py b/nipype/interfaces/dipy/tensors.py index 3dc06c0356..2f9ad95f5b 100644 --- a/nipype/interfaces/dipy/tensors.py +++ b/nipype/interfaces/dipy/tensors.py @@ -6,11 +6,11 @@ from ..base import TraitedSpec, File, isdefined from .base import DipyDiffusionInterface, DipyBaseInterfaceInputSpec -IFLOGGER = logging.getLogger('nipype.interface') +IFLOGGER = logging.getLogger("nipype.interface") class DTIInputSpec(DipyBaseInterfaceInputSpec): - mask_file = File(exists=True, desc='An optional white matter mask') + mask_file = File(exists=True, desc="An optional white matter mask") class DTIOutputSpec(TraitedSpec): @@ -36,12 +36,14 @@ class DTI(DipyDiffusionInterface): >>> dti.inputs.in_bval = 'bvals' >>> dti.run() # doctest: +SKIP """ + input_spec = DTIInputSpec output_spec = DTIOutputSpec def _run_interface(self, runtime): from dipy.reconst import dti from dipy.io.utils import nifti1_symmat + gtab = self._get_gradient_table() img = nb.load(self.inputs.in_file) @@ -56,22 +58,22 @@ ten_fit = tenmodel.fit(data, mask) lower_triangular = ten_fit.lower_triangular() img = nifti1_symmat(lower_triangular, affine) - out_file = self._gen_filename('dti') + out_file = self._gen_filename("dti") nb.save(img, out_file) - IFLOGGER.info('DTI parameters image saved as %s', out_file) + IFLOGGER.info("DTI parameters image saved as %s", out_file) # FA MD RD and AD for metric in ["fa", "md", "rd", "ad", "color_fa"]: data = getattr(ten_fit, metric).astype("float32") out_name = self._gen_filename(metric) nb.Nifti1Image(data, affine).to_filename(out_name) - IFLOGGER.info('DTI %s image saved as %s', metric, out_name) + IFLOGGER.info("DTI %s image saved as %s", metric, out_name) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = self._gen_filename('dti') + outputs["out_file"] = self._gen_filename("dti") for metric in ["fa", "md", "rd", "ad", "color_fa"]: outputs["{}_file".format(metric)] = self._gen_filename(metric) @@ -80,7 +82,7 @@ class TensorModeInputSpec(DipyBaseInterfaceInputSpec): - mask_file = File(exists=True, desc='An optional white matter mask') + mask_file = File(exists=True, desc="An optional white matter mask") class TensorModeOutputSpec(TraitedSpec): @@ -109,6 +111,7 @@ class TensorMode(DipyDiffusionInterface): >>> mode.inputs.in_bval = 'bvals' >>> mode.run() # doctest: +SKIP """ + input_spec = TensorModeInputSpec
output_spec = TensorModeOutputSpec @@ -136,12 +139,12 @@ def _run_interface(self, runtime): # Write as a 3D Nifti image with the original affine img = nb.Nifti1Image(mode_data, affine) - out_file = self._gen_filename('mode') + out_file = self._gen_filename("mode") nb.save(img, out_file) - IFLOGGER.info('Tensor mode image saved as %s', out_file) + IFLOGGER.info("Tensor mode image saved as %s", out_file) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = self._gen_filename('mode') + outputs["out_file"] = self._gen_filename("mode") return outputs diff --git a/nipype/interfaces/dipy/tests/test_auto_APMQball.py b/nipype/interfaces/dipy/tests/test_auto_APMQball.py index 35a73b8d87..fcc97ebf70 100644 --- a/nipype/interfaces/dipy/tests/test_auto_APMQball.py +++ b/nipype/interfaces/dipy/tests/test_auto_APMQball.py @@ -4,20 +4,11 @@ def test_APMQball_inputs(): input_map = dict( - b0_thres=dict(usedefault=True, ), - in_bval=dict( - extensions=None, - mandatory=True, - ), - in_bvec=dict( - extensions=None, - mandatory=True, - ), - in_file=dict( - extensions=None, - mandatory=True, - ), - mask_file=dict(extensions=None, ), + b0_thres=dict(usedefault=True,), + in_bval=dict(extensions=None, mandatory=True,), + in_bvec=dict(extensions=None, mandatory=True,), + in_file=dict(extensions=None, mandatory=True,), + mask_file=dict(extensions=None,), out_prefix=dict(), ) inputs = APMQball.input_spec() @@ -25,8 +16,10 @@ def test_APMQball_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_APMQball_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = APMQball.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dipy/tests/test_auto_CSD.py b/nipype/interfaces/dipy/tests/test_auto_CSD.py index dde8f52295..a9c92b02b1 100644 --- a/nipype/interfaces/dipy/tests/test_auto_CSD.py +++ b/nipype/interfaces/dipy/tests/test_auto_CSD.py @@ -4,36 +4,26 @@ def test_CSD_inputs(): input_map = dict( - b0_thres=dict(usedefault=True, ), - in_bval=dict( - extensions=None, - mandatory=True, - ), - in_bvec=dict( - extensions=None, - mandatory=True, - ), - in_file=dict( - extensions=None, - mandatory=True, - ), - in_mask=dict(extensions=None, ), - out_fods=dict(extensions=None, ), + b0_thres=dict(usedefault=True,), + in_bval=dict(extensions=None, mandatory=True,), + in_bvec=dict(extensions=None, mandatory=True,), + in_file=dict(extensions=None, mandatory=True,), + in_mask=dict(extensions=None,), + out_fods=dict(extensions=None,), out_prefix=dict(), - response=dict(extensions=None, ), - save_fods=dict(usedefault=True, ), - sh_order=dict(usedefault=True, ), + response=dict(extensions=None,), + save_fods=dict(usedefault=True,), + sh_order=dict(usedefault=True,), ) inputs = CSD.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CSD_outputs(): - output_map = dict( - model=dict(extensions=None, ), - out_fods=dict(extensions=None, ), - ) + output_map = dict(model=dict(extensions=None,), out_fods=dict(extensions=None,),) outputs = CSD.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dipy/tests/test_auto_DTI.py b/nipype/interfaces/dipy/tests/test_auto_DTI.py index be2b3375af..8e2482b129 100644 --- 
a/nipype/interfaces/dipy/tests/test_auto_DTI.py +++ b/nipype/interfaces/dipy/tests/test_auto_DTI.py @@ -4,20 +4,11 @@ def test_DTI_inputs(): input_map = dict( - b0_thres=dict(usedefault=True, ), - in_bval=dict( - extensions=None, - mandatory=True, - ), - in_bvec=dict( - extensions=None, - mandatory=True, - ), - in_file=dict( - extensions=None, - mandatory=True, - ), - mask_file=dict(extensions=None, ), + b0_thres=dict(usedefault=True,), + in_bval=dict(extensions=None, mandatory=True,), + in_bvec=dict(extensions=None, mandatory=True,), + in_file=dict(extensions=None, mandatory=True,), + mask_file=dict(extensions=None,), out_prefix=dict(), ) inputs = DTI.input_spec() @@ -25,14 +16,16 @@ def test_DTI_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DTI_outputs(): output_map = dict( - ad_file=dict(extensions=None, ), - color_fa_file=dict(extensions=None, ), - fa_file=dict(extensions=None, ), - md_file=dict(extensions=None, ), - out_file=dict(extensions=None, ), - rd_file=dict(extensions=None, ), + ad_file=dict(extensions=None,), + color_fa_file=dict(extensions=None,), + fa_file=dict(extensions=None,), + md_file=dict(extensions=None,), + out_file=dict(extensions=None,), + rd_file=dict(extensions=None,), ) outputs = DTI.output_spec() diff --git a/nipype/interfaces/dipy/tests/test_auto_Denoise.py b/nipype/interfaces/dipy/tests/test_auto_Denoise.py index c76fc9b18a..453e794f39 100644 --- a/nipype/interfaces/dipy/tests/test_auto_Denoise.py +++ b/nipype/interfaces/dipy/tests/test_auto_Denoise.py @@ -4,19 +4,13 @@ def test_Denoise_inputs(): input_map = dict( - block_radius=dict(usedefault=True, ), - in_file=dict( - extensions=None, - mandatory=True, - ), - in_mask=dict(extensions=None, ), - noise_mask=dict(extensions=None, ), - noise_model=dict( - mandatory=True, - usedefault=True, - ), - patch_radius=dict(usedefault=True, ), - signal_mask=dict(extensions=None, ), + block_radius=dict(usedefault=True,), + in_file=dict(extensions=None, mandatory=True,), + in_mask=dict(extensions=None,), + noise_mask=dict(extensions=None,), + noise_model=dict(mandatory=True, usedefault=True,), + patch_radius=dict(usedefault=True,), + signal_mask=dict(extensions=None,), snr=dict(), ) inputs = Denoise.input_spec() @@ -24,8 +18,10 @@ def test_Denoise_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Denoise_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Denoise.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dipy/tests/test_auto_DipyDiffusionInterface.py b/nipype/interfaces/dipy/tests/test_auto_DipyDiffusionInterface.py index 48940061f8..657128b050 100644 --- a/nipype/interfaces/dipy/tests/test_auto_DipyDiffusionInterface.py +++ b/nipype/interfaces/dipy/tests/test_auto_DipyDiffusionInterface.py @@ -4,19 +4,10 @@ def test_DipyDiffusionInterface_inputs(): input_map = dict( - b0_thres=dict(usedefault=True, ), - in_bval=dict( - extensions=None, - mandatory=True, - ), - in_bvec=dict( - extensions=None, - mandatory=True, - ), - in_file=dict( - extensions=None, - mandatory=True, - ), + b0_thres=dict(usedefault=True,), + in_bval=dict(extensions=None, mandatory=True,), + in_bvec=dict(extensions=None, mandatory=True,), + in_file=dict(extensions=None, mandatory=True,), 
out_prefix=dict(), ) inputs = DipyDiffusionInterface.input_spec() diff --git a/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py b/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py index a3ca7819a9..9b9cf49d6f 100644 --- a/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py +++ b/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py @@ -4,48 +4,29 @@ def test_EstimateResponseSH_inputs(): input_map = dict( - auto=dict(xor=['recursive'], ), - b0_thres=dict(usedefault=True, ), - fa_thresh=dict(usedefault=True, ), - in_bval=dict( - extensions=None, - mandatory=True, - ), - in_bvec=dict( - extensions=None, - mandatory=True, - ), - in_evals=dict( - extensions=None, - mandatory=True, - ), - in_file=dict( - extensions=None, - mandatory=True, - ), - in_mask=dict(extensions=None, ), - out_mask=dict( - extensions=None, - usedefault=True, - ), + auto=dict(xor=["recursive"],), + b0_thres=dict(usedefault=True,), + fa_thresh=dict(usedefault=True,), + in_bval=dict(extensions=None, mandatory=True,), + in_bvec=dict(extensions=None, mandatory=True,), + in_evals=dict(extensions=None, mandatory=True,), + in_file=dict(extensions=None, mandatory=True,), + in_mask=dict(extensions=None,), + out_mask=dict(extensions=None, usedefault=True,), out_prefix=dict(), - recursive=dict(xor=['auto'], ), - response=dict( - extensions=None, - usedefault=True, - ), - roi_radius=dict(usedefault=True, ), + recursive=dict(xor=["auto"],), + response=dict(extensions=None, usedefault=True,), + roi_radius=dict(usedefault=True,), ) inputs = EstimateResponseSH.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EstimateResponseSH_outputs(): - output_map = dict( - out_mask=dict(extensions=None, ), - response=dict(extensions=None, ), - ) + output_map = dict(out_mask=dict(extensions=None,), response=dict(extensions=None,),) outputs = EstimateResponseSH.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dipy/tests/test_auto_RESTORE.py b/nipype/interfaces/dipy/tests/test_auto_RESTORE.py index 0795f4ea16..a172847174 100644 --- a/nipype/interfaces/dipy/tests/test_auto_RESTORE.py +++ b/nipype/interfaces/dipy/tests/test_auto_RESTORE.py @@ -4,21 +4,12 @@ def test_RESTORE_inputs(): input_map = dict( - b0_thres=dict(usedefault=True, ), - in_bval=dict( - extensions=None, - mandatory=True, - ), - in_bvec=dict( - extensions=None, - mandatory=True, - ), - in_file=dict( - extensions=None, - mandatory=True, - ), - in_mask=dict(extensions=None, ), - noise_mask=dict(extensions=None, ), + b0_thres=dict(usedefault=True,), + in_bval=dict(extensions=None, mandatory=True,), + in_bvec=dict(extensions=None, mandatory=True,), + in_file=dict(extensions=None, mandatory=True,), + in_mask=dict(extensions=None,), + noise_mask=dict(extensions=None,), out_prefix=dict(), ) inputs = RESTORE.input_spec() @@ -26,15 +17,17 @@ def test_RESTORE_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RESTORE_outputs(): output_map = dict( - evals=dict(extensions=None, ), - evecs=dict(extensions=None, ), - fa=dict(extensions=None, ), - md=dict(extensions=None, ), - mode=dict(extensions=None, ), - rd=dict(extensions=None, ), - trace=dict(extensions=None, ), + evals=dict(extensions=None,), + evecs=dict(extensions=None,), + fa=dict(extensions=None,), + 
md=dict(extensions=None,), + mode=dict(extensions=None,), + rd=dict(extensions=None,), + trace=dict(extensions=None,), ) outputs = RESTORE.output_spec() diff --git a/nipype/interfaces/dipy/tests/test_auto_Resample.py b/nipype/interfaces/dipy/tests/test_auto_Resample.py index 044fdbe8f7..ac1b6ce9cd 100644 --- a/nipype/interfaces/dipy/tests/test_auto_Resample.py +++ b/nipype/interfaces/dipy/tests/test_auto_Resample.py @@ -4,14 +4,8 @@ def test_Resample_inputs(): input_map = dict( - in_file=dict( - extensions=None, - mandatory=True, - ), - interp=dict( - mandatory=True, - usedefault=True, - ), + in_file=dict(extensions=None, mandatory=True,), + interp=dict(mandatory=True, usedefault=True,), vox_size=dict(), ) inputs = Resample.input_spec() @@ -19,8 +13,10 @@ def test_Resample_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Resample_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Resample.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dipy/tests/test_auto_SimulateMultiTensor.py b/nipype/interfaces/dipy/tests/test_auto_SimulateMultiTensor.py index 12eea9e961..3202306026 100644 --- a/nipype/interfaces/dipy/tests/test_auto_SimulateMultiTensor.py +++ b/nipype/interfaces/dipy/tests/test_auto_SimulateMultiTensor.py @@ -4,51 +4,38 @@ def test_SimulateMultiTensor_inputs(): input_map = dict( - baseline=dict( - extensions=None, - mandatory=True, - ), - bvalues=dict(usedefault=True, ), - diff_iso=dict(usedefault=True, ), - diff_sf=dict(usedefault=True, ), - gradients=dict(extensions=None, ), - in_bval=dict(extensions=None, ), - in_bvec=dict(extensions=None, ), - in_dirs=dict(mandatory=True, ), - in_frac=dict(mandatory=True, ), - in_mask=dict(extensions=None, ), - in_vfms=dict(mandatory=True, ), - n_proc=dict(usedefault=True, ), - num_dirs=dict(usedefault=True, ), - out_bval=dict( - extensions=None, - usedefault=True, - ), - out_bvec=dict( - extensions=None, - usedefault=True, - ), - out_file=dict( - extensions=None, - usedefault=True, - ), - out_mask=dict( - extensions=None, - usedefault=True, - ), - snr=dict(usedefault=True, ), + baseline=dict(extensions=None, mandatory=True,), + bvalues=dict(usedefault=True,), + diff_iso=dict(usedefault=True,), + diff_sf=dict(usedefault=True,), + gradients=dict(extensions=None,), + in_bval=dict(extensions=None,), + in_bvec=dict(extensions=None,), + in_dirs=dict(mandatory=True,), + in_frac=dict(mandatory=True,), + in_mask=dict(extensions=None,), + in_vfms=dict(mandatory=True,), + n_proc=dict(usedefault=True,), + num_dirs=dict(usedefault=True,), + out_bval=dict(extensions=None, usedefault=True,), + out_bvec=dict(extensions=None, usedefault=True,), + out_file=dict(extensions=None, usedefault=True,), + out_mask=dict(extensions=None, usedefault=True,), + snr=dict(usedefault=True,), ) inputs = SimulateMultiTensor.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SimulateMultiTensor_outputs(): output_map = dict( - out_bval=dict(extensions=None, ), - out_bvec=dict(extensions=None, ), - out_file=dict(extensions=None, ), - out_mask=dict(extensions=None, ), + out_bval=dict(extensions=None,), + out_bvec=dict(extensions=None,), + out_file=dict(extensions=None,), + out_mask=dict(extensions=None,), ) outputs = 
SimulateMultiTensor.output_spec() diff --git a/nipype/interfaces/dipy/tests/test_auto_StreamlineTractography.py b/nipype/interfaces/dipy/tests/test_auto_StreamlineTractography.py index c88795ca1c..bbe4abce94 100644 --- a/nipype/interfaces/dipy/tests/test_auto_StreamlineTractography.py +++ b/nipype/interfaces/dipy/tests/test_auto_StreamlineTractography.py @@ -4,52 +4,33 @@ def test_StreamlineTractography_inputs(): input_map = dict( - gfa_thresh=dict( - mandatory=True, - usedefault=True, - ), - in_file=dict( - extensions=None, - mandatory=True, - ), - in_model=dict(extensions=None, ), - in_peaks=dict(extensions=None, ), - min_angle=dict( - mandatory=True, - usedefault=True, - ), - multiprocess=dict( - mandatory=True, - usedefault=True, - ), - num_seeds=dict( - mandatory=True, - usedefault=True, - ), + gfa_thresh=dict(mandatory=True, usedefault=True,), + in_file=dict(extensions=None, mandatory=True,), + in_model=dict(extensions=None,), + in_peaks=dict(extensions=None,), + min_angle=dict(mandatory=True, usedefault=True,), + multiprocess=dict(mandatory=True, usedefault=True,), + num_seeds=dict(mandatory=True, usedefault=True,), out_prefix=dict(), - peak_threshold=dict( - mandatory=True, - usedefault=True, - ), - save_seeds=dict( - mandatory=True, - usedefault=True, - ), - seed_coord=dict(extensions=None, ), - seed_mask=dict(extensions=None, ), - tracking_mask=dict(extensions=None, ), + peak_threshold=dict(mandatory=True, usedefault=True,), + save_seeds=dict(mandatory=True, usedefault=True,), + seed_coord=dict(extensions=None,), + seed_mask=dict(extensions=None,), + tracking_mask=dict(extensions=None,), ) inputs = StreamlineTractography.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_StreamlineTractography_outputs(): output_map = dict( - gfa=dict(extensions=None, ), - odf_peaks=dict(extensions=None, ), - out_seeds=dict(extensions=None, ), - tracks=dict(extensions=None, ), + gfa=dict(extensions=None,), + odf_peaks=dict(extensions=None,), + out_seeds=dict(extensions=None,), + tracks=dict(extensions=None,), ) outputs = StreamlineTractography.output_spec() diff --git a/nipype/interfaces/dipy/tests/test_auto_TensorMode.py b/nipype/interfaces/dipy/tests/test_auto_TensorMode.py index 274c9dbc77..29a01e4a75 100644 --- a/nipype/interfaces/dipy/tests/test_auto_TensorMode.py +++ b/nipype/interfaces/dipy/tests/test_auto_TensorMode.py @@ -4,20 +4,11 @@ def test_TensorMode_inputs(): input_map = dict( - b0_thres=dict(usedefault=True, ), - in_bval=dict( - extensions=None, - mandatory=True, - ), - in_bvec=dict( - extensions=None, - mandatory=True, - ), - in_file=dict( - extensions=None, - mandatory=True, - ), - mask_file=dict(extensions=None, ), + b0_thres=dict(usedefault=True,), + in_bval=dict(extensions=None, mandatory=True,), + in_bvec=dict(extensions=None, mandatory=True,), + in_file=dict(extensions=None, mandatory=True,), + mask_file=dict(extensions=None,), out_prefix=dict(), ) inputs = TensorMode.input_spec() @@ -25,8 +16,10 @@ def test_TensorMode_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TensorMode_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = TensorMode.output_spec() for key, metadata in list(output_map.items()): diff --git 
a/nipype/interfaces/dipy/tests/test_auto_TrackDensityMap.py b/nipype/interfaces/dipy/tests/test_auto_TrackDensityMap.py index a52b40e0fa..06265ffd0f 100644 --- a/nipype/interfaces/dipy/tests/test_auto_TrackDensityMap.py +++ b/nipype/interfaces/dipy/tests/test_auto_TrackDensityMap.py @@ -5,16 +5,10 @@ def test_TrackDensityMap_inputs(): input_map = dict( data_dims=dict(), - in_file=dict( - extensions=None, - mandatory=True, - ), - out_filename=dict( - extensions=None, - usedefault=True, - ), - points_space=dict(usedefault=True, ), - reference=dict(extensions=None, ), + in_file=dict(extensions=None, mandatory=True,), + out_filename=dict(extensions=None, usedefault=True,), + points_space=dict(usedefault=True,), + reference=dict(extensions=None,), voxel_dims=dict(), ) inputs = TrackDensityMap.input_spec() @@ -22,8 +16,10 @@ def test_TrackDensityMap_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TrackDensityMap_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = TrackDensityMap.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dipy/tests/test_base.py b/nipype/interfaces/dipy/tests/test_base.py index 740057bcd3..38922ba0b7 100644 --- a/nipype/interfaces/dipy/tests/test_base.py +++ b/nipype/interfaces/dipy/tests/test_base.py @@ -1,50 +1,72 @@ import pytest from collections import namedtuple from ...base import traits, File, TraitedSpec, BaseInterfaceInputSpec -from ..base import (convert_to_traits_type, create_interface_specs, - dipy_to_nipype_interface, DipyBaseInterface, no_dipy, - get_dipy_workflows) +from ..base import ( + convert_to_traits_type, + create_interface_specs, + dipy_to_nipype_interface, + DipyBaseInterface, + no_dipy, + get_dipy_workflows, +) def test_convert_to_traits_type(): Params = namedtuple("Params", "traits_type is_file") Res = namedtuple("Res", "traits_type is_mandatory") - l_entries = [Params('variable string', False), - Params('variable int', False), - Params('variable float', False), - Params('variable bool', False), - Params('variable complex', False), - Params('variable int, optional', False), - Params('variable string, optional', False), - Params('variable float, optional', False), - Params('variable bool, optional', False), - Params('variable complex, optional', False), - Params('string', False), Params('int', False), - Params('string', True), Params('float', False), - Params('bool', False), Params('complex', False), - Params('string, optional', False), - Params('int, optional', False), - Params('string, optional', True), - Params('float, optional', False), - Params('bool, optional', False), - Params('complex, optional', False), - ] - l_expected = [Res(traits.ListStr, True), Res(traits.ListInt, True), - Res(traits.ListFloat, True), Res(traits.ListBool, True), - Res(traits.ListComplex, True), Res(traits.ListInt, False), - Res(traits.ListStr, False), Res(traits.ListFloat, False), - Res(traits.ListBool, False), Res(traits.ListComplex, False), - Res(traits.Str, True), Res(traits.Int, True), - Res(File, True), Res(traits.Float, True), - Res(traits.Bool, True), Res(traits.Complex, True), - Res(traits.Str, False), Res(traits.Int, False), - Res(File, False), Res(traits.Float, False), - Res(traits.Bool, False), Res(traits.Complex, False), - ] + l_entries = [ + Params("variable string", False), + Params("variable int", False), + 
Params("variable float", False), + Params("variable bool", False), + Params("variable complex", False), + Params("variable int, optional", False), + Params("variable string, optional", False), + Params("variable float, optional", False), + Params("variable bool, optional", False), + Params("variable complex, optional", False), + Params("string", False), + Params("int", False), + Params("string", True), + Params("float", False), + Params("bool", False), + Params("complex", False), + Params("string, optional", False), + Params("int, optional", False), + Params("string, optional", True), + Params("float, optional", False), + Params("bool, optional", False), + Params("complex, optional", False), + ] + l_expected = [ + Res(traits.ListStr, True), + Res(traits.ListInt, True), + Res(traits.ListFloat, True), + Res(traits.ListBool, True), + Res(traits.ListComplex, True), + Res(traits.ListInt, False), + Res(traits.ListStr, False), + Res(traits.ListFloat, False), + Res(traits.ListBool, False), + Res(traits.ListComplex, False), + Res(traits.Str, True), + Res(traits.Int, True), + Res(File, True), + Res(traits.Float, True), + Res(traits.Bool, True), + Res(traits.Complex, True), + Res(traits.Str, False), + Res(traits.Int, False), + Res(File, False), + Res(traits.Float, False), + Res(traits.Bool, False), + Res(traits.Complex, False), + ] for entry, res in zip(l_entries, l_expected): - traits_type, is_mandatory = convert_to_traits_type(entry.traits_type, - entry.is_file) + traits_type, is_mandatory = convert_to_traits_type( + entry.traits_type, entry.is_file + ) assert traits_type == res.traits_type assert is_mandatory == res.is_mandatory @@ -60,28 +82,34 @@ def test_create_interface_specs(): assert new_interface.__name__ == "MyInterface" assert not new_interface().get() - new_interface = create_interface_specs("MyInterface", - BaseClass=BaseInterfaceInputSpec) + new_interface = create_interface_specs( + "MyInterface", BaseClass=BaseInterfaceInputSpec + ) assert new_interface.__base__ == BaseInterfaceInputSpec assert isinstance(new_interface(), BaseInterfaceInputSpec) assert new_interface.__name__ == "MyInterface" assert not new_interface().get() - params = [("params1", "string", ["my description"]), ("params2_files", "string", ["my description @"]), - ("params3", "int, optional", ["useful option"]), ("out_params", "string", ["my out description"])] + params = [ + ("params1", "string", ["my description"]), + ("params2_files", "string", ["my description @"]), + ("params3", "int, optional", ["useful option"]), + ("out_params", "string", ["my out description"]), + ] - new_interface = create_interface_specs("MyInterface", params=params, - BaseClass=BaseInterfaceInputSpec) + new_interface = create_interface_specs( + "MyInterface", params=params, BaseClass=BaseInterfaceInputSpec + ) assert new_interface.__base__ == BaseInterfaceInputSpec assert isinstance(new_interface(), BaseInterfaceInputSpec) assert new_interface.__name__ == "MyInterface" current_params = new_interface().get() assert len(current_params) == 4 - assert 'params1' in current_params.keys() - assert 'params2_files' in current_params.keys() - assert 'params3' in current_params.keys() - assert 'out_params' in current_params.keys() + assert "params1" in current_params.keys() + assert "params2_files" in current_params.keys() + assert "params3" in current_params.keys() + assert "out_params" in current_params.keys() @pytest.mark.skipif(no_dipy(), reason="DIPY is not installed") @@ -89,12 +117,11 @@ def test_dipy_to_nipype_interface(): from 
dipy.workflows.workflow import Workflow class DummyWorkflow(Workflow): - @classmethod def get_short_name(cls): - return 'dwf1' + return "dwf1" - def run(self, in_files, param1=1, out_dir='', out_ref='out1.txt'): + def run(self, in_files, param1=1, out_dir="", out_ref="out1.txt"): """Workflow used to test basic workflows. Parameters @@ -119,19 +146,19 @@ def run(self, in_files, param1=1, out_dir='', out_ref='out1.txt'): assert new_specs.__base__ == DipyBaseInterface assert isinstance(new_specs(), DipyBaseInterface) assert new_specs.__name__ == "MyModelSpec" - assert hasattr(new_specs, 'input_spec') + assert hasattr(new_specs, "input_spec") assert new_specs().input_spec.__base__ == BaseInterfaceInputSpec - assert hasattr(new_specs, 'output_spec') + assert hasattr(new_specs, "output_spec") assert new_specs().output_spec.__base__ == TraitedSpec - assert hasattr(new_specs, '_run_interface') - assert hasattr(new_specs, '_list_outputs') + assert hasattr(new_specs, "_run_interface") + assert hasattr(new_specs, "_list_outputs") params_in = new_specs().inputs.get() params_out = new_specs()._outputs().get() assert len(params_in) == 4 - assert 'in_files' in params_in.keys() - assert 'param1' in params_in.keys() - assert 'out_dir' in params_out.keys() - assert 'out_ref' in params_out.keys() + assert "in_files" in params_in.keys() + assert "param1" in params_in.keys() + assert "out_dir" in params_out.keys() + assert "out_ref" in params_out.keys() with pytest.raises(ValueError): new_specs().run() @@ -143,7 +170,7 @@ def test_get_dipy_workflows(): l_wkflw = get_dipy_workflows(align) for name, obj in l_wkflw: - assert name.endswith('Flow') + assert name.endswith("Flow") assert issubclass(obj, align.Workflow)
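The tests above pin down the contract of the wrapping helpers. As a rough usage sketch (assuming DIPY >= 0.15 is installed), the same helpers turn every dipy `*Flow` workflow in a module into a nipype interface, which is what the module-level loops in stats.py and tracks.py rely on:

from dipy.workflows import segment
from nipype.interfaces.dipy.base import (
    dipy_to_nipype_interface,
    get_dipy_workflows,
)

for name, flow in get_dipy_workflows(segment):
    # e.g. "RecoBundlesFlow" becomes an interface named "RecoBundles"
    interface = dipy_to_nipype_interface(name.replace("Flow", ""), flow)
    print(interface.__name__)

Per the test above, a workflow's documented parameters surface as input traits, while parameters named out_* surface as output traits.

diff --git a/nipype/interfaces/dipy/tracks.py b/nipype/interfaces/dipy/tracks.py index 8688b8d106..947bf22121 100644 --- a/nipype/interfaces/dipy/tracks.py +++ b/nipype/interfaces/dipy/tracks.py @@ -6,60 +6,65 @@ from distutils.version import LooseVersion from ...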
import logging -from ..base import (TraitedSpec, BaseInterfaceInputSpec, File, isdefined, - traits) -from .base import (DipyBaseInterface, HAVE_DIPY, dipy_version, - dipy_to_nipype_interface, get_dipy_workflows) +from ..base import TraitedSpec, BaseInterfaceInputSpec, File, isdefined, traits +from .base import ( + DipyBaseInterface, + HAVE_DIPY, + dipy_version, + dipy_to_nipype_interface, + get_dipy_workflows, +) -IFLOGGER = logging.getLogger('nipype.interface') +IFLOGGER = logging.getLogger("nipype.interface") -if HAVE_DIPY and (LooseVersion('0.15') >= LooseVersion(dipy_version()) >= LooseVersion('0.16')): +if HAVE_DIPY and ( + LooseVersion("0.15") <= LooseVersion(dipy_version()) <= LooseVersion("0.16") +): try: from dipy.workflows.tracking import LocalFiberTrackingPAMFlow as DetTrackFlow except ImportError: # different name in 0.15 from dipy.workflows.tracking import DetTrackPAMFlow as DetTrackFlow - DeterministicTracking = dipy_to_nipype_interface("DeterministicTracking", - DetTrackFlow) + DeterministicTracking = dipy_to_nipype_interface( + "DeterministicTracking", DetTrackFlow + ) -if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion('0.15'): +if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion("0.15"): from dipy.workflows import segment, tracking l_wkflw = get_dipy_workflows(segment) + get_dipy_workflows(tracking) for name, obj in l_wkflw: - new_name = name.replace('Flow', '') + new_name = name.replace("Flow", "") globals()[new_name] = dipy_to_nipype_interface(new_name, obj) del l_wkflw else: - IFLOGGER.info("We advise you to upgrade DIPY version. This upgrade will" - " open access to more function") + IFLOGGER.info( + "We advise you to upgrade DIPY version. This upgrade will" + " open access to more functions" + ) class TrackDensityMapInputSpec(BaseInterfaceInputSpec): - in_file = File( - exists=True, mandatory=True, desc='The input TrackVis track file') + in_file = File(exists=True, mandatory=True, desc="The input TrackVis track file") reference = File( - exists=True, desc='A reference file to define RAS coordinates space') + exists=True, desc="A reference file to define RAS coordinates space" + ) points_space = traits.Enum( - 'rasmm', - 'voxel', - None, - usedefault=True, - desc='coordinates of trk file') + "rasmm", "voxel", None, usedefault=True, desc="coordinates of trk file" + ) voxel_dims = traits.List( - traits.Float, minlen=3, maxlen=3, desc='The size of each voxel in mm.') + traits.Float, minlen=3, maxlen=3, desc="The size of each voxel in mm." + ) data_dims = traits.List( - traits.Int, - minlen=3, - maxlen=3, - desc='The size of the image in voxels.') + traits.Int, minlen=3, maxlen=3, desc="The size of the image in voxels." + ) out_filename = File( - 'tdi.nii', + "tdi.nii", usedefault=True, - desc='The output filename for the tracks in TrackVis ' - '(.trk) format') + desc="The output filename for the resulting track density map", + ) class TrackDensityMapOutputSpec(TraitedSpec): @@ -80,6 +85,7 @@ class TrackDensityMap(DipyBaseInterface): >>> trk2tdi.run() # doctest: +SKIP """ + input_spec = TrackDensityMapInputSpec output_spec = TrackDensityMapOutputSpec @@ -97,19 +103,21 @@ def _run_interface(self, runtime): data_dims = refnii.shape[:3] kwargs = dict(affine=affine) else: - IFLOGGER.warning('voxel_dims and data_dims are deprecated as of dipy ' - '0.7.1. Please use reference input instead') + IFLOGGER.warning( + "voxel_dims and data_dims are deprecated as of dipy " + "0.7.1.
Please use reference input instead" + ) if not isdefined(self.inputs.data_dims): - data_dims = header['dim'] + data_dims = header["dim"] else: data_dims = self.inputs.data_dims if not isdefined(self.inputs.voxel_dims): - voxel_size = header['voxel_size'] + voxel_size = header["voxel_size"] else: voxel_size = self.inputs.voxel_dims - affine = header['vox_to_ras'] + affine = header["vox_to_ras"] kwargs = dict(voxel_size=voxel_size) data = density_map(streams, data_dims, **kwargs) @@ -118,68 +126,72 @@ out_file = op.abspath(self.inputs.out_filename) nb.save(img, out_file) - IFLOGGER.info('Track density map saved as %s, size=%s, dimensions=%s', - out_file, img.shape, img.header.get_zooms()) + IFLOGGER.info( + "Track density map saved as %s, size=%s, dimensions=%s", + out_file, + img.shape, + img.header.get_zooms(), + ) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = op.abspath(self.inputs.out_filename) + outputs["out_file"] = op.abspath(self.inputs.out_filename) return outputs class StreamlineTractographyInputSpec(BaseInterfaceInputSpec): - in_file = File(exists=True, mandatory=True, desc=('input diffusion data')) - in_model = File(exists=True, desc=('input f/d-ODF model extracted from.')) - tracking_mask = File( - exists=True, desc=('input mask within which perform tracking')) - seed_mask = File( - exists=True, desc=('input mask within which perform seeding')) - in_peaks = File(exists=True, desc=('peaks computed from the odf')) + in_file = File(exists=True, mandatory=True, desc=("input diffusion data")) + in_model = File(exists=True, desc=("input f/d-ODF model")) + tracking_mask = File(exists=True, desc=("input mask within which to perform tracking")) + seed_mask = File(exists=True, desc=("input mask within which to perform seeding")) + in_peaks = File(exists=True, desc=("peaks computed from the odf")) seed_coord = File( exists=True, - desc=('file containing the list of seed voxel ' - 'coordinates (N,3)')) + desc=("file containing the list of seed voxel " "coordinates (N,3)"), + ) gfa_thresh = traits.Float( 0.2, mandatory=True, usedefault=True, - desc=('GFA threshold to compute tracking mask')) + desc=("GFA threshold to compute tracking mask"), + ) peak_threshold = traits.Float( 0.5, mandatory=True, usedefault=True, - desc=('threshold to consider peaks from model')) + desc=("threshold to consider peaks from model"), + ) min_angle = traits.Float( - 25.0, - mandatory=True, - usedefault=True, - desc=('minimum separation angle')) + 25.0, mandatory=True, usedefault=True, desc=("minimum separation angle") + ) multiprocess = traits.Bool( - True, mandatory=True, usedefault=True, desc=('use multiprocessing')) + True, mandatory=True, usedefault=True, desc=("use multiprocessing") + ) save_seeds = traits.Bool( - False, - mandatory=True, - usedefault=True, - desc=('save seeding voxels coordinates')) + False, mandatory=True, usedefault=True, desc=("save seeding voxel coordinates") + ) num_seeds = traits.Int( 10000, mandatory=True, usedefault=True, - desc=('desired number of tracks in tractography')) - out_prefix = traits.Str(desc=('output prefix for file names')) + desc=("desired number of tracks in tractography"), + ) + out_prefix = traits.Str(desc=("output prefix for file names")) class StreamlineTractographyOutputSpec(TraitedSpec): - tracks = File(desc='TrackVis file containing extracted streamlines') + tracks = File(desc="TrackVis file containing extracted streamlines") gfa = File( - desc=('The resulting GFA
(generalized FA) computed using the ' - 'peaks of the ODF')) - odf_peaks = File(desc=('peaks computed from the odf')) + desc=( + "The resulting GFA (generalized FA) computed using the " "peaks of the ODF" + ) + ) + odf_peaks = File(desc=("peaks computed from the odf")) out_seeds = File( - desc=('file containing the (N,3) *voxel* coordinates used' - ' in seeding.')) + desc=("file containing the (N,3) *voxel* coordinates used" " in seeding.") + ) class StreamlineTractography(DipyBaseInterface): @@ -199,6 +211,7 @@ class StreamlineTractography(DipyBaseInterface): >>> track.inputs.tracking_mask = 'dilated_wm_mask.nii' >>> res = track.run() # doctest: +SKIP """ + input_spec = StreamlineTractographyInputSpec output_spec = StreamlineTractographyOutputSpec @@ -206,14 +219,15 @@ def _run_interface(self, runtime): from dipy.reconst.peaks import peaks_from_model from dipy.tracking.eudx import EuDX from dipy.data import get_sphere + # import marshal as pickle import pickle as pickle import gzip - if (not (isdefined(self.inputs.in_model) - or isdefined(self.inputs.in_peaks))): - raise RuntimeError(('At least one of in_model or in_peaks should ' - 'be supplied')) + if not (isdefined(self.inputs.in_model) or isdefined(self.inputs.in_peaks)): + raise RuntimeError( + ("At least one of in_model or in_peaks should " "be supplied") + ) img = nb.load(self.inputs.in_file) imref = nb.four_to_three(img)[0] @@ -222,20 +236,20 @@ def _run_interface(self, runtime): data = img.get_data().astype(np.float32) hdr = imref.header.copy() hdr.set_data_dtype(np.float32) - hdr['data_type'] = 16 + hdr["data_type"] = 16 - sphere = get_sphere('symmetric724') + sphere = get_sphere("symmetric724") self._save_peaks = False if isdefined(self.inputs.in_peaks): - IFLOGGER.info('Peaks file found, skipping ODF peaks search...') - f = gzip.open(self.inputs.in_peaks, 'rb') + IFLOGGER.info("Peaks file found, skipping ODF peaks search...") + f = gzip.open(self.inputs.in_peaks, "rb") peaks = pickle.load(f) f.close() else: self._save_peaks = True - IFLOGGER.info('Loading model and computing ODF peaks') - f = gzip.open(self.inputs.in_model, 'rb') + IFLOGGER.info("Loading model and computing ODF peaks") + f = gzip.open(self.inputs.in_model, "rb") odf_model = pickle.load(f) f.close() @@ -245,17 +259,19 @@ def _run_interface(self, runtime): sphere=sphere, relative_peak_threshold=self.inputs.peak_threshold, min_separation_angle=self.inputs.min_angle, - parallel=self.inputs.multiprocess) + parallel=self.inputs.multiprocess, + ) - f = gzip.open(self._gen_filename('peaks', ext='.pklz'), 'wb') + f = gzip.open(self._gen_filename("peaks", ext=".pklz"), "wb") pickle.dump(peaks, f, -1) f.close() hdr.set_data_shape(peaks.gfa.shape) nb.Nifti1Image(peaks.gfa.astype(np.float32), affine, hdr).to_filename( - self._gen_filename('gfa')) + self._gen_filename("gfa") + ) - IFLOGGER.info('Performing tractography') + IFLOGGER.info("Performing tractography") if isdefined(self.inputs.tracking_mask): msk = nb.load(self.inputs.tracking_mask).get_data() @@ -272,26 +288,31 @@ def _run_interface(self, runtime): elif isdefined(self.inputs.seed_mask): seedmsk = nb.load(self.inputs.seed_mask).get_data() - assert (seedmsk.shape == data.shape[:3]) + assert seedmsk.shape == data.shape[:3] seedmsk[seedmsk > 0] = 1 seedmsk[seedmsk < 1] = 0 seedps = np.array(np.where(seedmsk == 1), dtype=np.float32).T vseeds = seedps.shape[0] nsperv = (seeds // vseeds) + 1 - IFLOGGER.info('Seed mask is provided (%d voxels inside ' - 'mask), computing seeds (%d seeds/voxel).', vseeds, - nsperv) + 
IFLOGGER.info( + "Seed mask is provided (%d voxels inside " + "mask), computing seeds (%d seeds/voxel).", + vseeds, + nsperv, + ) if nsperv > 1: - IFLOGGER.info('Needed %d seeds per selected voxel (total %d).', - nsperv, vseeds) + IFLOGGER.info( + "Needed %d seeds per selected voxel (total %d).", nsperv, vseeds + ) seedps = np.vstack(np.array([seedps] * nsperv)) voxcoord = seedps + np.random.uniform(-1, 1, size=seedps.shape) nseeds = voxcoord.shape[0] - seeds = affine.dot( - np.vstack((voxcoord.T, np.ones((1, nseeds)))))[:3, :].T + seeds = affine.dot(np.vstack((voxcoord.T, np.ones((1, nseeds)))))[ + :3, : + ].T if self.inputs.save_seeds: - np.savetxt(self._gen_filename('seeds', ext='.txt'), seeds) + np.savetxt(self._gen_filename("seeds", ext=".txt"), seeds) if isdefined(self.inputs.tracking_mask): tmask = msk @@ -306,34 +327,34 @@ seeds=seeds, affine=affine, odf_vertices=sphere.vertices, - a_low=a_low) + a_low=a_low, + ) ss_mm = [np.array(s) for s in eu] trkfilev = nb.trackvis.TrackvisFile( - [(s, None, None) for s in ss_mm], - points_space='rasmm', - affine=np.eye(4)) - trkfilev.to_file(self._gen_filename('tracked', ext='.trk')) + [(s, None, None) for s in ss_mm], points_space="rasmm", affine=np.eye(4) + ) + trkfilev.to_file(self._gen_filename("tracked", ext=".trk")) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['tracks'] = self._gen_filename('tracked', ext='.trk') - outputs['gfa'] = self._gen_filename('gfa') + outputs["tracks"] = self._gen_filename("tracked", ext=".trk") + outputs["gfa"] = self._gen_filename("gfa") if self._save_peaks: - outputs['odf_peaks'] = self._gen_filename('peaks', ext='.pklz') + outputs["odf_peaks"] = self._gen_filename("peaks", ext=".pklz") if self.inputs.save_seeds: if isdefined(self.inputs.seed_coord): - outputs['out_seeds'] = self.inputs.seed_coord + outputs["out_seeds"] = self.inputs.seed_coord else: - outputs['out_seeds'] = self._gen_filename('seeds', ext='.txt') + outputs["out_seeds"] = self._gen_filename("seeds", ext=".txt") return outputs def _gen_filename(self, name, ext=None): fname, fext = op.splitext(op.basename(self.inputs.in_file)) - if fext == '.gz': + if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext @@ -345,4 +366,4 @@ def _gen_filename(self, name, ext=None): if ext is None: ext = fext - return out_prefix + '_' + name + ext + return out_prefix + "_" + name + ext
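`_gen_filename` above needs two `splitext` passes because `.nii.gz` is a compound extension. A self-contained illustration of that pattern (a hypothetical helper for this sketch, not nipype API):

import os.path as op

def split_compound_ext(path):
    # First pass strips ".gz"; the second recovers the inner ".nii".
    fname, ext = op.splitext(op.basename(path))
    if ext == ".gz":
        fname, inner = op.splitext(fname)
        ext = inner + ext
    return fname, ext

assert split_compound_ext("/data/dwi.nii.gz") == ("dwi", ".nii.gz")
assert split_compound_ext("mask.nii") == ("mask", ".nii")

diff --git a/nipype/interfaces/dtitk/__init__.py b/nipype/interfaces/dtitk/__init__.py index a41c09e588..6c9569114c 100644 --- a/nipype/interfaces/dtitk/__init__.py +++ b/nipype/interfaces/dtitk/__init__.py @@ -6,8 +6,21 @@ """ # from .base import () -from .registration import (Rigid, Affine, Diffeo, - ComposeXfm, DiffeoSymTensor3DVol, AffSymTensor3DVol, - AffScalarVol, DiffeoScalarVol) -from .utils import (TVAdjustVoxSp, SVAdjustVoxSp, TVResample, SVResample, - TVtool, BinThresh) +from .registration import ( + Rigid, + Affine, + Diffeo, + ComposeXfm, + DiffeoSymTensor3DVol, + AffSymTensor3DVol, + AffScalarVol, + DiffeoScalarVol, +) +from .utils import ( + TVAdjustVoxSp, + SVAdjustVoxSp, + TVResample, + SVResample, + TVtool, + BinThresh, +) diff --git a/nipype/interfaces/dtitk/base.py b/nipype/interfaces/dtitk/base.py index 32289e5217..aad1b4d521 100644 --- a/nipype/interfaces/dtitk/base.py +++ b/nipype/interfaces/dtitk/base.py @@ -33,28 +33,28 @@ from nipype.interfaces.fsl.base import Info import warnings -LOGGER = logging.getLogger('nipype.interface') +LOGGER =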
logging.getLogger("nipype.interface") class DTITKRenameMixin(object): def __init__(self, *args, **kwargs): classes = [cls.__name__ for cls in self.__class__.mro()] dep_name = classes[0] - rename_idx = classes.index('DTITKRenameMixin') + rename_idx = classes.index("DTITKRenameMixin") new_name = classes[rename_idx + 1] - warnings.warn('The {} interface has been renamed to {}\n' - 'Please see the documentation for DTI-TK ' - 'interfaces, as some inputs have been ' - 'added or renamed for clarity.' - ''.format(dep_name, new_name), - DeprecationWarning) + warnings.warn( + "The {} interface has been renamed to {}\n" + "Please see the documentation for DTI-TK " + "interfaces, as some inputs have been " + "added or renamed for clarity." + "".format(dep_name, new_name), + DeprecationWarning, + ) super(DTITKRenameMixin, self).__init__(*args, **kwargs) class CommandLineDtitk(CommandLine): - - def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True, - ext=None): + def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True, ext=None): """Generate a filename based on the given parameters. The filename will take the form: cwd/basename. @@ -80,9 +80,9 @@ def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True, """ - if basename == '': - msg = 'Unable to generate filename for command %s. ' % self.cmd - msg += 'basename is not set!' + if basename == "": + msg = "Unable to generate filename for command %s. " % self.cmd + msg += "basename is not set!" raise ValueError(msg) if cwd is None: cwd = os.getcwd() @@ -90,11 +90,10 @@ def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True, ext = Info.output_type_to_ext(self.inputs.output_type) if change_ext: if suffix: - suffix = ''.join((suffix, ext)) + suffix = "".join((suffix, ext)) else: suffix = ext if suffix is None: - suffix = '' - fname = fname_presuffix(basename, suffix=suffix, - use_ext=False, newpath=cwd) + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) return fname diff --git a/nipype/interfaces/dtitk/registration.py b/nipype/interfaces/dtitk/registration.py index 6aa40d4201..4a50d5b1ad 100644 --- a/nipype/interfaces/dtitk/registration.py +++ b/nipype/interfaces/dtitk/registration.py @@ -29,25 +29,60 @@ from .base import CommandLineDtitk, DTITKRenameMixin import os -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" class RigidInputSpec(CommandLineInputSpec): - fixed_file = File(desc="fixed tensor volume", exists=True, - mandatory=True, position=0, argstr="%s", copyfile=False) - moving_file = File(desc="moving tensor volume", exists=True, - mandatory=True, position=1, argstr="%s", copyfile=False) - similarity_metric = traits.Enum('EDS', 'GDS', 'DDS', 'NMI', - mandatory=True, position=2, argstr="%s", - desc="similarity metric", usedefault=True) - sampling_xyz = traits.Tuple((4, 4, 4), mandatory=True, position=3, - argstr="%g %g %g", usedefault=True, - desc="dist between samp points (mm) (x,y,z)") - ftol = traits.Float(mandatory=True, position=4, argstr="%g", - desc="cost function tolerance", default_value=0.01, - usedefault=True) - initialize_xfm = File(copyfile=True, desc="Initialize w/DTITK-FORMAT" - "affine", position=5, argstr="%s", exists=True) + fixed_file = File( + desc="fixed tensor volume", + exists=True, + mandatory=True, + position=0, + argstr="%s", + copyfile=False, + ) + moving_file = File( + desc="moving tensor volume", + exists=True, + mandatory=True, + position=1, + argstr="%s", + copyfile=False, + ) + similarity_metric = 
traits.Enum( + "EDS", + "GDS", + "DDS", + "NMI", + mandatory=True, + position=2, + argstr="%s", + desc="similarity metric", + usedefault=True, + ) + sampling_xyz = traits.Tuple( + (4, 4, 4), + mandatory=True, + position=3, + argstr="%g %g %g", + usedefault=True, + desc="dist between samp points (mm) (x,y,z)", + ) + ftol = traits.Float( + mandatory=True, + position=4, + argstr="%g", + desc="cost function tolerance", + default_value=0.01, + usedefault=True, + ) + initialize_xfm = File( + copyfile=True, + desc="Initialize w/DTITK-FORMAT " "affine", + position=5, + argstr="%s", + exists=True, + ) class RigidOutputSpec(TraitedSpec): @@ -72,27 +107,27 @@ class Rigid(CommandLineDtitk): 'dti_rigid_reg im1.nii im2.nii EDS 4 4 4 0.01' >>> node.run() # doctest: +SKIP """ + input_spec = RigidInputSpec output_spec = RigidOutputSpec - _cmd = 'dti_rigid_reg' + _cmd = "dti_rigid_reg" - '''def _format_arg(self, name, spec, value): if name == 'initialize_xfm': value = 1 - return super(Rigid, self)._format_arg(name, spec, value)''' + """def _format_arg(self, name, spec, value): if name == 'initialize_xfm': value = 1 return super(Rigid, self)._format_arg(name, spec, value)""" def _run_interface(self, runtime): runtime = super(Rigid, self)._run_interface(runtime) - if '''.aff doesn't exist or can't be opened''' in runtime.stderr: + if """.aff doesn't exist or can't be opened""" in runtime.stderr: self.raise_exception(runtime) return runtime def _list_outputs(self): outputs = self.output_spec().get() moving = self.inputs.moving_file - outputs['out_file_xfm'] = fname_presuffix(moving, suffix='.aff', - use_ext=False) - outputs['out_file'] = fname_presuffix(moving, suffix='_aff') + outputs["out_file_xfm"] = fname_presuffix(moving, suffix=".aff", use_ext=False) + outputs["out_file"] = fname_presuffix(moving, suffix="_aff") return outputs @@ -114,24 +149,44 @@ class Affine(Rigid): 'dti_affine_reg im1.nii im2.nii EDS 4 4 4 0.01 im_affine.aff' >>> node.run() # doctest: +SKIP """ - _cmd = 'dti_affine_reg' + + _cmd = "dti_affine_reg" class DiffeoInputSpec(CommandLineInputSpec): - fixed_file = File(desc="fixed tensor volume", - exists=True, position=0, argstr="%s") - moving_file = File(desc="moving tensor volume", - exists=True, position=1, argstr="%s", copyfile=False) - mask_file = File(desc="mask", exists=True, position=2, argstr="%s") - legacy = traits.Enum(1, desc="legacy parameter; always set to 1", - usedefault=True, mandatory=True, - position=3, argstr="%d") - n_iters = traits.Int(6, desc="number of iterations", - mandatory=True, - position=4, argstr="%d", usedefault=True) - ftol = traits.Float(0.002, desc="iteration for the optimization to stop", - mandatory=True, position=5, argstr="%g", - usedefault=True) + fixed_file = File(desc="fixed tensor volume", exists=True, position=0, argstr="%s") + moving_file = File( + desc="moving tensor volume", + exists=True, + position=1, + argstr="%s", + copyfile=False, + ) + mask_file = File(desc="mask", exists=True, position=2, argstr="%s") + legacy = traits.Enum( + 1, + desc="legacy parameter; always set to 1", + usedefault=True, + mandatory=True, + position=3, + argstr="%d", + ) + n_iters = traits.Int( + 6, + desc="number of iterations", + mandatory=True, + position=4, + argstr="%d", + usedefault=True, + ) + ftol = traits.Float( + 0.002, + desc="tolerance for the optimization to stop", + mandatory=True, + position=5, + argstr="%g", + usedefault=True, + ) class DiffeoOutputSpec(TraitedSpec): @@ -157,25 +212,27 @@ class Diffeo(CommandLineDtitk): 'dti_diffeomorphic_reg im1.nii im2.nii mask.nii 1 6 0.002' >>>
node.run() # doctest: +SKIP """ + input_spec = DiffeoInputSpec output_spec = DiffeoOutputSpec - _cmd = 'dti_diffeomorphic_reg' + _cmd = "dti_diffeomorphic_reg" def _list_outputs(self): outputs = self.output_spec().get() moving = self.inputs.moving_file - outputs['out_file_xfm'] = fname_presuffix(moving, suffix='_diffeo.df') - outputs['out_file'] = fname_presuffix(moving, suffix='_diffeo') + outputs["out_file_xfm"] = fname_presuffix(moving, suffix="_diffeo.df") + outputs["out_file"] = fname_presuffix(moving, suffix="_diffeo") return outputs class ComposeXfmInputSpec(CommandLineInputSpec): - in_df = File(desc='diffeomorphic warp file', exists=True, - argstr="-df %s", mandatory=True) - in_aff = File(desc='affine transform file', exists=True, - argstr="-aff %s", mandatory=True) - out_file = File(desc='output path', - argstr="-out %s", genfile=True) + in_df = File( + desc="diffeomorphic warp file", exists=True, argstr="-df %s", mandatory=True + ) + in_aff = File( + desc="affine transform file", exists=True, argstr="-aff %s", mandatory=True + ) + out_file = File(desc="output path", argstr="-out %s", genfile=True) class ComposeXfmOutputSpec(TraitedSpec): @@ -198,60 +255,89 @@ class ComposeXfm(CommandLineDtitk): im_warp_affdf.df.nii' >>> node.run() # doctest: +SKIP """ + input_spec = ComposeXfmInputSpec output_spec = ComposeXfmOutputSpec - _cmd = 'dfRightComposeAffine' + _cmd = "dfRightComposeAffine" def _list_outputs(self): outputs = self._outputs().get() out_file = self.inputs.out_file if not isdefined(out_file): - out_file = self._gen_filename('out_file') - outputs['out_file'] = os.path.abspath(out_file) + out_file = self._gen_filename("out_file") + outputs["out_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): - if name != 'out_file': + if name != "out_file": return path, base, ext = split_filename(self.inputs.in_df) - suffix = '_affdf' - if base.endswith('.df'): - suffix += '.df' + suffix = "_affdf" + if base.endswith(".df"): + suffix += ".df" base = base[:-3] return fname_presuffix(base, suffix=suffix + ext, use_ext=False) class AffSymTensor3DVolInputSpec(CommandLineInputSpec): - in_file = File(desc='moving tensor volume', exists=True, - argstr="-in %s", mandatory=True) - out_file = File(desc='output filename', - argstr="-out %s", name_source="in_file", - name_template="%s_affxfmd", keep_extension=True) - transform = File(exists=True, argstr="-trans %s", - xor=['target', 'translation', 'euler', 'deformation'], - desc='transform to apply: specify an input transformation' - ' file; parameters input will be ignored',) - interpolation = traits.Enum('LEI', 'EI', usedefault=True, - argstr="-interp %s", - desc='Log Euclidean/Euclidean Interpolation') - reorient = traits.Enum('PPD', 'NO', 'FS', argstr='-reorient %s', - usedefault=True, desc='Reorientation strategy: ' - 'preservation of principal direction, no ' - 'reorientation, or finite strain') - target = File(exists=True, argstr="-target %s", xor=['transform'], - desc='output volume specification read from the target ' - 'volume if specified') - translation = traits.Tuple((traits.Float(), traits.Float(), - traits.Float()), - desc='translation (x,y,z) in mm', - argstr='-translation %g %g %g', - xor=['transform']) - euler = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='(theta, phi, psi) in degrees', - xor=['transform'], argstr='-euler %g %g %g') - deformation = traits.Tuple((traits.Float(),) * 6, - desc='(xx,yy,zz,xy,yz,xz)', xor=['transform'], - argstr='-deformation %g %g %g %g %g %g') + 
in_file = File( + desc="moving tensor volume", exists=True, argstr="-in %s", mandatory=True + ) + out_file = File( + desc="output filename", + argstr="-out %s", + name_source="in_file", + name_template="%s_affxfmd", + keep_extension=True, + ) + transform = File( + exists=True, + argstr="-trans %s", + xor=["target", "translation", "euler", "deformation"], + desc="transform to apply: specify an input transformation" + " file; parameters input will be ignored", + ) + interpolation = traits.Enum( + "LEI", + "EI", + usedefault=True, + argstr="-interp %s", + desc="Log Euclidean/Euclidean Interpolation", + ) + reorient = traits.Enum( + "PPD", + "NO", + "FS", + argstr="-reorient %s", + usedefault=True, + desc="Reorientation strategy: " + "preservation of principal direction, no " + "reorientation, or finite strain", + ) + target = File( + exists=True, + argstr="-target %s", + xor=["transform"], + desc="output volume specification read from the target " "volume if specified", + ) + translation = traits.Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="translation (x,y,z) in mm", + argstr="-translation %g %g %g", + xor=["transform"], + ) + euler = traits.Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="(theta, phi, psi) in degrees", + xor=["transform"], + argstr="-euler %g %g %g", + ) + deformation = traits.Tuple( + (traits.Float(),) * 6, + desc="(xx,yy,zz,xy,yz,xz)", + xor=["transform"], + argstr="-deformation %g %g %g %g %g %g", + ) class AffSymTensor3DVolOutputSpec(TraitedSpec): @@ -274,43 +360,65 @@ class AffSymTensor3DVol(CommandLineDtitk): -reorient PPD -trans im_affine.aff' >>> node.run() # doctest: +SKIP """ + input_spec = AffSymTensor3DVolInputSpec output_spec = AffSymTensor3DVolOutputSpec - _cmd = 'affineSymTensor3DVolume' + _cmd = "affineSymTensor3DVolume" class AffScalarVolInputSpec(CommandLineInputSpec): - in_file = File(desc='moving scalar volume', exists=True, - argstr="-in %s", mandatory=True) - out_file = File(desc='output filename', - argstr="-out %s", name_source="in_file", - name_template="%s_affxfmd", keep_extension=True) - transform = File(exists=True, argstr="-trans %s", - xor=['target', 'translation', 'euler', 'deformation'], - desc='transform to apply: specify an input transformation' - ' file; parameters input will be ignored',) - interpolation = traits.Enum('trilinear', 'NN', - usedefault=True, argstr="-interp %s", - desc='trilinear or nearest neighbor' - ' interpolation') - target = File(exists=True, argstr="-target %s", xor=['transform'], - desc='output volume specification read from the target ' - 'volume if specified') - translation = traits.Tuple((traits.Float(), traits.Float(), - traits.Float()), - desc='translation (x,y,z) in mm', - argstr='-translation %g %g %g', - xor=['transform']) - euler = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='(theta, phi, psi) in degrees', - xor=['transform'], argstr='-euler %g %g %g') - deformation = traits.Tuple((traits.Float(),) * 6, - desc='(xx,yy,zz,xy,yz,xz)', xor=['transform'], - argstr='-deformation %g %g %g %g %g %g') + in_file = File( + desc="moving scalar volume", exists=True, argstr="-in %s", mandatory=True + ) + out_file = File( + desc="output filename", + argstr="-out %s", + name_source="in_file", + name_template="%s_affxfmd", + keep_extension=True, + ) + transform = File( + exists=True, + argstr="-trans %s", + xor=["target", "translation", "euler", "deformation"], + desc="transform to apply: specify an input transformation" + " file; parameters input will be 
ignored", + ) + interpolation = traits.Enum( + "trilinear", + "NN", + usedefault=True, + argstr="-interp %s", + desc="trilinear or nearest neighbor" " interpolation", + ) + target = File( + exists=True, + argstr="-target %s", + xor=["transform"], + desc="output volume specification read from the target " "volume if specified", + ) + translation = traits.Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="translation (x,y,z) in mm", + argstr="-translation %g %g %g", + xor=["transform"], + ) + euler = traits.Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="(theta, phi, psi) in degrees", + xor=["transform"], + argstr="-euler %g %g %g", + ) + deformation = traits.Tuple( + (traits.Float(),) * 6, + desc="(xx,yy,zz,xy,yz,xz)", + xor=["transform"], + argstr="-deformation %g %g %g %g %g %g", + ) class AffScalarVolOutputSpec(TraitedSpec): - out_file = File(desc='moved volume', exists=True) + out_file = File(desc="moved volume", exists=True) class AffScalarVol(CommandLineDtitk): @@ -329,43 +437,69 @@ class AffScalarVol(CommandLineDtitk): im_affine.aff' >>> node.run() # doctest: +SKIP """ + input_spec = AffScalarVolInputSpec output_spec = AffScalarVolOutputSpec - _cmd = 'affineScalarVolume' + _cmd = "affineScalarVolume" def _format_arg(self, name, spec, value): - if name == 'interpolation': - value = {'trilinear': 0, 'NN': 1}[value] + if name == "interpolation": + value = {"trilinear": 0, "NN": 1}[value] return super(AffScalarVol, self)._format_arg(name, spec, value) class DiffeoSymTensor3DVolInputSpec(CommandLineInputSpec): - in_file = File(desc='moving tensor volume', exists=True, - argstr="-in %s", mandatory=True) - out_file = File(desc='output filename', - argstr="-out %s", name_source="in_file", - name_template="%s_diffeoxfmd", keep_extension=True) - transform = File(exists=True, argstr="-trans %s", - mandatory=True, desc='transform to apply') - df = traits.Str('FD', argstr="-df %s", usedefault=True) - interpolation = traits.Enum('LEI', 'EI', usedefault=True, - argstr="-interp %s", - desc='Log Euclidean/Euclidean Interpolation') - reorient = traits.Enum('PPD', 'FS', argstr='-reorient %s', - usedefault=True, desc='Reorientation strategy: ' - 'preservation of principal direction or finite ' - 'strain') - target = File(exists=True, argstr="-target %s", xor=['voxel_size'], - desc='output volume specification read from the target ' - 'volume if specified') - voxel_size = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='xyz voxel size (superseded by target)', - argstr="-vsize %g %g %g", xor=['target']) - flip = traits.Tuple((traits.Int(), traits.Int(), traits.Int()), - argstr="-flip %d %d %d") - resampling_type = traits.Enum('backward', 'forward', - desc='use backward or forward resampling', - argstr="-type %s") + in_file = File( + desc="moving tensor volume", exists=True, argstr="-in %s", mandatory=True + ) + out_file = File( + desc="output filename", + argstr="-out %s", + name_source="in_file", + name_template="%s_diffeoxfmd", + keep_extension=True, + ) + transform = File( + exists=True, argstr="-trans %s", mandatory=True, desc="transform to apply" + ) + df = traits.Str("FD", argstr="-df %s", usedefault=True) + interpolation = traits.Enum( + "LEI", + "EI", + usedefault=True, + argstr="-interp %s", + desc="Log Euclidean/Euclidean Interpolation", + ) + reorient = traits.Enum( + "PPD", + "FS", + argstr="-reorient %s", + usedefault=True, + desc="Reorientation strategy: " + "preservation of principal direction or finite " + "strain", + ) + target = 
File( + exists=True, + argstr="-target %s", + xor=["voxel_size"], + desc="output volume specification read from the target " "volume if specified", + ) + voxel_size = traits.Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="xyz voxel size (superseded by target)", + argstr="-vsize %g %g %g", + xor=["target"], + ) + flip = traits.Tuple( + (traits.Int(), traits.Int(), traits.Int()), argstr="-flip %d %d %d" + ) + resampling_type = traits.Enum( + "backward", + "forward", + desc="use backward or forward resampling", + argstr="-type %s", + ) class DiffeoSymTensor3DVolOutputSpec(TraitedSpec): @@ -391,41 +525,60 @@ class DiffeoSymTensor3DVol(CommandLineDtitk): input_spec = DiffeoSymTensor3DVolInputSpec output_spec = DiffeoSymTensor3DVolOutputSpec - _cmd = 'deformationSymTensor3DVolume' + _cmd = "deformationSymTensor3DVolume" def _format_arg(self, name, spec, value): - if name == 'resampling_type': - value = {'forward': 0, 'backward': 1}[value] + if name == "resampling_type": + value = {"forward": 0, "backward": 1}[value] return super(DiffeoSymTensor3DVol, self)._format_arg(name, spec, value) class DiffeoScalarVolInputSpec(CommandLineInputSpec): - in_file = File(desc='moving scalar volume', exists=True, - argstr="-in %s", mandatory=True) - out_file = File(desc='output filename', - argstr="-out %s", name_source="in_file", - name_template="%s_diffeoxfmd", keep_extension=True) - transform = File(exists=True, argstr="-trans %s", - mandatory=True, desc='transform to apply') - target = File(exists=True, argstr="-target %s", xor=['voxel_size'], - desc='output volume specification read from the target ' - 'volume if specified') - voxel_size = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='xyz voxel size (superseded by target)', - argstr="-vsize %g %g %g", xor=['target']) - flip = traits.Tuple((traits.Int(), traits.Int(), traits.Int()), - argstr="-flip %d %d %d") - resampling_type = traits.Enum('backward', 'forward', - desc='use backward or forward resampling', - argstr="-type %s") - interpolation = traits.Enum('trilinear', 'NN', - desc='trilinear, or nearest neighbor', - argstr="-interp %s", - usedefault=True) + in_file = File( + desc="moving scalar volume", exists=True, argstr="-in %s", mandatory=True + ) + out_file = File( + desc="output filename", + argstr="-out %s", + name_source="in_file", + name_template="%s_diffeoxfmd", + keep_extension=True, + ) + transform = File( + exists=True, argstr="-trans %s", mandatory=True, desc="transform to apply" + ) + target = File( + exists=True, + argstr="-target %s", + xor=["voxel_size"], + desc="output volume specification read from the target " "volume if specified", + ) + voxel_size = traits.Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="xyz voxel size (superseded by target)", + argstr="-vsize %g %g %g", + xor=["target"], + ) + flip = traits.Tuple( + (traits.Int(), traits.Int(), traits.Int()), argstr="-flip %d %d %d" + ) + resampling_type = traits.Enum( + "backward", + "forward", + desc="use backward or forward resampling", + argstr="-type %s", + ) + interpolation = traits.Enum( + "trilinear", + "NN", + desc="trilinear, or nearest neighbor", + argstr="-interp %s", + usedefault=True, + ) class DiffeoScalarVolOutputSpec(TraitedSpec): - out_file = File(desc='moved volume', exists=True) + out_file = File(desc="moved volume", exists=True) class DiffeoScalarVol(CommandLineDtitk): @@ -447,13 +600,13 @@ class DiffeoScalarVol(CommandLineDtitk): input_spec = DiffeoScalarVolInputSpec output_spec = 
DiffeoScalarVolOutputSpec - _cmd = 'deformationScalarVolume' + _cmd = "deformationScalarVolume" def _format_arg(self, name, spec, value): - if name == 'resampling_type': - value = {'forward': 0, 'backward': 1}[value] - elif name == 'interpolation': - value = {'trilinear': 0, 'NN': 1}[value] + if name == "resampling_type": + value = {"forward": 0, "backward": 1}[value] + elif name == "interpolation": + value = {"trilinear": 0, "NN": 1}[value] return super(DiffeoScalarVol, self)._format_arg(name, spec, value) diff --git a/nipype/interfaces/dtitk/tests/test_auto_AffScalarVol.py b/nipype/interfaces/dtitk/tests/test_auto_AffScalarVol.py index b6a6128a5f..e48312d3f2 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_AffScalarVol.py +++ b/nipype/interfaces/dtitk/tests/test_auto_AffScalarVol.py @@ -4,57 +4,36 @@ def test_AffScalarVol_inputs(): input_map = dict( - args=dict(argstr='%s', ), - deformation=dict( - argstr='-deformation %g %g %g %g %g %g', - xor=['transform'], - ), - environ=dict( - nohash=True, - usedefault=True, - ), - euler=dict( - argstr='-euler %g %g %g', - xor=['transform'], - ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - ), - interpolation=dict( - argstr='-interp %s', - usedefault=True, - ), + args=dict(argstr="%s",), + deformation=dict(argstr="-deformation %g %g %g %g %g %g", xor=["transform"],), + environ=dict(nohash=True, usedefault=True,), + euler=dict(argstr="-euler %g %g %g", xor=["transform"],), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True,), + interpolation=dict(argstr="-interp %s", usedefault=True,), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_affxfmd', - ), - target=dict( - argstr='-target %s', - extensions=None, - xor=['transform'], + name_source="in_file", + name_template="%s_affxfmd", ), + target=dict(argstr="-target %s", extensions=None, xor=["transform"],), transform=dict( - argstr='-trans %s', + argstr="-trans %s", extensions=None, - xor=['target', 'translation', 'euler', 'deformation'], - ), - translation=dict( - argstr='-translation %g %g %g', - xor=['transform'], + xor=["target", "translation", "euler", "deformation"], ), + translation=dict(argstr="-translation %g %g %g", xor=["transform"],), ) inputs = AffScalarVol.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AffScalarVol_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = AffScalarVol.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_AffSymTensor3DVol.py b/nipype/interfaces/dtitk/tests/test_auto_AffSymTensor3DVol.py index 658f8c7baa..54a562e8be 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_AffSymTensor3DVol.py +++ b/nipype/interfaces/dtitk/tests/test_auto_AffSymTensor3DVol.py @@ -4,61 +4,37 @@ def test_AffSymTensor3DVol_inputs(): input_map = dict( - args=dict(argstr='%s', ), - deformation=dict( - argstr='-deformation %g %g %g %g %g %g', - xor=['transform'], - ), - environ=dict( - nohash=True, - usedefault=True, - ), - euler=dict( - argstr='-euler %g %g %g', - xor=['transform'], - ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - ), - interpolation=dict( - argstr='-interp %s', - usedefault=True, - ), + args=dict(argstr="%s",), + deformation=dict(argstr="-deformation 
%g %g %g %g %g %g", xor=["transform"],), + environ=dict(nohash=True, usedefault=True,), + euler=dict(argstr="-euler %g %g %g", xor=["transform"],), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True,), + interpolation=dict(argstr="-interp %s", usedefault=True,), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_affxfmd', - ), - reorient=dict( - argstr='-reorient %s', - usedefault=True, - ), - target=dict( - argstr='-target %s', - extensions=None, - xor=['transform'], + name_source="in_file", + name_template="%s_affxfmd", ), + reorient=dict(argstr="-reorient %s", usedefault=True,), + target=dict(argstr="-target %s", extensions=None, xor=["transform"],), transform=dict( - argstr='-trans %s', + argstr="-trans %s", extensions=None, - xor=['target', 'translation', 'euler', 'deformation'], - ), - translation=dict( - argstr='-translation %g %g %g', - xor=['transform'], + xor=["target", "translation", "euler", "deformation"], ), + translation=dict(argstr="-translation %g %g %g", xor=["transform"],), ) inputs = AffSymTensor3DVol.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AffSymTensor3DVol_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = AffSymTensor3DVol.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_Affine.py b/nipype/interfaces/dtitk/tests/test_auto_Affine.py index 4469168d12..e095ce3922 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_Affine.py +++ b/nipype/interfaces/dtitk/tests/test_auto_Affine.py @@ -4,48 +4,21 @@ def test_Affine_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), fixed_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=0, - ), - ftol=dict( - argstr='%g', - mandatory=True, - position=4, - usedefault=True, - ), - initialize_xfm=dict( - argstr='%s', - copyfile=True, - extensions=None, - position=5, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=0, ), + ftol=dict(argstr="%g", mandatory=True, position=4, usedefault=True,), + initialize_xfm=dict(argstr="%s", copyfile=True, extensions=None, position=5,), moving_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=1, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=1, ), sampling_xyz=dict( - argstr='%g %g %g', - mandatory=True, - position=3, - usedefault=True, + argstr="%g %g %g", mandatory=True, position=3, usedefault=True, ), similarity_metric=dict( - argstr='%s', - mandatory=True, - position=2, - usedefault=True, + argstr="%s", mandatory=True, position=2, usedefault=True, ), ) inputs = Affine.input_spec() @@ -53,10 +26,11 @@ def test_Affine_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Affine_outputs(): output_map = dict( - out_file=dict(extensions=None, ), - out_file_xfm=dict(extensions=None, ), + out_file=dict(extensions=None,), out_file_xfm=dict(extensions=None,), ) outputs = Affine.output_spec() diff --git a/nipype/interfaces/dtitk/tests/test_auto_AffineTask.py 
b/nipype/interfaces/dtitk/tests/test_auto_AffineTask.py index 873c92e7ca..31820fc56d 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_AffineTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_AffineTask.py @@ -4,48 +4,21 @@ def test_AffineTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), fixed_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=0, - ), - ftol=dict( - argstr='%g', - mandatory=True, - position=4, - usedefault=True, - ), - initialize_xfm=dict( - argstr='%s', - copyfile=True, - extensions=None, - position=5, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=0, ), + ftol=dict(argstr="%g", mandatory=True, position=4, usedefault=True,), + initialize_xfm=dict(argstr="%s", copyfile=True, extensions=None, position=5,), moving_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=1, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=1, ), sampling_xyz=dict( - argstr='%g %g %g', - mandatory=True, - position=3, - usedefault=True, + argstr="%g %g %g", mandatory=True, position=3, usedefault=True, ), similarity_metric=dict( - argstr='%s', - mandatory=True, - position=2, - usedefault=True, + argstr="%s", mandatory=True, position=2, usedefault=True, ), ) inputs = AffineTask.input_spec() @@ -53,10 +26,11 @@ def test_AffineTask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AffineTask_outputs(): output_map = dict( - out_file=dict(extensions=None, ), - out_file_xfm=dict(extensions=None, ), + out_file=dict(extensions=None,), out_file_xfm=dict(extensions=None,), ) outputs = AffineTask.output_spec() diff --git a/nipype/interfaces/dtitk/tests/test_auto_BinThresh.py b/nipype/interfaces/dtitk/tests/test_auto_BinThresh.py index cfd748dcb2..d1780ffe5c 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_BinThresh.py +++ b/nipype/interfaces/dtitk/tests/test_auto_BinThresh.py @@ -4,57 +4,31 @@ def test_BinThresh_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=0, - ), - inside_value=dict( - argstr='%g', - mandatory=True, - position=4, - usedefault=True, - ), - lower_bound=dict( - argstr='%g', - mandatory=True, - position=2, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=0,), + inside_value=dict(argstr="%g", mandatory=True, position=4, usedefault=True,), + lower_bound=dict(argstr="%g", mandatory=True, position=2, usedefault=True,), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_thrbin', + name_source="in_file", + name_template="%s_thrbin", position=1, ), - outside_value=dict( - argstr='%g', - mandatory=True, - position=5, - usedefault=True, - ), - upper_bound=dict( - argstr='%g', - mandatory=True, - position=3, - usedefault=True, - ), + outside_value=dict(argstr="%g", mandatory=True, position=5, usedefault=True,), + upper_bound=dict(argstr="%g", mandatory=True, position=3, usedefault=True,), ) inputs = BinThresh.input_spec() for key, metadata in list(input_map.items()): for metakey, 
value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BinThresh_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = BinThresh.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_BinThreshTask.py b/nipype/interfaces/dtitk/tests/test_auto_BinThreshTask.py index 155c5a8406..b8e03f023b 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_BinThreshTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_BinThreshTask.py @@ -4,57 +4,31 @@ def test_BinThreshTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=0, - ), - inside_value=dict( - argstr='%g', - mandatory=True, - position=4, - usedefault=True, - ), - lower_bound=dict( - argstr='%g', - mandatory=True, - position=2, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=0,), + inside_value=dict(argstr="%g", mandatory=True, position=4, usedefault=True,), + lower_bound=dict(argstr="%g", mandatory=True, position=2, usedefault=True,), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_thrbin', + name_source="in_file", + name_template="%s_thrbin", position=1, ), - outside_value=dict( - argstr='%g', - mandatory=True, - position=5, - usedefault=True, - ), - upper_bound=dict( - argstr='%g', - mandatory=True, - position=3, - usedefault=True, - ), + outside_value=dict(argstr="%g", mandatory=True, position=5, usedefault=True,), + upper_bound=dict(argstr="%g", mandatory=True, position=3, usedefault=True,), ) inputs = BinThreshTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BinThreshTask_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = BinThreshTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_CommandLineDtitk.py b/nipype/interfaces/dtitk/tests/test_auto_CommandLineDtitk.py index 2e064a3150..ba76666b1f 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_CommandLineDtitk.py +++ b/nipype/interfaces/dtitk/tests/test_auto_CommandLineDtitk.py @@ -4,11 +4,7 @@ def test_CommandLineDtitk_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), environ=dict(nohash=True, usedefault=True,), ) inputs = CommandLineDtitk.input_spec() diff --git a/nipype/interfaces/dtitk/tests/test_auto_ComposeXfm.py b/nipype/interfaces/dtitk/tests/test_auto_ComposeXfm.py index 17b31df738..88dac765f2 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_ComposeXfm.py +++ b/nipype/interfaces/dtitk/tests/test_auto_ComposeXfm.py @@ -4,34 +4,21 @@ def test_ComposeXfm_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_aff=dict( - argstr='-aff %s', - extensions=None, - mandatory=True, - ), - in_df=dict( - argstr='-df %s', - extensions=None, - mandatory=True, - ), - out_file=dict( - argstr='-out %s', - extensions=None, - genfile=True, - ), + 
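Every test_auto_*.py module in this diff is generated and follows one pattern: declare the expected trait metadata, instantiate the spec, and assert that each (trait, metakey) pair matches. A condensed hand-written equivalent of that loop (the helper name is hypothetical; the assertions mirror the generated code exactly):

    from nipype.interfaces.dtitk.registration import ComposeXfm

    def assert_trait_metadata(interface, expected):
        # Same loop the generated tests run: every declared metakey must match.
        inputs = interface.input_spec()
        for trait_name, metadata in expected.items():
            for metakey, value in metadata.items():
                assert getattr(inputs.traits()[trait_name], metakey) == value

    assert_trait_metadata(ComposeXfm, {"in_df": dict(argstr="-df %s", mandatory=True)})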
args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_aff=dict(argstr="-aff %s", extensions=None, mandatory=True,), + in_df=dict(argstr="-df %s", extensions=None, mandatory=True,), + out_file=dict(argstr="-out %s", extensions=None, genfile=True,), ) inputs = ComposeXfm.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ComposeXfm_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ComposeXfm.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_ComposeXfmTask.py b/nipype/interfaces/dtitk/tests/test_auto_ComposeXfmTask.py index 2ad8ce96ab..3f573a1815 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_ComposeXfmTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_ComposeXfmTask.py @@ -4,34 +4,21 @@ def test_ComposeXfmTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_aff=dict( - argstr='-aff %s', - extensions=None, - mandatory=True, - ), - in_df=dict( - argstr='-df %s', - extensions=None, - mandatory=True, - ), - out_file=dict( - argstr='-out %s', - extensions=None, - genfile=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_aff=dict(argstr="-aff %s", extensions=None, mandatory=True,), + in_df=dict(argstr="-df %s", extensions=None, mandatory=True,), + out_file=dict(argstr="-out %s", extensions=None, genfile=True,), ) inputs = ComposeXfmTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ComposeXfmTask_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ComposeXfmTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_Diffeo.py b/nipype/interfaces/dtitk/tests/test_auto_Diffeo.py index cc960e36d5..cc354b0cde 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_Diffeo.py +++ b/nipype/interfaces/dtitk/tests/test_auto_Diffeo.py @@ -4,55 +4,25 @@ def test_Diffeo_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixed_file=dict( - argstr='%s', - extensions=None, - position=0, - ), - ftol=dict( - argstr='%g', - mandatory=True, - position=5, - usedefault=True, - ), - legacy=dict( - argstr='%d', - mandatory=True, - position=3, - usedefault=True, - ), - mask_file=dict( - argstr='%s', - extensions=None, - position=2, - ), - moving_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - position=1, - ), - n_iters=dict( - argstr='%d', - mandatory=True, - position=4, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + fixed_file=dict(argstr="%s", extensions=None, position=0,), + ftol=dict(argstr="%g", mandatory=True, position=5, usedefault=True,), + legacy=dict(argstr="%d", mandatory=True, position=3, usedefault=True,), + mask_file=dict(argstr="%s", extensions=None, position=2,), + moving_file=dict(argstr="%s", copyfile=False, extensions=None, position=1,), + n_iters=dict(argstr="%d", mandatory=True, position=4, usedefault=True,), ) inputs = Diffeo.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Diffeo_outputs(): output_map = dict( - out_file=dict(extensions=None, ), - out_file_xfm=dict(extensions=None, ), + out_file=dict(extensions=None,), out_file_xfm=dict(extensions=None,), ) outputs = Diffeo.output_spec() diff --git a/nipype/interfaces/dtitk/tests/test_auto_DiffeoScalarVol.py b/nipype/interfaces/dtitk/tests/test_auto_DiffeoScalarVol.py index 9924e1b8e3..cc85e03870 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_DiffeoScalarVol.py +++ b/nipype/interfaces/dtitk/tests/test_auto_DiffeoScalarVol.py @@ -4,51 +4,32 @@ def test_DiffeoScalarVol_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - flip=dict(argstr='-flip %d %d %d', ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - ), - interpolation=dict( - argstr='-interp %s', - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + flip=dict(argstr="-flip %d %d %d",), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True,), + interpolation=dict(argstr="-interp %s", usedefault=True,), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_diffeoxfmd', - ), - resampling_type=dict(argstr='-type %s', ), - target=dict( - argstr='-target %s', - extensions=None, - xor=['voxel_size'], - ), - transform=dict( - argstr='-trans %s', - extensions=None, - mandatory=True, - ), - voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target'], + name_source="in_file", + name_template="%s_diffeoxfmd", ), + resampling_type=dict(argstr="-type %s",), + target=dict(argstr="-target %s", extensions=None, xor=["voxel_size"],), + transform=dict(argstr="-trans %s", extensions=None, mandatory=True,), + voxel_size=dict(argstr="-vsize %g %g %g", xor=["target"],), ) inputs = DiffeoScalarVol.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DiffeoScalarVol_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = DiffeoScalarVol.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_DiffeoSymTensor3DVol.py b/nipype/interfaces/dtitk/tests/test_auto_DiffeoSymTensor3DVol.py index a51cbd2314..67626f3ce6 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_DiffeoSymTensor3DVol.py +++ b/nipype/interfaces/dtitk/tests/test_auto_DiffeoSymTensor3DVol.py @@ -4,59 +4,34 @@ def test_DiffeoSymTensor3DVol_inputs(): input_map = dict( - args=dict(argstr='%s', ), - df=dict( - argstr='-df %s', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - flip=dict(argstr='-flip %d %d %d', ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - ), - interpolation=dict( - argstr='-interp %s', - usedefault=True, - ), + args=dict(argstr="%s",), + df=dict(argstr="-df %s", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + flip=dict(argstr="-flip %d %d %d",), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True,), + interpolation=dict(argstr="-interp %s", usedefault=True,), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_diffeoxfmd', - ), - reorient=dict( - 
argstr='-reorient %s', - usedefault=True, - ), - resampling_type=dict(argstr='-type %s', ), - target=dict( - argstr='-target %s', - extensions=None, - xor=['voxel_size'], - ), - transform=dict( - argstr='-trans %s', - extensions=None, - mandatory=True, - ), - voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target'], - ), + name_source="in_file", + name_template="%s_diffeoxfmd", + ), + reorient=dict(argstr="-reorient %s", usedefault=True,), + resampling_type=dict(argstr="-type %s",), + target=dict(argstr="-target %s", extensions=None, xor=["voxel_size"],), + transform=dict(argstr="-trans %s", extensions=None, mandatory=True,), + voxel_size=dict(argstr="-vsize %g %g %g", xor=["target"],), ) inputs = DiffeoSymTensor3DVol.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DiffeoSymTensor3DVol_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = DiffeoSymTensor3DVol.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_DiffeoTask.py b/nipype/interfaces/dtitk/tests/test_auto_DiffeoTask.py index 5ba67405ea..0eb20d64d6 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_DiffeoTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_DiffeoTask.py @@ -4,55 +4,25 @@ def test_DiffeoTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixed_file=dict( - argstr='%s', - extensions=None, - position=0, - ), - ftol=dict( - argstr='%g', - mandatory=True, - position=5, - usedefault=True, - ), - legacy=dict( - argstr='%d', - mandatory=True, - position=3, - usedefault=True, - ), - mask_file=dict( - argstr='%s', - extensions=None, - position=2, - ), - moving_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - position=1, - ), - n_iters=dict( - argstr='%d', - mandatory=True, - position=4, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + fixed_file=dict(argstr="%s", extensions=None, position=0,), + ftol=dict(argstr="%g", mandatory=True, position=5, usedefault=True,), + legacy=dict(argstr="%d", mandatory=True, position=3, usedefault=True,), + mask_file=dict(argstr="%s", extensions=None, position=2,), + moving_file=dict(argstr="%s", copyfile=False, extensions=None, position=1,), + n_iters=dict(argstr="%d", mandatory=True, position=4, usedefault=True,), ) inputs = DiffeoTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DiffeoTask_outputs(): output_map = dict( - out_file=dict(extensions=None, ), - out_file_xfm=dict(extensions=None, ), + out_file=dict(extensions=None,), out_file_xfm=dict(extensions=None,), ) outputs = DiffeoTask.output_spec() diff --git a/nipype/interfaces/dtitk/tests/test_auto_Rigid.py b/nipype/interfaces/dtitk/tests/test_auto_Rigid.py index 4d968093c4..629fd5b780 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_Rigid.py +++ b/nipype/interfaces/dtitk/tests/test_auto_Rigid.py @@ -4,48 +4,21 @@ def test_Rigid_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), fixed_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=0, 
- ), - ftol=dict( - argstr='%g', - mandatory=True, - position=4, - usedefault=True, - ), - initialize_xfm=dict( - argstr='%s', - copyfile=True, - extensions=None, - position=5, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=0, ), + ftol=dict(argstr="%g", mandatory=True, position=4, usedefault=True,), + initialize_xfm=dict(argstr="%s", copyfile=True, extensions=None, position=5,), moving_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=1, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=1, ), sampling_xyz=dict( - argstr='%g %g %g', - mandatory=True, - position=3, - usedefault=True, + argstr="%g %g %g", mandatory=True, position=3, usedefault=True, ), similarity_metric=dict( - argstr='%s', - mandatory=True, - position=2, - usedefault=True, + argstr="%s", mandatory=True, position=2, usedefault=True, ), ) inputs = Rigid.input_spec() @@ -53,10 +26,11 @@ def test_Rigid_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Rigid_outputs(): output_map = dict( - out_file=dict(extensions=None, ), - out_file_xfm=dict(extensions=None, ), + out_file=dict(extensions=None,), out_file_xfm=dict(extensions=None,), ) outputs = Rigid.output_spec() diff --git a/nipype/interfaces/dtitk/tests/test_auto_RigidTask.py b/nipype/interfaces/dtitk/tests/test_auto_RigidTask.py index 93658d5d40..ec280e06a9 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_RigidTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_RigidTask.py @@ -4,48 +4,21 @@ def test_RigidTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), fixed_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=0, - ), - ftol=dict( - argstr='%g', - mandatory=True, - position=4, - usedefault=True, - ), - initialize_xfm=dict( - argstr='%s', - copyfile=True, - extensions=None, - position=5, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=0, ), + ftol=dict(argstr="%g", mandatory=True, position=4, usedefault=True,), + initialize_xfm=dict(argstr="%s", copyfile=True, extensions=None, position=5,), moving_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=1, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=1, ), sampling_xyz=dict( - argstr='%g %g %g', - mandatory=True, - position=3, - usedefault=True, + argstr="%g %g %g", mandatory=True, position=3, usedefault=True, ), similarity_metric=dict( - argstr='%s', - mandatory=True, - position=2, - usedefault=True, + argstr="%s", mandatory=True, position=2, usedefault=True, ), ) inputs = RigidTask.input_spec() @@ -53,10 +26,11 @@ def test_RigidTask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RigidTask_outputs(): output_map = dict( - out_file=dict(extensions=None, ), - out_file_xfm=dict(extensions=None, ), + out_file=dict(extensions=None,), out_file_xfm=dict(extensions=None,), ) outputs = RigidTask.output_spec() diff --git a/nipype/interfaces/dtitk/tests/test_auto_SVAdjustVoxSp.py b/nipype/interfaces/dtitk/tests/test_auto_SVAdjustVoxSp.py index 9091868546..c01e08a66e 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_SVAdjustVoxSp.py +++ 
b/nipype/interfaces/dtitk/tests/test_auto_SVAdjustVoxSp.py @@ -4,44 +4,31 @@ def test_SVAdjustVoxSp_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - ), - origin=dict( - argstr='-origin %g %g %g', - xor=['target_file'], - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True,), + origin=dict(argstr="-origin %g %g %g", xor=["target_file"],), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_avs', + name_source="in_file", + name_template="%s_avs", ), target_file=dict( - argstr='-target %s', - extensions=None, - xor=['voxel_size', 'origin'], - ), - voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target_file'], + argstr="-target %s", extensions=None, xor=["voxel_size", "origin"], ), + voxel_size=dict(argstr="-vsize %g %g %g", xor=["target_file"],), ) inputs = SVAdjustVoxSp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SVAdjustVoxSp_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = SVAdjustVoxSp.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_SVAdjustVoxSpTask.py b/nipype/interfaces/dtitk/tests/test_auto_SVAdjustVoxSpTask.py index c1b4dc619a..0ca4e416ae 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_SVAdjustVoxSpTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_SVAdjustVoxSpTask.py @@ -4,44 +4,31 @@ def test_SVAdjustVoxSpTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - ), - origin=dict( - argstr='-origin %g %g %g', - xor=['target_file'], - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True,), + origin=dict(argstr="-origin %g %g %g", xor=["target_file"],), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_avs', + name_source="in_file", + name_template="%s_avs", ), target_file=dict( - argstr='-target %s', - extensions=None, - xor=['voxel_size', 'origin'], - ), - voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target_file'], + argstr="-target %s", extensions=None, xor=["voxel_size", "origin"], ), + voxel_size=dict(argstr="-vsize %g %g %g", xor=["target_file"],), ) inputs = SVAdjustVoxSpTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SVAdjustVoxSpTask_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = SVAdjustVoxSpTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_SVResample.py b/nipype/interfaces/dtitk/tests/test_auto_SVResample.py index 6aecbc29c9..c08df4bfbb 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_SVResample.py +++ b/nipype/interfaces/dtitk/tests/test_auto_SVResample.py @@ -4,49 +4,35 @@ def 
test_SVResample_inputs(): input_map = dict( - align=dict(argstr='-align %s', ), - args=dict(argstr='%s', ), - array_size=dict( - argstr='-size %d %d %d', - xor=['target_file'], - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - ), - origin=dict( - argstr='-origin %g %g %g', - xor=['target_file'], - ), + align=dict(argstr="-align %s",), + args=dict(argstr="%s",), + array_size=dict(argstr="-size %d %d %d", xor=["target_file"],), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True,), + origin=dict(argstr="-origin %g %g %g", xor=["target_file"],), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_resampled', + name_source="in_file", + name_template="%s_resampled", ), target_file=dict( - argstr='-target %s', + argstr="-target %s", extensions=None, - xor=['array_size', 'voxel_size', 'origin'], - ), - voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target_file'], + xor=["array_size", "voxel_size", "origin"], ), + voxel_size=dict(argstr="-vsize %g %g %g", xor=["target_file"],), ) inputs = SVResample.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SVResample_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = SVResample.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_SVResampleTask.py b/nipype/interfaces/dtitk/tests/test_auto_SVResampleTask.py index 92efe23e44..467163504b 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_SVResampleTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_SVResampleTask.py @@ -4,49 +4,35 @@ def test_SVResampleTask_inputs(): input_map = dict( - align=dict(argstr='-align %s', ), - args=dict(argstr='%s', ), - array_size=dict( - argstr='-size %d %d %d', - xor=['target_file'], - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - ), - origin=dict( - argstr='-origin %g %g %g', - xor=['target_file'], - ), + align=dict(argstr="-align %s",), + args=dict(argstr="%s",), + array_size=dict(argstr="-size %d %d %d", xor=["target_file"],), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True,), + origin=dict(argstr="-origin %g %g %g", xor=["target_file"],), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_resampled', + name_source="in_file", + name_template="%s_resampled", ), target_file=dict( - argstr='-target %s', + argstr="-target %s", extensions=None, - xor=['array_size', 'voxel_size', 'origin'], - ), - voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target_file'], + xor=["array_size", "voxel_size", "origin"], ), + voxel_size=dict(argstr="-vsize %g %g %g", xor=["target_file"],), ) inputs = SVResampleTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SVResampleTask_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = SVResampleTask.output_spec() for key, 
metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_TVAdjustOriginTask.py b/nipype/interfaces/dtitk/tests/test_auto_TVAdjustOriginTask.py index 406ffcb418..d22aa78c9c 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_TVAdjustOriginTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_TVAdjustOriginTask.py @@ -4,44 +4,31 @@ def test_TVAdjustOriginTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - ), - origin=dict( - argstr='-origin %g %g %g', - xor=['target_file'], - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True,), + origin=dict(argstr="-origin %g %g %g", xor=["target_file"],), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_avs', + name_source="in_file", + name_template="%s_avs", ), target_file=dict( - argstr='-target %s', - extensions=None, - xor=['voxel_size', 'origin'], - ), - voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target_file'], + argstr="-target %s", extensions=None, xor=["voxel_size", "origin"], ), + voxel_size=dict(argstr="-vsize %g %g %g", xor=["target_file"],), ) inputs = TVAdjustOriginTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TVAdjustOriginTask_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = TVAdjustOriginTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_TVAdjustVoxSp.py b/nipype/interfaces/dtitk/tests/test_auto_TVAdjustVoxSp.py index 1d2a9fa6b5..70dc59c5a3 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_TVAdjustVoxSp.py +++ b/nipype/interfaces/dtitk/tests/test_auto_TVAdjustVoxSp.py @@ -4,44 +4,31 @@ def test_TVAdjustVoxSp_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - ), - origin=dict( - argstr='-origin %g %g %g', - xor=['target_file'], - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True,), + origin=dict(argstr="-origin %g %g %g", xor=["target_file"],), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_avs', + name_source="in_file", + name_template="%s_avs", ), target_file=dict( - argstr='-target %s', - extensions=None, - xor=['voxel_size', 'origin'], - ), - voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target_file'], + argstr="-target %s", extensions=None, xor=["voxel_size", "origin"], ), + voxel_size=dict(argstr="-vsize %g %g %g", xor=["target_file"],), ) inputs = TVAdjustVoxSp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TVAdjustVoxSp_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = TVAdjustVoxSp.output_spec() for key, metadata in list(output_map.items()): diff --git 
a/nipype/interfaces/dtitk/tests/test_auto_TVAdjustVoxSpTask.py b/nipype/interfaces/dtitk/tests/test_auto_TVAdjustVoxSpTask.py index 6bd1f4601f..a23056e502 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_TVAdjustVoxSpTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_TVAdjustVoxSpTask.py @@ -4,44 +4,31 @@ def test_TVAdjustVoxSpTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - ), - origin=dict( - argstr='-origin %g %g %g', - xor=['target_file'], - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True,), + origin=dict(argstr="-origin %g %g %g", xor=["target_file"],), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_avs', + name_source="in_file", + name_template="%s_avs", ), target_file=dict( - argstr='-target %s', - extensions=None, - xor=['voxel_size', 'origin'], - ), - voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target_file'], + argstr="-target %s", extensions=None, xor=["voxel_size", "origin"], ), + voxel_size=dict(argstr="-vsize %g %g %g", xor=["target_file"],), ) inputs = TVAdjustVoxSpTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TVAdjustVoxSpTask_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = TVAdjustVoxSpTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_TVResample.py b/nipype/interfaces/dtitk/tests/test_auto_TVResample.py index f8101d07ab..13142572f9 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_TVResample.py +++ b/nipype/interfaces/dtitk/tests/test_auto_TVResample.py @@ -4,50 +4,36 @@ def test_TVResample_inputs(): input_map = dict( - align=dict(argstr='-align %s', ), - args=dict(argstr='%s', ), - array_size=dict( - argstr='-size %d %d %d', - xor=['target_file'], - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - ), - interpolation=dict(argstr='-interp %s', ), - origin=dict( - argstr='-origin %g %g %g', - xor=['target_file'], - ), + align=dict(argstr="-align %s",), + args=dict(argstr="%s",), + array_size=dict(argstr="-size %d %d %d", xor=["target_file"],), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True,), + interpolation=dict(argstr="-interp %s",), + origin=dict(argstr="-origin %g %g %g", xor=["target_file"],), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_resampled', + name_source="in_file", + name_template="%s_resampled", ), target_file=dict( - argstr='-target %s', + argstr="-target %s", extensions=None, - xor=['array_size', 'voxel_size', 'origin'], - ), - voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target_file'], + xor=["array_size", "voxel_size", "origin"], ), + voxel_size=dict(argstr="-vsize %g %g %g", xor=["target_file"],), ) inputs = TVResample.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def 
test_TVResample_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = TVResample.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_TVResampleTask.py b/nipype/interfaces/dtitk/tests/test_auto_TVResampleTask.py index d181ae6bc9..3bf6221d24 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_TVResampleTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_TVResampleTask.py @@ -4,50 +4,36 @@ def test_TVResampleTask_inputs(): input_map = dict( - align=dict(argstr='-align %s', ), - args=dict(argstr='%s', ), - array_size=dict( - argstr='-size %d %d %d', - xor=['target_file'], - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - ), - interpolation=dict(argstr='-interp %s', ), - origin=dict( - argstr='-origin %g %g %g', - xor=['target_file'], - ), + align=dict(argstr="-align %s",), + args=dict(argstr="%s",), + array_size=dict(argstr="-size %d %d %d", xor=["target_file"],), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True,), + interpolation=dict(argstr="-interp %s",), + origin=dict(argstr="-origin %g %g %g", xor=["target_file"],), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_resampled', + name_source="in_file", + name_template="%s_resampled", ), target_file=dict( - argstr='-target %s', + argstr="-target %s", extensions=None, - xor=['array_size', 'voxel_size', 'origin'], - ), - voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target_file'], + xor=["array_size", "voxel_size", "origin"], ), + voxel_size=dict(argstr="-vsize %g %g %g", xor=["target_file"],), ) inputs = TVResampleTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TVResampleTask_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = TVResampleTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_TVtool.py b/nipype/interfaces/dtitk/tests/test_auto_TVtool.py index 322e226612..812049f83b 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_TVtool.py +++ b/nipype/interfaces/dtitk/tests/test_auto_TVtool.py @@ -4,30 +4,21 @@ def test_TVtool_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - ), - in_flag=dict(argstr='-%s', ), - out_file=dict( - argstr='-out %s', - extensions=None, - genfile=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True,), + in_flag=dict(argstr="-%s",), + out_file=dict(argstr="-out %s", extensions=None, genfile=True,), ) inputs = TVtool.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TVtool_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = TVtool.output_spec() for key, metadata in list(output_map.items()): diff --git 
a/nipype/interfaces/dtitk/tests/test_auto_TVtoolTask.py b/nipype/interfaces/dtitk/tests/test_auto_TVtoolTask.py index 0699b69687..a26d2e76f2 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_TVtoolTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_TVtoolTask.py @@ -4,30 +4,21 @@ def test_TVtoolTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - ), - in_flag=dict(argstr='-%s', ), - out_file=dict( - argstr='-out %s', - extensions=None, - genfile=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True,), + in_flag=dict(argstr="-%s",), + out_file=dict(argstr="-out %s", extensions=None, genfile=True,), ) inputs = TVtoolTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TVtoolTask_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = TVtoolTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_affScalarVolTask.py b/nipype/interfaces/dtitk/tests/test_auto_affScalarVolTask.py index df6eeaed84..67b1d0efbb 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_affScalarVolTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_affScalarVolTask.py @@ -4,57 +4,36 @@ def test_affScalarVolTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - deformation=dict( - argstr='-deformation %g %g %g %g %g %g', - xor=['transform'], - ), - environ=dict( - nohash=True, - usedefault=True, - ), - euler=dict( - argstr='-euler %g %g %g', - xor=['transform'], - ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - ), - interpolation=dict( - argstr='-interp %s', - usedefault=True, - ), + args=dict(argstr="%s",), + deformation=dict(argstr="-deformation %g %g %g %g %g %g", xor=["transform"],), + environ=dict(nohash=True, usedefault=True,), + euler=dict(argstr="-euler %g %g %g", xor=["transform"],), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True,), + interpolation=dict(argstr="-interp %s", usedefault=True,), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_affxfmd', - ), - target=dict( - argstr='-target %s', - extensions=None, - xor=['transform'], + name_source="in_file", + name_template="%s_affxfmd", ), + target=dict(argstr="-target %s", extensions=None, xor=["transform"],), transform=dict( - argstr='-trans %s', + argstr="-trans %s", extensions=None, - xor=['target', 'translation', 'euler', 'deformation'], - ), - translation=dict( - argstr='-translation %g %g %g', - xor=['transform'], + xor=["target", "translation", "euler", "deformation"], ), + translation=dict(argstr="-translation %g %g %g", xor=["transform"],), ) inputs = affScalarVolTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_affScalarVolTask_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = affScalarVolTask.output_spec() for key, metadata in list(output_map.items()): diff --git 
a/nipype/interfaces/dtitk/tests/test_auto_affSymTensor3DVolTask.py b/nipype/interfaces/dtitk/tests/test_auto_affSymTensor3DVolTask.py index 9d1f491e8e..33778b661e 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_affSymTensor3DVolTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_affSymTensor3DVolTask.py @@ -4,61 +4,37 @@ def test_affSymTensor3DVolTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - deformation=dict( - argstr='-deformation %g %g %g %g %g %g', - xor=['transform'], - ), - environ=dict( - nohash=True, - usedefault=True, - ), - euler=dict( - argstr='-euler %g %g %g', - xor=['transform'], - ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - ), - interpolation=dict( - argstr='-interp %s', - usedefault=True, - ), + args=dict(argstr="%s",), + deformation=dict(argstr="-deformation %g %g %g %g %g %g", xor=["transform"],), + environ=dict(nohash=True, usedefault=True,), + euler=dict(argstr="-euler %g %g %g", xor=["transform"],), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True,), + interpolation=dict(argstr="-interp %s", usedefault=True,), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_affxfmd', - ), - reorient=dict( - argstr='-reorient %s', - usedefault=True, - ), - target=dict( - argstr='-target %s', - extensions=None, - xor=['transform'], + name_source="in_file", + name_template="%s_affxfmd", ), + reorient=dict(argstr="-reorient %s", usedefault=True,), + target=dict(argstr="-target %s", extensions=None, xor=["transform"],), transform=dict( - argstr='-trans %s', + argstr="-trans %s", extensions=None, - xor=['target', 'translation', 'euler', 'deformation'], - ), - translation=dict( - argstr='-translation %g %g %g', - xor=['transform'], + xor=["target", "translation", "euler", "deformation"], ), + translation=dict(argstr="-translation %g %g %g", xor=["transform"],), ) inputs = affSymTensor3DVolTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_affSymTensor3DVolTask_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = affSymTensor3DVolTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_diffeoScalarVolTask.py b/nipype/interfaces/dtitk/tests/test_auto_diffeoScalarVolTask.py index 062af0c769..e1e2468b5c 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_diffeoScalarVolTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_diffeoScalarVolTask.py @@ -4,51 +4,32 @@ def test_diffeoScalarVolTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - flip=dict(argstr='-flip %d %d %d', ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - ), - interpolation=dict( - argstr='-interp %s', - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + flip=dict(argstr="-flip %d %d %d",), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True,), + interpolation=dict(argstr="-interp %s", usedefault=True,), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_diffeoxfmd', - ), - resampling_type=dict(argstr='-type %s', ), - target=dict( - argstr='-target %s', - extensions=None, - 
xor=['voxel_size'], - ), - transform=dict( - argstr='-trans %s', - extensions=None, - mandatory=True, - ), - voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target'], + name_source="in_file", + name_template="%s_diffeoxfmd", ), + resampling_type=dict(argstr="-type %s",), + target=dict(argstr="-target %s", extensions=None, xor=["voxel_size"],), + transform=dict(argstr="-trans %s", extensions=None, mandatory=True,), + voxel_size=dict(argstr="-vsize %g %g %g", xor=["target"],), ) inputs = diffeoScalarVolTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_diffeoScalarVolTask_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = diffeoScalarVolTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/tests/test_auto_diffeoSymTensor3DVolTask.py b/nipype/interfaces/dtitk/tests/test_auto_diffeoSymTensor3DVolTask.py index 1b7a8f03bb..0956ec615a 100644 --- a/nipype/interfaces/dtitk/tests/test_auto_diffeoSymTensor3DVolTask.py +++ b/nipype/interfaces/dtitk/tests/test_auto_diffeoSymTensor3DVolTask.py @@ -4,59 +4,34 @@ def test_diffeoSymTensor3DVolTask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - df=dict( - argstr='-df %s', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - flip=dict(argstr='-flip %d %d %d', ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - ), - interpolation=dict( - argstr='-interp %s', - usedefault=True, - ), + args=dict(argstr="%s",), + df=dict(argstr="-df %s", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + flip=dict(argstr="-flip %d %d %d",), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True,), + interpolation=dict(argstr="-interp %s", usedefault=True,), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_diffeoxfmd', - ), - reorient=dict( - argstr='-reorient %s', - usedefault=True, - ), - resampling_type=dict(argstr='-type %s', ), - target=dict( - argstr='-target %s', - extensions=None, - xor=['voxel_size'], - ), - transform=dict( - argstr='-trans %s', - extensions=None, - mandatory=True, - ), - voxel_size=dict( - argstr='-vsize %g %g %g', - xor=['target'], - ), + name_source="in_file", + name_template="%s_diffeoxfmd", + ), + reorient=dict(argstr="-reorient %s", usedefault=True,), + resampling_type=dict(argstr="-type %s",), + target=dict(argstr="-target %s", extensions=None, xor=["voxel_size"],), + transform=dict(argstr="-trans %s", extensions=None, mandatory=True,), + voxel_size=dict(argstr="-vsize %g %g %g", xor=["target"],), ) inputs = diffeoSymTensor3DVolTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_diffeoSymTensor3DVolTask_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = diffeoSymTensor3DVolTask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/dtitk/utils.py b/nipype/interfaces/dtitk/utils.py index 3ed6e61395..e959fd8f0c 100644 --- a/nipype/interfaces/dtitk/utils.py +++ b/nipype/interfaces/dtitk/utils.py @@ -29,25 +29,35 @@ from .base import CommandLineDtitk, DTITKRenameMixin import os 
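
All of the test_auto_* hunks above instantiate one generated template: the expected trait metadata is declared as an input_map (and output_map) dict, then asserted field-by-field against the interface spec. A minimal hand-written sketch of that check for TVtool, with a metadata subset copied from its hunk and assuming nipype is importable:

from nipype.interfaces.dtitk import TVtool

def check_tvtool_metadata():
    # expected metadata, copied from the generated input_map (subset)
    input_map = dict(
        in_file=dict(argstr="-in %s", mandatory=True),
        in_flag=dict(argstr="-%s"),
        out_file=dict(argstr="-out %s", genfile=True),
    )
    inputs = TVtool.input_spec()
    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            # each declared metakey must match the trait's stored metadata
            assert getattr(inputs.traits()[key], metakey) == value
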
-__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" class TVAdjustVoxSpInputSpec(CommandLineInputSpec): - in_file = File(desc="tensor volume to modify", exists=True, - mandatory=True, argstr="-in %s") - out_file = File(desc='output path', - argstr="-out %s", name_source='in_file', - name_template='%s_avs', keep_extension=True) - target_file = File(desc='target volume to match', - argstr="-target %s", - xor=['voxel_size', 'origin']) - voxel_size = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='xyz voxel size (superseded by target)', - argstr="-vsize %g %g %g", xor=['target_file']) - origin = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='xyz origin (superseded by target)', - argstr='-origin %g %g %g', - xor=['target_file']) + in_file = File( + desc="tensor volume to modify", exists=True, mandatory=True, argstr="-in %s" + ) + out_file = File( + desc="output path", + argstr="-out %s", + name_source="in_file", + name_template="%s_avs", + keep_extension=True, + ) + target_file = File( + desc="target volume to match", argstr="-target %s", xor=["voxel_size", "origin"] + ) + voxel_size = traits.Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="xyz voxel size (superseded by target)", + argstr="-vsize %g %g %g", + xor=["target_file"], + ) + origin = traits.Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="xyz origin (superseded by target)", + argstr="-origin %g %g %g", + xor=["target_file"], + ) class TVAdjustVoxSpOutputSpec(TraitedSpec): @@ -69,26 +79,38 @@ class TVAdjustVoxSp(CommandLineDtitk): 'TVAdjustVoxelspace -in im1.nii -out im1_avs.nii -target im2.nii' >>> node.run() # doctest: +SKIP """ + input_spec = TVAdjustVoxSpInputSpec output_spec = TVAdjustVoxSpOutputSpec - _cmd = 'TVAdjustVoxelspace' + _cmd = "TVAdjustVoxelspace" class SVAdjustVoxSpInputSpec(CommandLineInputSpec): - in_file = File(desc="scalar volume to modify", exists=True, - mandatory=True, argstr="-in %s") - out_file = File(desc='output path', argstr="-out %s", - name_source="in_file", name_template='%s_avs', - keep_extension=True) - target_file = File(desc='target volume to match', - argstr="-target %s", xor=['voxel_size', 'origin']) - voxel_size = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='xyz voxel size (superseded by target)', - argstr="-vsize %g %g %g", xor=['target_file']) - origin = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='xyz origin (superseded by target)', - argstr='-origin %g %g %g', - xor=['target_file']) + in_file = File( + desc="scalar volume to modify", exists=True, mandatory=True, argstr="-in %s" + ) + out_file = File( + desc="output path", + argstr="-out %s", + name_source="in_file", + name_template="%s_avs", + keep_extension=True, + ) + target_file = File( + desc="target volume to match", argstr="-target %s", xor=["voxel_size", "origin"] + ) + voxel_size = traits.Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="xyz voxel size (superseded by target)", + argstr="-vsize %g %g %g", + xor=["target_file"], + ) + origin = traits.Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="xyz origin (superseded by target)", + argstr="-origin %g %g %g", + xor=["target_file"], + ) class SVAdjustVoxSpOutputSpec(TraitedSpec): @@ -110,33 +132,55 @@ class SVAdjustVoxSp(CommandLineDtitk): 'SVAdjustVoxelspace -in im1.nii -out im1_avs.nii -target im2.nii' >>> node.run() # doctest: +SKIP """ + input_spec = SVAdjustVoxSpInputSpec output_spec = 
SVAdjustVoxSpOutputSpec - _cmd = 'SVAdjustVoxelspace' + _cmd = "SVAdjustVoxelspace" class TVResampleInputSpec(CommandLineInputSpec): - in_file = File(desc="tensor volume to resample", exists=True, - mandatory=True, argstr="-in %s") - out_file = File(desc='output path', - name_source="in_file", name_template="%s_resampled", - keep_extension=True, argstr="-out %s") - target_file = File(desc='specs read from the target volume', - argstr="-target %s", - xor=['array_size', 'voxel_size', 'origin']) - align = traits.Enum('center', 'origin', argstr="-align %s", - desc='how to align output volume to input volume') - interpolation = traits.Enum('LEI', 'EI', argstr="-interp %s", - desc='Log Euclidean Euclidean Interpolation') - array_size = traits.Tuple((traits.Int(), traits.Int(), traits.Int()), - desc='resampled array size', xor=['target_file'], - argstr="-size %d %d %d") - voxel_size = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='resampled voxel size', xor=['target_file'], - argstr="-vsize %g %g %g") - origin = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='xyz origin', xor=['target_file'], - argstr='-origin %g %g %g') + in_file = File( + desc="tensor volume to resample", exists=True, mandatory=True, argstr="-in %s" + ) + out_file = File( + desc="output path", + name_source="in_file", + name_template="%s_resampled", + keep_extension=True, + argstr="-out %s", + ) + target_file = File( + desc="specs read from the target volume", + argstr="-target %s", + xor=["array_size", "voxel_size", "origin"], + ) + align = traits.Enum( + "center", + "origin", + argstr="-align %s", + desc="how to align output volume to input volume", + ) + interpolation = traits.Enum( + "LEI", "EI", argstr="-interp %s", desc="Log Euclidean Euclidean Interpolation" + ) + array_size = traits.Tuple( + (traits.Int(), traits.Int(), traits.Int()), + desc="resampled array size", + xor=["target_file"], + argstr="-size %d %d %d", + ) + voxel_size = traits.Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="resampled voxel size", + xor=["target_file"], + argstr="-vsize %g %g %g", + ) + origin = traits.Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="xyz origin", + xor=["target_file"], + argstr="-origin %g %g %g", + ) class TVResampleOutputSpec(TraitedSpec): @@ -158,31 +202,52 @@ class TVResample(CommandLineDtitk): 'TVResample -in im1.nii -out im1_resampled.nii -target im2.nii' >>> node.run() # doctest: +SKIP """ + input_spec = TVResampleInputSpec output_spec = TVResampleOutputSpec - _cmd = 'TVResample' + _cmd = "TVResample" class SVResampleInputSpec(CommandLineInputSpec): - in_file = File(desc="image to resample", exists=True, - mandatory=True, argstr="-in %s") - out_file = File(desc='output path', - name_source="in_file", name_template="%s_resampled", - keep_extension=True, argstr="-out %s") - target_file = File(desc='specs read from the target volume', - argstr="-target %s", - xor=['array_size', 'voxel_size', 'origin']) - align = traits.Enum('center', 'origin', argstr="-align %s", - desc='how to align output volume to input volume') - array_size = traits.Tuple((traits.Int(), traits.Int(), traits.Int()), - desc='resampled array size', xor=['target_file'], - argstr="-size %d %d %d") - voxel_size = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='resampled voxel size', xor=['target_file'], - argstr="-vsize %g %g %g") - origin = traits.Tuple((traits.Float(), traits.Float(), traits.Float()), - desc='xyz origin', xor=['target_file'], - 
argstr='-origin %g %g %g') + in_file = File( + desc="image to resample", exists=True, mandatory=True, argstr="-in %s" + ) + out_file = File( + desc="output path", + name_source="in_file", + name_template="%s_resampled", + keep_extension=True, + argstr="-out %s", + ) + target_file = File( + desc="specs read from the target volume", + argstr="-target %s", + xor=["array_size", "voxel_size", "origin"], + ) + align = traits.Enum( + "center", + "origin", + argstr="-align %s", + desc="how to align output volume to input volume", + ) + array_size = traits.Tuple( + (traits.Int(), traits.Int(), traits.Int()), + desc="resampled array size", + xor=["target_file"], + argstr="-size %d %d %d", + ) + voxel_size = traits.Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="resampled voxel size", + xor=["target_file"], + argstr="-vsize %g %g %g", + ) + origin = traits.Tuple( + (traits.Float(), traits.Float(), traits.Float()), + desc="xyz origin", + xor=["target_file"], + argstr="-origin %g %g %g", + ) class SVResampleOutputSpec(TraitedSpec): @@ -204,17 +269,18 @@ class SVResample(CommandLineDtitk): 'SVResample -in im1.nii -out im1_resampled.nii -target im2.nii' >>> node.run() # doctest: +SKIP """ + input_spec = SVResampleInputSpec output_spec = SVResampleOutputSpec - _cmd = 'SVResample' + _cmd = "SVResample" class TVtoolInputSpec(CommandLineInputSpec): - in_file = File(desc="scalar volume to resample", exists=True, - argstr="-in %s", mandatory=True) - '''NOTE: there are a lot more options here; not implementing all of them''' - in_flag = traits.Enum('fa', 'tr', 'ad', 'rd', 'pd', 'rgb', - argstr="-%s", desc='') + in_file = File( + desc="scalar volume to resample", exists=True, argstr="-in %s", mandatory=True + ) + """NOTE: there are a lot more options here; not implementing all of them""" + in_flag = traits.Enum("fa", "tr", "ad", "rd", "pd", "rgb", argstr="-%s", desc="") out_file = File(argstr="-out %s", genfile=True) @@ -237,46 +303,78 @@ class TVtool(CommandLineDtitk): 'TVtool -in im1.nii -fa -out im1_fa.nii' >>> node.run() # doctest: +SKIP """ + input_spec = TVtoolInputSpec output_spec = TVtoolOutputSpec - _cmd = 'TVtool' + _cmd = "TVtool" def _list_outputs(self): outputs = self._outputs().get() out_file = self.inputs.out_file if not isdefined(out_file): - out_file = self._gen_filename('out_file') - outputs['out_file'] = os.path.abspath(out_file) + out_file = self._gen_filename("out_file") + outputs["out_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): - if name != 'out_file': + if name != "out_file": return - return fname_presuffix(os.path.basename(self.inputs.in_file), - suffix='_' + self.inputs.in_flag) + return fname_presuffix( + os.path.basename(self.inputs.in_file), suffix="_" + self.inputs.in_flag + ) -'''Note: SVTool not implemented at this time''' +"""Note: SVTool not implemented at this time""" class BinThreshInputSpec(CommandLineInputSpec): - in_file = File(desc='Image to threshold/binarize', exists=True, - position=0, argstr="%s", mandatory=True) - out_file = File(desc='output path', position=1, argstr="%s", - keep_extension=True, name_source='in_file', - name_template='%s_thrbin') - lower_bound = traits.Float(0.01, usedefault=True, - position=2, argstr="%g", mandatory=True, - desc='lower bound of binarization range') - upper_bound = traits.Float(100, usedefault=True, - position=3, argstr="%g", mandatory=True, - desc='upper bound of binarization range') - inside_value = traits.Float(1, position=4, argstr="%g", usedefault=True, - mandatory=True, 
desc='value for voxels in ' - 'binarization range') - outside_value = traits.Float(0, position=5, argstr="%g", usedefault=True, - mandatory=True, desc='value for voxels' - 'outside of binarization range') + in_file = File( + desc="Image to threshold/binarize", + exists=True, + position=0, + argstr="%s", + mandatory=True, + ) + out_file = File( + desc="output path", + position=1, + argstr="%s", + keep_extension=True, + name_source="in_file", + name_template="%s_thrbin", + ) + lower_bound = traits.Float( + 0.01, + usedefault=True, + position=2, + argstr="%g", + mandatory=True, + desc="lower bound of binarization range", + ) + upper_bound = traits.Float( + 100, + usedefault=True, + position=3, + argstr="%g", + mandatory=True, + desc="upper bound of binarization range", + ) + inside_value = traits.Float( + 1, + position=4, + argstr="%g", + usedefault=True, + mandatory=True, + desc="value for voxels in " "binarization range", + ) + outside_value = traits.Float( + 0, + position=5, + argstr="%g", + usedefault=True, + mandatory=True, + desc="value for voxels" "outside of binarization range", + ) class BinThreshOutputSpec(TraitedSpec): @@ -304,7 +402,7 @@ class BinThresh(CommandLineDtitk): input_spec = BinThreshInputSpec output_spec = BinThreshOutputSpec - _cmd = 'BinaryThresholdImageFilter' + _cmd = "BinaryThresholdImageFilter" class BinThreshTask(DTITKRenameMixin, BinThresh): diff --git a/nipype/interfaces/dynamic_slicer.py b/nipype/interfaces/dynamic_slicer.py index db238127c9..8404aad802 100644 --- a/nipype/interfaces/dynamic_slicer.py +++ b/nipype/interfaces/dynamic_slicer.py @@ -6,28 +6,34 @@ import warnings import xml.dom.minidom -from .base import (CommandLine, CommandLineInputSpec, DynamicTraitedSpec, - traits, Undefined, File, isdefined) +from .base import ( + CommandLine, + CommandLineInputSpec, + DynamicTraitedSpec, + traits, + Undefined, + File, + isdefined, +) class SlicerCommandLineInputSpec(DynamicTraitedSpec, CommandLineInputSpec): - module = traits.Str( - desc="name of the Slicer command line module you want to use") + module = traits.Str(desc="name of the Slicer command line module you want to use") class SlicerCommandLine(CommandLine): """Experimental Slicer wrapper. Work in progress. 
""" + _cmd = "Slicer3" input_spec = SlicerCommandLineInputSpec output_spec = DynamicTraitedSpec def _grab_xml(self, module): cmd = CommandLine( - command="Slicer3", - resource_monitor=False, - args="--launch %s --xml" % module) + command="Slicer3", resource_monitor=False, args="--launch %s --xml" % module + ) ret = cmd.run() if ret.runtime.returncode == 0: return xml.dom.minidom.parseString(ret.runtime.stdout) @@ -38,8 +44,8 @@ def _outputs(self): base = super(SlicerCommandLine, self)._outputs() undefined_output_traits = {} for key in [ - node.getElementsByTagName('name')[0].firstChild.nodeValue - for node in self._outputs_nodes + node.getElementsByTagName("name")[0].firstChild.nodeValue + for node in self._outputs_nodes ]: base.add_trait(key, File(exists=True)) undefined_output_traits[key] = Undefined @@ -48,9 +54,10 @@ def _outputs(self): return base def __init__(self, module, **inputs): - warnings.warn('slicer is Not fully implemented', RuntimeWarning) + warnings.warn("slicer is Not fully implemented", RuntimeWarning) super(SlicerCommandLine, self).__init__( - command="Slicer3 --launch %s " % module, name=module, **inputs) + command="Slicer3 --launch %s " % module, name=module, **inputs + ) dom = self._grab_xml(module) self._outputs_filenames = {} @@ -60,95 +67,89 @@ def __init__(self, module, **inputs): for paramGroup in dom.getElementsByTagName("parameters"): for param in paramGroup.childNodes: - if param.nodeName in [ - 'label', 'description', '#text', '#comment' - ]: + if param.nodeName in ["label", "description", "#text", "#comment"]: continue traitsParams = {} - name = param.getElementsByTagName('name')[ - 0].firstChild.nodeValue + name = param.getElementsByTagName("name")[0].firstChild.nodeValue - longFlagNode = param.getElementsByTagName('longflag') + longFlagNode = param.getElementsByTagName("longflag") if longFlagNode: - traitsParams[ - "argstr"] = "--" + longFlagNode[0].firstChild.nodeValue + " " + traitsParams["argstr"] = ( + "--" + longFlagNode[0].firstChild.nodeValue + " " + ) else: traitsParams["argstr"] = "--" + name + " " argsDict = { - 'file': '%s', - 'integer': "%d", - 'double': "%f", - 'float': "%f", - 'image': "%s", - 'transform': "%s", - 'boolean': '', - 'string-enumeration': '%s', - 'string': "%s" + "file": "%s", + "integer": "%d", + "double": "%f", + "float": "%f", + "image": "%s", + "transform": "%s", + "boolean": "", + "string-enumeration": "%s", + "string": "%s", } - if param.nodeName.endswith('-vector'): + if param.nodeName.endswith("-vector"): traitsParams["argstr"] += argsDict[param.nodeName[:-7]] else: traitsParams["argstr"] += argsDict[param.nodeName] - index = param.getElementsByTagName('index') + index = param.getElementsByTagName("index") if index: traitsParams["position"] = index[0].firstChild.nodeValue - desc = param.getElementsByTagName('description') + desc = param.getElementsByTagName("description") if index: traitsParams["desc"] = desc[0].firstChild.nodeValue - name = param.getElementsByTagName('name')[ - 0].firstChild.nodeValue + name = param.getElementsByTagName("name")[0].firstChild.nodeValue typesDict = { - 'integer': traits.Int, - 'double': traits.Float, - 'float': traits.Float, - 'image': File, - 'transform': File, - 'boolean': traits.Bool, - 'string': traits.Str, - 'file': File + "integer": traits.Int, + "double": traits.Float, + "float": traits.Float, + "image": File, + "transform": File, + "boolean": traits.Bool, + "string": traits.Str, + "file": File, } - if param.nodeName == 'string-enumeration': + if param.nodeName == 
"string-enumeration": type = traits.Enum values = [ el.firstChild.nodeValue - for el in param.getElementsByTagName('element') + for el in param.getElementsByTagName("element") ] - elif param.nodeName.endswith('-vector'): + elif param.nodeName.endswith("-vector"): type = traits.List values = [typesDict[param.nodeName[:-7]]] - traitsParams["sep"] = ',' + traitsParams["sep"] = "," else: values = [] type = typesDict[param.nodeName] - if param.nodeName in [ - 'file', 'directory', 'image', 'transform' - ] and param.getElementsByTagName( - 'channel')[0].firstChild.nodeValue == 'output': - self.inputs.add_trait(name, - traits.Either( - traits.Bool, File, - **traitsParams)) + if ( + param.nodeName in ["file", "directory", "image", "transform"] + and param.getElementsByTagName("channel")[0].firstChild.nodeValue + == "output" + ): + self.inputs.add_trait( + name, traits.Either(traits.Bool, File, **traitsParams) + ) undefined_traits[name] = Undefined # traitsParams["exists"] = True - self._outputs_filenames[ - name] = self._gen_filename_from_param(param) + self._outputs_filenames[name] = self._gen_filename_from_param(param) # undefined_output_traits[name] = Undefined # self._outputs().add_trait(name, File(*values, **traitsParams)) self._outputs_nodes.append(param) else: - if param.nodeName in [ - 'file', 'directory', 'image', 'transform' - ]: + if param.nodeName in ["file", "directory", "image", "transform"]: traitsParams["exists"] = True self.inputs.add_trait(name, type(*values, **traitsParams)) undefined_traits[name] = Undefined @@ -164,23 +165,18 @@ def _gen_filename(self, name): return None def _gen_filename_from_param(self, param): - base = param.getElementsByTagName('name')[0].firstChild.nodeValue + base = param.getElementsByTagName("name")[0].firstChild.nodeValue fileExtensions = param.getAttribute("fileExtensions") if fileExtensions: ext = fileExtensions else: - ext = { - 'image': '.nii', - 'transform': '.txt', - 'file': '' - }[param.nodeName] + ext = {"image": ".nii", "transform": ".txt", "file": ""}[param.nodeName] return base + ext def _list_outputs(self): outputs = self.output_spec().get() for output_node in self._outputs_nodes: - name = output_node.getElementsByTagName('name')[ - 0].firstChild.nodeValue + name = output_node.getElementsByTagName("name")[0].firstChild.nodeValue outputs[name] = getattr(self.inputs, name) if isdefined(outputs[name]) and isinstance(outputs[name], bool): if outputs[name]: @@ -191,8 +187,8 @@ def _list_outputs(self): def _format_arg(self, name, spec, value): if name in [ - output_node.getElementsByTagName('name')[0] - .firstChild.nodeValue for output_node in self._outputs_nodes + output_node.getElementsByTagName("name")[0].firstChild.nodeValue + for output_node in self._outputs_nodes ]: if isinstance(value, bool): fname = self._gen_filename(name) diff --git a/nipype/interfaces/elastix/base.py b/nipype/interfaces/elastix/base.py index 748f69f44d..6e26937793 100644 --- a/nipype/interfaces/elastix/base.py +++ b/nipype/interfaces/elastix/base.py @@ -12,19 +12,23 @@ from ... 
import logging from ..base import CommandLineInputSpec, Directory, traits -iflogger = logging.getLogger('nipype.interface') + +iflogger = logging.getLogger("nipype.interface") class ElastixBaseInputSpec(CommandLineInputSpec): output_path = Directory( - './', + "./", exists=True, mandatory=True, usedefault=True, - argstr='-out %s', - desc='output directory') + argstr="-out %s", + desc="output directory", + ) num_threads = traits.Int( - 1, usedefault=True, - argstr='-threads %01d', + 1, + usedefault=True, + argstr="-threads %01d", nohash=True, - desc='set the maximum number of threads of elastix') + desc="set the maximum number of threads of elastix", + ) diff --git a/nipype/interfaces/elastix/registration.py b/nipype/interfaces/elastix/registration.py index 539534aada..9c6074014b 100644 --- a/nipype/interfaces/elastix/registration.py +++ b/nipype/interfaces/elastix/registration.py @@ -14,38 +14,37 @@ from .base import ElastixBaseInputSpec from ..base import CommandLine, TraitedSpec, File, traits, InputMultiPath -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") class RegistrationInputSpec(ElastixBaseInputSpec): - fixed_image = File( - exists=True, mandatory=True, argstr='-f %s', desc='fixed image') + fixed_image = File(exists=True, mandatory=True, argstr="-f %s", desc="fixed image") moving_image = File( - exists=True, mandatory=True, argstr='-m %s', desc='moving image') + exists=True, mandatory=True, argstr="-m %s", desc="moving image" + ) parameters = InputMultiPath( File(exists=True), mandatory=True, - argstr='-p %s...', - desc='parameter file, elastix handles 1 or more -p') - fixed_mask = File( - exists=True, argstr='-fMask %s', desc='mask for fixed image') - moving_mask = File( - exists=True, argstr='-mMask %s', desc='mask for moving image') + argstr="-p %s...", + desc="parameter file, elastix handles 1 or more -p", + ) + fixed_mask = File(exists=True, argstr="-fMask %s", desc="mask for fixed image") + moving_mask = File(exists=True, argstr="-mMask %s", desc="mask for moving image") initial_transform = File( - exists=True, - argstr='-t0 %s', - desc='parameter file for initial transform') + exists=True, argstr="-t0 %s", desc="parameter file for initial transform" + ) class RegistrationOutputSpec(TraitedSpec): - transform = InputMultiPath(File(exists=True), desc='output transform') - warped_file = File(desc='input moving image warped to fixed image') + transform = InputMultiPath(File(exists=True), desc="output transform") + warped_file = File(desc="input moving image warped to fixed image") warped_files = InputMultiPath( File(exists=False), - desc=('input moving image warped to fixed image at each level')) + desc=("input moving image warped to fixed image at each level"), + ) warped_files_flags = traits.List( - traits.Bool(False), - desc='flag indicating if warped image was generated') + traits.Bool(False), desc="flag indicating if warped image was generated" + ) class Registration(CommandLine): @@ -66,7 +65,7 @@ class Registration(CommandLine): """ - _cmd = 'elastix' + _cmd = "elastix" input_spec = RegistrationInputSpec output_spec = RegistrationOutputSpec @@ -75,37 +74,39 @@ def _list_outputs(self): out_dir = op.abspath(self.inputs.output_path) - regex = re.compile(r'^\((\w+)\s(.+)\)$') + regex = re.compile(r"^\((\w+)\s(.+)\)$") - outputs['transform'] = [] - outputs['warped_files'] = [] - outputs['warped_files_flags'] = [] + outputs["transform"] = [] + outputs["warped_files"] = [] + outputs["warped_files_flags"] = [] for i, params in 
enumerate(self.inputs.parameters): config = {} - with open(params, 'r') as f: + with open(params, "r") as f: for line in f.readlines(): line = line.strip() - if not line.startswith('//') and line: + if not line.startswith("//") and line: m = regex.search(line) if m: value = self._cast(m.group(2).strip()) config[m.group(1).strip()] = value - outputs['transform'].append( - op.join(out_dir, 'TransformParameters.%01d.txt' % i)) + outputs["transform"].append( + op.join(out_dir, "TransformParameters.%01d.txt" % i) + ) warped_file = None - if config['WriteResultImage']: - warped_file = op.join(out_dir, 'result.%01d.%s' % - (i, config['ResultImageFormat'])) + if config["WriteResultImage"]: + warped_file = op.join( + out_dir, "result.%01d.%s" % (i, config["ResultImageFormat"]) + ) - outputs['warped_files'].append(warped_file) - outputs['warped_files_flags'].append(config['WriteResultImage']) + outputs["warped_files"].append(warped_file) + outputs["warped_files_flags"].append(config["WriteResultImage"]) - if outputs['warped_files_flags'][-1]: - outputs['warped_file'] = outputs['warped_files'][-1] + if outputs["warped_files_flags"][-1]: + outputs["warped_file"] = outputs["warped_files"][-1] return outputs @@ -131,18 +132,17 @@ class ApplyWarpInputSpec(ElastixBaseInputSpec): transform_file = File( exists=True, mandatory=True, - argstr='-tp %s', - desc='transform-parameter file, only 1') + argstr="-tp %s", + desc="transform-parameter file, only 1", + ) moving_image = File( - exists=True, - argstr='-in %s', - mandatory=True, - desc='input image to deform') + exists=True, argstr="-in %s", mandatory=True, desc="input image to deform" + ) class ApplyWarpOutputSpec(TraitedSpec): - warped_file = File(desc='input moving image warped to fixed image') + warped_file = File(desc="input moving image warped to fixed image") class ApplyWarp(CommandLine): @@ -163,44 +163,47 @@ class ApplyWarp(CommandLine): """ - _cmd = 'transformix' + _cmd = "transformix" input_spec = ApplyWarpInputSpec output_spec = ApplyWarpOutputSpec def _list_outputs(self): outputs = self._outputs().get() out_dir = op.abspath(self.inputs.output_path) - outputs['warped_file'] = op.join(out_dir, 'result.nii.gz') + outputs["warped_file"] = op.join(out_dir, "result.nii.gz") return outputs class AnalyzeWarpInputSpec(ApplyWarpInputSpec): points = traits.Enum( - 'all', + "all", usedefault=True, position=0, - argstr='-def %s', - desc='transform all points from the input-image, which effectively' - ' generates a deformation field.') + argstr="-def %s", + desc="transform all points from the input-image, which effectively" + " generates a deformation field.", + ) jac = traits.Enum( - 'all', + "all", usedefault=True, - argstr='-jac %s', - desc='generate an image with the determinant of the spatial Jacobian') + argstr="-jac %s", + desc="generate an image with the determinant of the spatial Jacobian", + ) jacmat = traits.Enum( - 'all', + "all", usedefault=True, - argstr='-jacmat %s', - desc='generate an image with the spatial Jacobian matrix at each voxel') + argstr="-jacmat %s", + desc="generate an image with the spatial Jacobian matrix at each voxel", + ) moving_image = File( - exists=True, - argstr='-in %s', - desc='input image to deform (not used)') + exists=True, argstr="-in %s", desc="input image to deform (not used)" + ) + class AnalyzeWarpOutputSpec(TraitedSpec): - disp_field = File(desc='displacements field') - jacdet_map = File(desc='det(Jacobian) map') - jacmat_map = File(desc='Jacobian matrix map') + disp_field = File(desc="displacements field") + 
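
For orientation, Registration._list_outputs above works by scraping each elastix parameter file for its "(Key value)" lines, then using the WriteResultImage and ResultImageFormat entries to predict which warped images exist. A standalone sketch of just the parsing step; the sample lines are hypothetical, the regex is verbatim from the hunk, and the interface's _cast conversion is elided:

import re

regex = re.compile(r"^\((\w+)\s(.+)\)$")  # pattern compiled in _list_outputs

# hypothetical lines from an elastix TransformParameters file
param_lines = ['(WriteResultImage "true")', "(ResultImageFormat nii)"]

config = {}
for line in param_lines:
    m = regex.search(line.strip())
    if m:
        # the real method additionally passes the value through self._cast
        config[m.group(1).strip()] = m.group(2).strip()

print(config)  # {'WriteResultImage': '"true"', 'ResultImageFormat': 'nii'}
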
jacdet_map = File(desc="det(Jacobian) map") + jacmat_map = File(desc="Jacobian matrix map") class AnalyzeWarp(ApplyWarp): @@ -227,27 +230,29 @@ class AnalyzeWarp(ApplyWarp): def _list_outputs(self): outputs = self._outputs().get() out_dir = op.abspath(self.inputs.output_path) - outputs['disp_field'] = op.join(out_dir, 'deformationField.nii.gz') - outputs['jacdet_map'] = op.join(out_dir, 'spatialJacobian.nii.gz') - outputs['jacmat_map'] = op.join(out_dir, 'fullSpatialJacobian.nii.gz') + outputs["disp_field"] = op.join(out_dir, "deformationField.nii.gz") + outputs["jacdet_map"] = op.join(out_dir, "spatialJacobian.nii.gz") + outputs["jacmat_map"] = op.join(out_dir, "fullSpatialJacobian.nii.gz") return outputs class PointsWarpInputSpec(ElastixBaseInputSpec): points_file = File( exists=True, - argstr='-def %s', + argstr="-def %s", mandatory=True, - desc='input points (accepts .vtk triangular meshes).') + desc="input points (accepts .vtk triangular meshes).", + ) transform_file = File( exists=True, mandatory=True, - argstr='-tp %s', - desc='transform-parameter file, only 1') + argstr="-tp %s", + desc="transform-parameter file, only 1", + ) class PointsWarpOutputSpec(TraitedSpec): - warped_file = File(desc='input points displaced in fixed image domain') + warped_file = File(desc="input points displaced in fixed image domain") class PointsWarp(CommandLine): @@ -267,7 +272,7 @@ class PointsWarp(CommandLine): """ - _cmd = 'transformix' + _cmd = "transformix" input_spec = PointsWarpInputSpec output_spec = PointsWarpOutputSpec @@ -277,5 +282,5 @@ def _list_outputs(self): fname, ext = op.splitext(op.basename(self.inputs.points_file)) - outputs['warped_file'] = op.join(out_dir, 'outputpoints%s' % ext) + outputs["warped_file"] = op.join(out_dir, "outputpoints%s" % ext) return outputs diff --git a/nipype/interfaces/elastix/tests/test_auto_AnalyzeWarp.py b/nipype/interfaces/elastix/tests/test_auto_AnalyzeWarp.py index b8eb8686d0..509c0a5a77 100644 --- a/nipype/interfaces/elastix/tests/test_auto_AnalyzeWarp.py +++ b/nipype/interfaces/elastix/tests/test_auto_AnalyzeWarp.py @@ -4,54 +4,28 @@ def test_AnalyzeWarp_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - jac=dict( - argstr='-jac %s', - usedefault=True, - ), - jacmat=dict( - argstr='-jacmat %s', - usedefault=True, - ), - moving_image=dict( - argstr='-in %s', - extensions=None, - ), - num_threads=dict( - argstr='-threads %01d', - nohash=True, - usedefault=True, - ), - output_path=dict( - argstr='-out %s', - mandatory=True, - usedefault=True, - ), - points=dict( - argstr='-def %s', - position=0, - usedefault=True, - ), - transform_file=dict( - argstr='-tp %s', - extensions=None, - mandatory=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + jac=dict(argstr="-jac %s", usedefault=True,), + jacmat=dict(argstr="-jacmat %s", usedefault=True,), + moving_image=dict(argstr="-in %s", extensions=None,), + num_threads=dict(argstr="-threads %01d", nohash=True, usedefault=True,), + output_path=dict(argstr="-out %s", mandatory=True, usedefault=True,), + points=dict(argstr="-def %s", position=0, usedefault=True,), + transform_file=dict(argstr="-tp %s", extensions=None, mandatory=True,), ) inputs = AnalyzeWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AnalyzeWarp_outputs(): output_map = dict( - disp_field=dict(extensions=None, ), - 
jacdet_map=dict(extensions=None, ), - jacmat_map=dict(extensions=None, ), + disp_field=dict(extensions=None,), + jacdet_map=dict(extensions=None,), + jacmat_map=dict(extensions=None,), ) outputs = AnalyzeWarp.output_spec() diff --git a/nipype/interfaces/elastix/tests/test_auto_ApplyWarp.py b/nipype/interfaces/elastix/tests/test_auto_ApplyWarp.py index 902928ae02..fd77478270 100644 --- a/nipype/interfaces/elastix/tests/test_auto_ApplyWarp.py +++ b/nipype/interfaces/elastix/tests/test_auto_ApplyWarp.py @@ -4,39 +4,22 @@ def test_ApplyWarp_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - moving_image=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - ), - num_threads=dict( - argstr='-threads %01d', - nohash=True, - usedefault=True, - ), - output_path=dict( - argstr='-out %s', - mandatory=True, - usedefault=True, - ), - transform_file=dict( - argstr='-tp %s', - extensions=None, - mandatory=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + moving_image=dict(argstr="-in %s", extensions=None, mandatory=True,), + num_threads=dict(argstr="-threads %01d", nohash=True, usedefault=True,), + output_path=dict(argstr="-out %s", mandatory=True, usedefault=True,), + transform_file=dict(argstr="-tp %s", extensions=None, mandatory=True,), ) inputs = ApplyWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyWarp_outputs(): - output_map = dict(warped_file=dict(extensions=None, ), ) + output_map = dict(warped_file=dict(extensions=None,),) outputs = ApplyWarp.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/elastix/tests/test_auto_EditTransform.py b/nipype/interfaces/elastix/tests/test_auto_EditTransform.py index 2720e14f44..687906f8b0 100644 --- a/nipype/interfaces/elastix/tests/test_auto_EditTransform.py +++ b/nipype/interfaces/elastix/tests/test_auto_EditTransform.py @@ -4,26 +4,22 @@ def test_EditTransform_inputs(): input_map = dict( - interpolation=dict( - argstr='FinalBSplineInterpolationOrder', - usedefault=True, - ), - output_file=dict(extensions=None, ), - output_format=dict(argstr='ResultImageFormat', ), - output_type=dict(argstr='ResultImagePixelType', ), - reference_image=dict(extensions=None, ), - transform_file=dict( - extensions=None, - mandatory=True, - ), + interpolation=dict(argstr="FinalBSplineInterpolationOrder", usedefault=True,), + output_file=dict(extensions=None,), + output_format=dict(argstr="ResultImageFormat",), + output_type=dict(argstr="ResultImagePixelType",), + reference_image=dict(extensions=None,), + transform_file=dict(extensions=None, mandatory=True,), ) inputs = EditTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EditTransform_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) + output_map = dict(output_file=dict(extensions=None,),) outputs = EditTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/elastix/tests/test_auto_PointsWarp.py b/nipype/interfaces/elastix/tests/test_auto_PointsWarp.py index 8047d31ecb..83e3092f28 100644 --- a/nipype/interfaces/elastix/tests/test_auto_PointsWarp.py +++ b/nipype/interfaces/elastix/tests/test_auto_PointsWarp.py @@ -4,39 +4,22 @@ def test_PointsWarp_inputs(): 
input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - num_threads=dict( - argstr='-threads %01d', - nohash=True, - usedefault=True, - ), - output_path=dict( - argstr='-out %s', - mandatory=True, - usedefault=True, - ), - points_file=dict( - argstr='-def %s', - extensions=None, - mandatory=True, - ), - transform_file=dict( - argstr='-tp %s', - extensions=None, - mandatory=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + num_threads=dict(argstr="-threads %01d", nohash=True, usedefault=True,), + output_path=dict(argstr="-out %s", mandatory=True, usedefault=True,), + points_file=dict(argstr="-def %s", extensions=None, mandatory=True,), + transform_file=dict(argstr="-tp %s", extensions=None, mandatory=True,), ) inputs = PointsWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PointsWarp_outputs(): - output_map = dict(warped_file=dict(extensions=None, ), ) + output_map = dict(warped_file=dict(extensions=None,),) outputs = PointsWarp.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/elastix/tests/test_auto_Registration.py b/nipype/interfaces/elastix/tests/test_auto_Registration.py index 095d1cd521..18ad8f93a2 100644 --- a/nipype/interfaces/elastix/tests/test_auto_Registration.py +++ b/nipype/interfaces/elastix/tests/test_auto_Registration.py @@ -4,57 +4,28 @@ def test_Registration_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixed_image=dict( - argstr='-f %s', - extensions=None, - mandatory=True, - ), - fixed_mask=dict( - argstr='-fMask %s', - extensions=None, - ), - initial_transform=dict( - argstr='-t0 %s', - extensions=None, - ), - moving_image=dict( - argstr='-m %s', - extensions=None, - mandatory=True, - ), - moving_mask=dict( - argstr='-mMask %s', - extensions=None, - ), - num_threads=dict( - argstr='-threads %01d', - nohash=True, - usedefault=True, - ), - output_path=dict( - argstr='-out %s', - mandatory=True, - usedefault=True, - ), - parameters=dict( - argstr='-p %s...', - mandatory=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + fixed_image=dict(argstr="-f %s", extensions=None, mandatory=True,), + fixed_mask=dict(argstr="-fMask %s", extensions=None,), + initial_transform=dict(argstr="-t0 %s", extensions=None,), + moving_image=dict(argstr="-m %s", extensions=None, mandatory=True,), + moving_mask=dict(argstr="-mMask %s", extensions=None,), + num_threads=dict(argstr="-threads %01d", nohash=True, usedefault=True,), + output_path=dict(argstr="-out %s", mandatory=True, usedefault=True,), + parameters=dict(argstr="-p %s...", mandatory=True,), ) inputs = Registration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Registration_outputs(): output_map = dict( transform=dict(), - warped_file=dict(extensions=None, ), + warped_file=dict(extensions=None,), warped_files=dict(), warped_files_flags=dict(), ) diff --git a/nipype/interfaces/elastix/utils.py b/nipype/interfaces/elastix/utils.py index 52fd7023a2..3628416b1e 100644 --- a/nipype/interfaces/elastix/utils.py +++ b/nipype/interfaces/elastix/utils.py @@ -10,49 +10,60 @@ import os.path as op from ... 
import logging
-from ..base import (BaseInterface, BaseInterfaceInputSpec, isdefined,
- TraitedSpec, File, traits)
-iflogger = logging.getLogger('nipype.interface')
+from ..base import (
+ BaseInterface,
+ BaseInterfaceInputSpec,
+ isdefined,
+ TraitedSpec,
+ File,
+ traits,
+)
+
+iflogger = logging.getLogger("nipype.interface")
class EditTransformInputSpec(BaseInterfaceInputSpec):
 transform_file = File(
- exists=True, mandatory=True, desc='transform-parameter file, only 1')
+ exists=True, mandatory=True, desc="transform-parameter file, only 1"
+ )
 reference_image = File(
 exists=True,
- desc=('set a new reference image to change the '
- 'target coordinate system.'))
+ desc=("set a new reference image to change the " "target coordinate system."),
+ )
 interpolation = traits.Enum(
- 'cubic',
- 'linear',
- 'nearest',
+ "cubic",
+ "linear",
+ "nearest",
 usedefault=True,
- argstr='FinalBSplineInterpolationOrder',
- desc='set a new interpolator for transformation')
+ argstr="FinalBSplineInterpolationOrder",
+ desc="set a new interpolator for transformation",
+ )
 output_type = traits.Enum(
- 'float',
- 'unsigned char',
- 'unsigned short',
- 'short',
- 'unsigned long',
- 'long',
- 'double',
- argstr='ResultImagePixelType',
- desc='set a new output pixel type for resampled images')
+ "float",
+ "unsigned char",
+ "unsigned short",
+ "short",
+ "unsigned long",
+ "long",
+ "double",
+ argstr="ResultImagePixelType",
+ desc="set a new output pixel type for resampled images",
+ )
 output_format = traits.Enum(
- 'nii.gz',
- 'nii',
- 'mhd',
- 'hdr',
- 'vtk',
- argstr='ResultImageFormat',
- desc='set a new image format for resampled images')
- output_file = File(desc='the filename for the resulting transform file')
+ "nii.gz",
+ "nii",
+ "mhd",
+ "hdr",
+ "vtk",
+ argstr="ResultImageFormat",
+ desc="set a new image format for resampled images",
+ )
+ output_file = File(desc="the filename for the resulting transform file")
class EditTransformOutputSpec(TraitedSpec):
- output_file = File(exists=True, desc='output transform file')
+ output_file = File(exists=True, desc="output transform file")
class EditTransform(BaseInterface):
@@ -73,38 +84,42 @@ class EditTransform(BaseInterface):
 input_spec = EditTransformInputSpec
 output_spec = EditTransformOutputSpec
- _out_file = ''
- _pattern = '\((?P<entry>%s\s\"?)([-\.\s\w]+)(\"?\))'
+ _out_file = ""
+ _pattern = '\((?P<entry>%s\s"?)([-\.\s\w]+)("?\))'
- _interp = {'nearest': 0, 'linear': 1, 'cubic': 3}
+ _interp = {"nearest": 0, "linear": 1, "cubic": 3}
 def _run_interface(self, runtime):
 import re
 import nibabel as nb
 import numpy as np
- contents = ''
+ contents = ""
- with open(self.inputs.transform_file, 'r') as f:
+ with open(self.inputs.transform_file, "r") as f:
 contents = f.read()
 if isdefined(self.inputs.output_type):
- p = re.compile((self._pattern %
- 'ResultImagePixelType').decode('string-escape'))
- rep = '(\g<entry>%s\g<3>' % self.inputs.output_type
+ p = re.compile(
+ (self._pattern % "ResultImagePixelType").decode("string-escape")
+ )
+ rep = "(\g<entry>%s\g<3>" % self.inputs.output_type
 contents = p.sub(rep, contents)
 if isdefined(self.inputs.output_format):
 p = re.compile(
- (self._pattern % 'ResultImageFormat').decode('string-escape'))
- rep = '(\g<entry>%s\g<3>' % self.inputs.output_format
+ (self._pattern % "ResultImageFormat").decode("string-escape")
+ )
+ rep = "(\g<entry>%s\g<3>" % self.inputs.output_format
 contents = p.sub(rep, contents)
 if isdefined(self.inputs.interpolation):
 p = re.compile(
- (self._pattern %
- 'FinalBSplineInterpolationOrder').decode('string-escape'))
- rep = '(\g<entry>%s\g<3>' % self._interp[self.inputs.interpolation]
+ (self._pattern % "FinalBSplineInterpolationOrder").decode(
+ "string-escape"
+ )
+ )
+ rep = "(\g<entry>%s\g<3>" % self._interp[self.inputs.interpolation]
 contents = p.sub(rep, contents)
 if isdefined(self.inputs.reference_image):
@@ -113,19 +128,19 @@ def _run_interface(self, runtime):
 if len(im.header.get_zooms()) == 4:
 im = nb.func.four_to_three(im)[0]
- size = ' '.join(["%01d" % s for s in im.shape])
- p = re.compile((self._pattern % 'Size').decode('string-escape'))
- rep = '(\g<entry>%s\g<3>' % size
+ size = " ".join(["%01d" % s for s in im.shape])
+ p = re.compile((self._pattern % "Size").decode("string-escape"))
+ rep = "(\g<entry>%s\g<3>" % size
 contents = p.sub(rep, contents)
- index = ' '.join(["0" for s in im.shape])
- p = re.compile((self._pattern % 'Index').decode('string-escape'))
- rep = '(\g<entry>%s\g<3>' % index
+ index = " ".join(["0" for s in im.shape])
+ p = re.compile((self._pattern % "Index").decode("string-escape"))
+ rep = "(\g<entry>%s\g<3>" % index
 contents = p.sub(rep, contents)
- spacing = ' '.join(["%0.4f" % f for f in im.header.get_zooms()])
- p = re.compile((self._pattern % 'Spacing').decode('string-escape'))
- rep = '(\g<entry>%s\g<3>' % spacing
+ spacing = " ".join(["%0.4f" % f for f in im.header.get_zooms()])
+ p = re.compile((self._pattern % "Spacing").decode("string-escape"))
+ rep = "(\g<entry>%s\g<3>" % spacing
 contents = p.sub(rep, contents)
 itkmat = np.eye(4)
@@ -133,37 +148,36 @@ def _run_interface(self, runtime):
 itkmat[1, 1] = -1
 affine = np.dot(itkmat, im.affine)
- dirs = ' '.join(
- ['%0.4f' % f for f in affine[0:3, 0:3].reshape(-1)])
- orig = ' '.join(['%0.4f' % f for f in affine[0:3, 3].reshape(-1)])
+ dirs = " ".join(["%0.4f" % f for f in affine[0:3, 0:3].reshape(-1)])
+ orig = " ".join(["%0.4f" % f for f in affine[0:3, 3].reshape(-1)])
 # p = re.compile((self._pattern % 'Direction').decode('string-escape'))
 # rep = '(\g<entry>%s\g<3>' % dirs
 # contents = p.sub(rep, contents)
- p = re.compile((self._pattern % 'Origin').decode('string-escape'))
- rep = '(\g<entry>%s\g<3>' % orig
+ p = re.compile((self._pattern % "Origin").decode("string-escape"))
+ rep = "(\g<entry>%s\g<3>" % orig
 contents = p.sub(rep, contents)
- with open(self._get_outfile(), 'w') as of:
+ with open(self._get_outfile(), "w") as of:
 of.write(contents)
 return runtime
 def _list_outputs(self):
 outputs = self.output_spec().get()
- outputs['output_file'] = getattr(self, '_out_file')
+ outputs["output_file"] = getattr(self, "_out_file")
 return outputs
 def _get_outfile(self):
- val = getattr(self, '_out_file')
- if val is not None and val != '':
+ val = getattr(self, "_out_file")
+ if val is not None and val != "":
 return val
 if isdefined(self.inputs.output_file):
- setattr(self, '_out_file', self.inputs.output_file)
+ setattr(self, "_out_file", self.inputs.output_file)
 return self.inputs.output_file
 out_file = op.abspath(op.basename(self.inputs.transform_file))
- setattr(self, '_out_file', out_file)
+ setattr(self, "_out_file", out_file)
 return out_file
diff --git a/nipype/interfaces/freesurfer/__init__.py b/nipype/interfaces/freesurfer/__init__.py
index 44c939706e..803ac571cb 100644
--- a/nipype/interfaces/freesurfer/__init__.py
+++ b/nipype/interfaces/freesurfer/__init__.py
@@ -5,25 +5,91 @@
 from .base import Info, FSCommand, no_freesurfer
 from .preprocess import (
- ParseDICOMDir, UnpackSDICOMDir, MRIConvert, Resample, ReconAll, BBRegister,
- ApplyVolTransform, Smooth, DICOMConvert, RobustRegister, FitMSParams,
- SynthesizeFLASH, MNIBiasCorrection, WatershedSkullStrip, Normalize,
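
To see the substitution machinery of EditTransform above in action: _pattern is formatted with a parameter name and the matched entry is rewritten in place, with the named group and group 3 preserving the delimiters around the value. A minimal sketch; the parameter line is hypothetical, while the pattern and replacement shape are those of the class above:

import re

pattern = r'\((?P<entry>%s\s"?)([-\.\s\w]+)("?\))'  # EditTransform._pattern

p = re.compile(pattern % "Size")
rep = r"(\g<entry>%s\g<3>" % "128 128 88"  # hypothetical new size

# hypothetical line from a transform-parameter file
print(p.sub(rep, "(Size 256 256 176)"))  # (Size 128 128 88)

-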
CANormalize, CARegister, CALabel, MRIsCALabel, SegmentCC, SegmentWM, - EditWMwithAseg, ConcatenateLTA) -from .model import (MRISPreproc, MRISPreprocReconAll, GLMFit, OneSampleTTest, - Binarize, Concatenate, SegStats, SegStatsReconAll, - Label2Vol, MS_LDA, Label2Label, Label2Annot, - SphericalAverage) + ParseDICOMDir, + UnpackSDICOMDir, + MRIConvert, + Resample, + ReconAll, + BBRegister, + ApplyVolTransform, + Smooth, + DICOMConvert, + RobustRegister, + FitMSParams, + SynthesizeFLASH, + MNIBiasCorrection, + WatershedSkullStrip, + Normalize, + CANormalize, + CARegister, + CALabel, + MRIsCALabel, + SegmentCC, + SegmentWM, + EditWMwithAseg, + ConcatenateLTA, +) +from .model import ( + MRISPreproc, + MRISPreprocReconAll, + GLMFit, + OneSampleTTest, + Binarize, + Concatenate, + SegStats, + SegStatsReconAll, + Label2Vol, + MS_LDA, + Label2Label, + Label2Annot, + SphericalAverage, +) from .utils import ( - SampleToSurface, SurfaceSmooth, SurfaceTransform, Surface2VolTransform, - SurfaceSnapshots, ApplyMask, MRIsConvert, MRITessellate, MRIPretess, - MRIMarchingCubes, SmoothTessellation, MakeAverageSubject, - ExtractMainComponent, Tkregister2, AddXFormToHeader, - CheckTalairachAlignment, TalairachAVI, TalairachQC, RemoveNeck, MRIFill, - MRIsInflate, Sphere, FixTopology, EulerNumber, RemoveIntersection, - MakeSurfaces, Curvature, CurvatureStats, Jacobian, MRIsCalc, VolumeMask, - ParcellationStats, Contrast, RelabelHypointensities, Aparc2Aseg, Apas2Aseg, - MRIsExpand, MRIsCombine) -from .longitudinal import (RobustTemplate, FuseSegmentations) -from .registration import (MPRtoMNI305, RegisterAVItoTalairach, EMRegister, - Register, Paint, MRICoreg) + SampleToSurface, + SurfaceSmooth, + SurfaceTransform, + Surface2VolTransform, + SurfaceSnapshots, + ApplyMask, + MRIsConvert, + MRITessellate, + MRIPretess, + MRIMarchingCubes, + SmoothTessellation, + MakeAverageSubject, + ExtractMainComponent, + Tkregister2, + AddXFormToHeader, + CheckTalairachAlignment, + TalairachAVI, + TalairachQC, + RemoveNeck, + MRIFill, + MRIsInflate, + Sphere, + FixTopology, + EulerNumber, + RemoveIntersection, + MakeSurfaces, + Curvature, + CurvatureStats, + Jacobian, + MRIsCalc, + VolumeMask, + ParcellationStats, + Contrast, + RelabelHypointensities, + Aparc2Aseg, + Apas2Aseg, + MRIsExpand, + MRIsCombine, +) +from .longitudinal import RobustTemplate, FuseSegmentations +from .registration import ( + MPRtoMNI305, + RegisterAVItoTalairach, + EMRegister, + Register, + Paint, + MRICoreg, +) diff --git a/nipype/interfaces/freesurfer/base.py b/nipype/interfaces/freesurfer/base.py index 3e82624cce..66023386da 100644 --- a/nipype/interfaces/freesurfer/base.py +++ b/nipype/interfaces/freesurfer/base.py @@ -18,10 +18,18 @@ from ... 
import LooseVersion from ...utils.filemanip import fname_presuffix -from ..base import (CommandLine, Directory, CommandLineInputSpec, isdefined, - traits, TraitedSpec, File, PackageInfo) +from ..base import ( + CommandLine, + Directory, + CommandLineInputSpec, + isdefined, + traits, + TraitedSpec, + File, + PackageInfo, +) -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" class Info(PackageInfo): @@ -35,9 +43,9 @@ class Info(PackageInfo): >>> Info.subjectsdir() # doctest: +SKIP """ - if os.getenv('FREESURFER_HOME'): - version_file = os.path.join( - os.getenv('FREESURFER_HOME'), 'build-stamp.txt') + + if os.getenv("FREESURFER_HOME"): + version_file = os.path.join(os.getenv("FREESURFER_HOME"), "build-stamp.txt") @staticmethod def parse_version(raw_info): @@ -51,30 +59,30 @@ def looseversion(cls): """ ver = cls.version() if ver is None: - return LooseVersion('0.0.0') + return LooseVersion("0.0.0") - vinfo = ver.rstrip().split('-') + vinfo = ver.rstrip().split("-") try: int(vinfo[-1], 16) except ValueError: - githash = '' + githash = "" else: - githash = '.' + vinfo[-1] + githash = "." + vinfo[-1] # As of FreeSurfer v6.0.0, the final component is a githash if githash: - if vinfo[3] == 'dev': + if vinfo[3] == "dev": # This will need updating when v6.0.1 comes out - vstr = '6.0.0-dev' + githash - elif vinfo[5][0] == 'v': + vstr = "6.0.0-dev" + githash + elif vinfo[5][0] == "v": vstr = vinfo[5][1:] else: - raise RuntimeError('Unknown version string: ' + ver) + raise RuntimeError("Unknown version string: " + ver) # Retain pre-6.0.0 heuristics - elif 'dev' in ver: - vstr = vinfo[-1] + '-dev' + elif "dev" in ver: + vstr = vinfo[-1] + "-dev" else: - vstr = ver.rstrip().split('-v')[-1] + vstr = ver.rstrip().split("-v")[-1] return LooseVersion(vstr) @@ -96,12 +104,12 @@ def subjectsdir(cls): """ if cls.version(): - return os.environ['SUBJECTS_DIR'] + return os.environ["SUBJECTS_DIR"] return None class FSTraitedSpec(CommandLineInputSpec): - subjects_dir = Directory(exists=True, desc='subjects directory') + subjects_dir = Directory(exists=True, desc="subjects directory") class FSCommand(CommandLine): @@ -116,7 +124,7 @@ class FSCommand(CommandLine): def __init__(self, **inputs): super(FSCommand, self).__init__(**inputs) - self.inputs.on_trait_change(self._subjects_dir_update, 'subjects_dir') + self.inputs.on_trait_change(self._subjects_dir_update, "subjects_dir") if not self._subjects_dir: self._subjects_dir = Info.subjectsdir() if not isdefined(self.inputs.subjects_dir) and self._subjects_dir: @@ -125,27 +133,20 @@ def __init__(self, **inputs): def _subjects_dir_update(self): if self.inputs.subjects_dir: - self.inputs.environ.update({ - 'SUBJECTS_DIR': self.inputs.subjects_dir - }) + self.inputs.environ.update({"SUBJECTS_DIR": self.inputs.subjects_dir}) @classmethod def set_default_subjects_dir(cls, subjects_dir): cls._subjects_dir = subjects_dir def run(self, **inputs): - if 'subjects_dir' in inputs: - self.inputs.subjects_dir = inputs['subjects_dir'] + if "subjects_dir" in inputs: + self.inputs.subjects_dir = inputs["subjects_dir"] self._subjects_dir_update() return super(FSCommand, self).run(**inputs) - def _gen_fname(self, - basename, - fname=None, - cwd=None, - suffix='_fs', - use_ext=True): - '''Define a generic mapping for a single outfile + def _gen_fname(self, basename, fname=None, cwd=None, suffix="_fs", use_ext=True): + """Define a generic mapping for a single outfile The filename is potentially autogenerated by suffixing inputs.infile @@ -159,15 +160,14 @@ def 
_gen_fname(self, prefix paths with cwd, otherwise os.getcwd() suffix : string default suffix - ''' - if basename == '': - msg = 'Unable to generate filename for command %s. ' % self.cmd - msg += 'basename is not set!' + """ + if basename == "": + msg = "Unable to generate filename for command %s. " % self.cmd + msg += "basename is not set!" raise ValueError(msg) if cwd is None: cwd = os.getcwd() - fname = fname_presuffix( - basename, suffix=suffix, use_ext=use_ext, newpath=cwd) + fname = fname_presuffix(basename, suffix=suffix, use_ext=use_ext, newpath=cwd) return fname @property @@ -196,9 +196,9 @@ def _associated_file(in_file, out_name): inspecting the surface data structure. """ path, base = os.path.split(out_name) - if path == '': + if path == "": path, in_file = os.path.split(in_file) - hemis = ('lh.', 'rh.') + hemis = ("lh.", "rh.") if in_file[:3] in hemis and base[:3] not in hemis: base = in_file[:3] + base return os.path.join(path, base) @@ -207,22 +207,24 @@ def _associated_file(in_file, out_name): class FSScriptCommand(FSCommand): """ Support for Freesurfer script commands with log terminal_output """ - _terminal_output = 'file' + + _terminal_output = "file" _always_run = False def _list_outputs(self): outputs = self._outputs().get() - outputs['log_file'] = os.path.abspath('output.nipype') + outputs["log_file"] = os.path.abspath("output.nipype") return outputs class FSScriptOutputSpec(TraitedSpec): log_file = File( - 'output.nipype', usedefault=True, exists=True, desc="The output log") + "output.nipype", usedefault=True, exists=True, desc="The output log" + ) class FSTraitedSpecOpenMP(FSTraitedSpec): - num_threads = traits.Int(desc='allows for specifying more threads') + num_threads = traits.Int(desc="allows for specifying more threads") class FSCommandOpenMP(FSCommand): @@ -238,25 +240,24 @@ class FSCommandOpenMP(FSCommand): def __init__(self, **inputs): super(FSCommandOpenMP, self).__init__(**inputs) - self.inputs.on_trait_change(self._num_threads_update, 'num_threads') + self.inputs.on_trait_change(self._num_threads_update, "num_threads") if not self._num_threads: - self._num_threads = os.environ.get('OMP_NUM_THREADS', None) + self._num_threads = os.environ.get("OMP_NUM_THREADS", None) if not self._num_threads: - self._num_threads = os.environ.get('NSLOTS', None) + self._num_threads = os.environ.get("NSLOTS", None) if not isdefined(self.inputs.num_threads) and self._num_threads: self.inputs.num_threads = int(self._num_threads) self._num_threads_update() def _num_threads_update(self): if self.inputs.num_threads: - self.inputs.environ.update({ - 'OMP_NUM_THREADS': - str(self.inputs.num_threads) - }) + self.inputs.environ.update( + {"OMP_NUM_THREADS": str(self.inputs.num_threads)} + ) def run(self, **inputs): - if 'num_threads' in inputs: - self.inputs.num_threads = inputs['num_threads'] + if "num_threads" in inputs: + self.inputs.num_threads = inputs["num_threads"] self._num_threads_update() return super(FSCommandOpenMP, self).run(**inputs) diff --git a/nipype/interfaces/freesurfer/longitudinal.py b/nipype/interfaces/freesurfer/longitudinal.py index eed32173b4..aa5f928550 100644 --- a/nipype/interfaces/freesurfer/longitudinal.py +++ b/nipype/interfaces/freesurfer/longitudinal.py @@ -7,13 +7,11 @@ import os from ... 
import logging -from ..base import (TraitedSpec, File, traits, InputMultiPath, OutputMultiPath, - isdefined) -from .base import (FSCommand, FSTraitedSpec, FSCommandOpenMP, - FSTraitedSpecOpenMP) +from ..base import TraitedSpec, File, traits, InputMultiPath, OutputMultiPath, isdefined +from .base import FSCommand, FSTraitedSpec, FSCommandOpenMP, FSTraitedSpecOpenMP -__docformat__ = 'restructuredtext' -iflogger = logging.getLogger('nipype.interface') +__docformat__ = "restructuredtext" +iflogger = logging.getLogger("nipype.interface") class RobustTemplateInputSpec(FSTraitedSpecOpenMP): @@ -21,78 +19,92 @@ class RobustTemplateInputSpec(FSTraitedSpecOpenMP): in_files = InputMultiPath( File(exists=True), mandatory=True, - argstr='--mov %s', - desc='input movable volumes to be aligned to common mean/median ' - 'template') + argstr="--mov %s", + desc="input movable volumes to be aligned to common mean/median " "template", + ) out_file = File( - 'mri_robust_template_out.mgz', + "mri_robust_template_out.mgz", mandatory=True, usedefault=True, - argstr='--template %s', - desc='output template volume (final mean/median image)') + argstr="--template %s", + desc="output template volume (final mean/median image)", + ) auto_detect_sensitivity = traits.Bool( - argstr='--satit', - xor=['outlier_sensitivity'], + argstr="--satit", + xor=["outlier_sensitivity"], mandatory=True, - desc='auto-detect good sensitivity (recommended for head or full ' - 'brain scans)') + desc="auto-detect good sensitivity (recommended for head or full " + "brain scans)", + ) outlier_sensitivity = traits.Float( - argstr='--sat %.4f', - xor=['auto_detect_sensitivity'], + argstr="--sat %.4f", + xor=["auto_detect_sensitivity"], mandatory=True, desc='set outlier sensitivity manually (e.g. "--sat 4.685" ). 
Higher ' - 'values mean less sensitivity.') + "values mean less sensitivity.", + ) # optional transform_outputs = traits.Either( InputMultiPath(File(exists=False)), traits.Bool, - argstr='--lta %s', - desc='output xforms to template (for each input)') + argstr="--lta %s", + desc="output xforms to template (for each input)", + ) intensity_scaling = traits.Bool( default_value=False, - argstr='--iscale', - desc='allow also intensity scaling (default off)') + argstr="--iscale", + desc="allow also intensity scaling (default off)", + ) scaled_intensity_outputs = traits.Either( InputMultiPath(File(exists=False)), traits.Bool, - argstr='--iscaleout %s', - desc='final intensity scales (will activate --iscale)') + argstr="--iscaleout %s", + desc="final intensity scales (will activate --iscale)", + ) subsample_threshold = traits.Int( - argstr='--subsample %d', - desc='subsample if dim > # on all axes (default no subs.)') + argstr="--subsample %d", + desc="subsample if dim > # on all axes (default no subs.)", + ) average_metric = traits.Enum( - 'median', - 'mean', - argstr='--average %d', - desc='construct template from: 0 Mean, 1 Median (default)') + "median", + "mean", + argstr="--average %d", + desc="construct template from: 0 Mean, 1 Median (default)", + ) initial_timepoint = traits.Int( - argstr='--inittp %d', - desc='use TP# for spacial init (default random), 0: no init') + argstr="--inittp %d", + desc="use TP# for spatial init (default random), 0: no init", + ) fixed_timepoint = traits.Bool( default_value=False, - argstr='--fixtp', - desc='map everthing to init TP# (init TP is not resampled)') + argstr="--fixtp", + desc="map everything to init TP# (init TP is not resampled)", + ) no_iteration = traits.Bool( default_value=False, - argstr='--noit', - desc='do not iterate, just create first template') + argstr="--noit", + desc="do not iterate, just create first template", + ) initial_transforms = InputMultiPath( File(exists=True), - argstr='--ixforms %s', - desc='use initial transforms (lta) on source') + argstr="--ixforms %s", + desc="use initial transforms (lta) on source", + ) in_intensity_scales = InputMultiPath( - File(exists=True), - argstr='--iscalein %s', - desc='use initial intensity scales') + File(exists=True), argstr="--iscalein %s", desc="use initial intensity scales" + ) class RobustTemplateOutputSpec(TraitedSpec): out_file = File( - exists=True, desc='output template volume (final mean/median image)') + exists=True, desc="output template volume (final mean/median image)" + ) transform_outputs = OutputMultiPath( - File(exists=True), desc="output xform files from moving to template") + File(exists=True), desc="output xform files from moving to template" + ) scaled_intensity_outputs = OutputMultiPath( - File(exists=True), desc="output final intensity scales") + File(exists=True), desc="output final intensity scales" + ) class RobustTemplate(FSCommandOpenMP): @@ -135,76 +147,73 @@ class RobustTemplate(FSCommandOpenMP): """ - _cmd = 'mri_robust_template' + _cmd = "mri_robust_template" input_spec = RobustTemplateInputSpec output_spec = RobustTemplateOutputSpec def _format_arg(self, name, spec, value): - if name == 'average_metric': + if name == "average_metric": # return enumeration value return spec.argstr % {"mean": 0, "median": 1}[value] - if name in ('transform_outputs', 'scaled_intensity_outputs'): + if name in ("transform_outputs", "scaled_intensity_outputs"): value = self._list_outputs()[name] return super(RobustTemplate, self)._format_arg(name, spec, value) def _list_outputs(self):
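+        # When transform_outputs or scaled_intensity_outputs are requested as booleans, the filenames are autogenerated below as tp<N>.lta / is<N>.txt, zero-padded to two digits once there are more than nine input volumes.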
outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) n_files = len(self.inputs.in_files) - fmt = '{}{:02d}.{}' if n_files > 9 else '{}{:d}.{}' + fmt = "{}{:02d}.{}" if n_files > 9 else "{}{:d}.{}" if isdefined(self.inputs.transform_outputs): fnames = self.inputs.transform_outputs if fnames is True: - fnames = [ - fmt.format('tp', i + 1, 'lta') for i in range(n_files) - ] - outputs['transform_outputs'] = [os.path.abspath(x) for x in fnames] + fnames = [fmt.format("tp", i + 1, "lta") for i in range(n_files)] + outputs["transform_outputs"] = [os.path.abspath(x) for x in fnames] if isdefined(self.inputs.scaled_intensity_outputs): fnames = self.inputs.scaled_intensity_outputs if fnames is True: - fnames = [ - fmt.format('is', i + 1, 'txt') for i in range(n_files) - ] - outputs['scaled_intensity_outputs'] = [ - os.path.abspath(x) for x in fnames - ] + fnames = [fmt.format("is", i + 1, "txt") for i in range(n_files)] + outputs["scaled_intensity_outputs"] = [os.path.abspath(x) for x in fnames] return outputs class FuseSegmentationsInputSpec(FSTraitedSpec): # required subject_id = traits.String( - argstr='%s', position=-3, desc="subject_id being processed") + argstr="%s", position=-3, desc="subject_id being processed" + ) timepoints = InputMultiPath( traits.String(), mandatory=True, - argstr='%s', + argstr="%s", position=-2, - desc='subject_ids or timepoints to be processed') + desc="subject_ids or timepoints to be processed", + ) out_file = File( - exists=False, - mandatory=True, - position=-1, - desc="output fused segmentation file") + exists=False, mandatory=True, position=-1, desc="output fused segmentation file" + ) in_segmentations = InputMultiPath( File(exists=True), argstr="-a %s", mandatory=True, desc="name of aseg file to use (default: aseg.mgz) \ - must include the aseg files for all the given timepoints") + must include the aseg files for all the given timepoints", + ) in_segmentations_noCC = InputMultiPath( File(exists=True), argstr="-c %s", mandatory=True, desc="name of aseg file w/o CC labels (default: aseg.auto_noCCseg.mgz) \ - must include the corresponding file for all the given timepoints") + must include the corresponding file for all the given timepoints", + ) in_norms = InputMultiPath( File(exists=True), argstr="-n %s", mandatory=True, desc="-n - name of norm file to use (default: norm.mgs) \ must include the corresponding norm file for all given timepoints \ - as well as for the current subject") + as well as for the current subject", + ) class FuseSegmentationsOutputSpec(TraitedSpec): @@ -228,17 +237,17 @@ class FuseSegmentations(FSCommand): 'mri_fuse_segmentations -n norm.mgz -a aseg.mgz -c aseg.mgz tp.long.A.template tp1 tp2' """ - _cmd = 'mri_fuse_segmentations' + _cmd = "mri_fuse_segmentations" input_spec = FuseSegmentationsInputSpec output_spec = FuseSegmentationsOutputSpec def _format_arg(self, name, spec, value): - if name in ('in_segmentations', 'in_segmentations_noCC', 'in_norms'): + if name in ("in_segmentations", "in_segmentations_noCC", "in_norms"): # return enumeration value return spec.argstr % os.path.basename(value[0]) return super(FuseSegmentations, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs diff --git a/nipype/interfaces/freesurfer/model.py 
b/nipype/interfaces/freesurfer/model.py index 526f0a9919..8bf7918ae4 100644 --- a/nipype/interfaces/freesurfer/model.py +++ b/nipype/interfaces/freesurfer/model.py @@ -8,84 +8,106 @@ import os from ...utils.filemanip import fname_presuffix, split_filename -from ..base import (TraitedSpec, File, traits, InputMultiPath, OutputMultiPath, - Directory, isdefined) +from ..base import ( + TraitedSpec, + File, + traits, + InputMultiPath, + OutputMultiPath, + Directory, + isdefined, +) from .base import FSCommand, FSTraitedSpec from .utils import copy2subjdir -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" class MRISPreprocInputSpec(FSTraitedSpec): - out_file = File(argstr='--out %s', genfile=True, desc='output filename') + out_file = File(argstr="--out %s", genfile=True, desc="output filename") target = traits.Str( - argstr='--target %s', mandatory=True, desc='target subject name') + argstr="--target %s", mandatory=True, desc="target subject name" + ) hemi = traits.Enum( - 'lh', - 'rh', - argstr='--hemi %s', + "lh", + "rh", + argstr="--hemi %s", mandatory=True, - desc='hemisphere for source and target') + desc="hemisphere for source and target", + ) surf_measure = traits.Str( - argstr='--meas %s', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), - desc='Use subject/surf/hemi.surf_measure as input') + argstr="--meas %s", + xor=("surf_measure", "surf_measure_file", "surf_area"), + desc="Use subject/surf/hemi.surf_measure as input", + ) surf_area = traits.Str( - argstr='--area %s', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), - desc= - 'Extract vertex area from subject/surf/hemi.surfname to use as input.') + argstr="--area %s", + xor=("surf_measure", "surf_measure_file", "surf_area"), + desc="Extract vertex area from subject/surf/hemi.surfname to use as input.", + ) subjects = traits.List( - argstr='--s %s...', - xor=('subjects', 'fsgd_file', 'subject_file'), - desc='subjects from who measures are calculated') + argstr="--s %s...", + xor=("subjects", "fsgd_file", "subject_file"), + desc="subjects from whom measures are calculated", + ) fsgd_file = File( exists=True, - argstr='--fsgd %s', - xor=('subjects', 'fsgd_file', 'subject_file'), - desc='specify subjects using fsgd file') + argstr="--fsgd %s", + xor=("subjects", "fsgd_file", "subject_file"), + desc="specify subjects using fsgd file", + ) subject_file = File( exists=True, - argstr='--f %s', - xor=('subjects', 'fsgd_file', 'subject_file'), - desc='file specifying subjects separated by white space') + argstr="--f %s", + xor=("subjects", "fsgd_file", "subject_file"), + desc="file specifying subjects separated by white space", + ) surf_measure_file = InputMultiPath( File(exists=True), - argstr='--is %s...', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), - desc='file alternative to surfmeas, still requires list of subjects') - source_format = traits.Str(argstr='--srcfmt %s', desc='source format') + argstr="--is %s...", + xor=("surf_measure", "surf_measure_file", "surf_area"), + desc="file alternative to surfmeas, still requires list of subjects", + ) + source_format = traits.Str(argstr="--srcfmt %s", desc="source format") surf_dir = traits.Str( - argstr='--surfdir %s', desc='alternative directory (instead of surf)') + argstr="--surfdir %s", desc="alternative directory (instead of surf)" + ) vol_measure_file = InputMultiPath( traits.Tuple(File(exists=True), File(exists=True)), - argstr='--iv %s %s...', - desc='list of volume measure and reg file tuples') + argstr="--iv %s %s...", + desc="list of 
volume measure and reg file tuples", + ) proj_frac = traits.Float( - argstr='--projfrac %s', desc='projection fraction for vol2surf') + argstr="--projfrac %s", desc="projection fraction for vol2surf" + ) fwhm = traits.Float( - argstr='--fwhm %f', - xor=['num_iters'], - desc='smooth by fwhm mm on the target surface') + argstr="--fwhm %f", + xor=["num_iters"], + desc="smooth by fwhm mm on the target surface", + ) num_iters = traits.Int( - argstr='--niters %d', - xor=['fwhm'], - desc='niters : smooth by niters on the target surface') + argstr="--niters %d", + xor=["fwhm"], + desc="niters : smooth by niters on the target surface", + ) fwhm_source = traits.Float( - argstr='--fwhm-src %f', - xor=['num_iters_source'], - desc='smooth by fwhm mm on the source surface') + argstr="--fwhm-src %f", + xor=["num_iters_source"], + desc="smooth by fwhm mm on the source surface", + ) num_iters_source = traits.Int( - argstr='--niterssrc %d', - xor=['fwhm_source'], - desc='niters : smooth by niters on the source surface') + argstr="--niterssrc %d", + xor=["fwhm_source"], + desc="niters : smooth by niters on the source surface", + ) smooth_cortex_only = traits.Bool( - argstr='--smooth-cortex-only', - desc='only smooth cortex (ie, exclude medial wall)') + argstr="--smooth-cortex-only", + desc="only smooth cortex (ie, exclude medial wall)", + ) class MRISPreprocOutputSpec(TraitedSpec): - out_file = File(desc='preprocessed output file') + out_file = File(desc="preprocessed output file") class MRISPreproc(FSCommand): @@ -106,22 +128,22 @@ class MRISPreproc(FSCommand): """ - _cmd = 'mris_preproc' + _cmd = "mris_preproc" input_spec = MRISPreprocInputSpec output_spec = MRISPreprocOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outfile = self.inputs.out_file - outputs['out_file'] = outfile + outputs["out_file"] = outfile if not isdefined(outfile): - outputs['out_file'] = os.path.join( - os.getcwd(), 'concat_%s_%s.mgz' % (self.inputs.hemi, - self.inputs.target)) + outputs["out_file"] = os.path.join( + os.getcwd(), "concat_%s_%s.mgz" % (self.inputs.hemi, self.inputs.target) + ) return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._list_outputs()[name] return None @@ -129,29 +151,34 @@ def _gen_filename(self, name): class MRISPreprocReconAllInputSpec(MRISPreprocInputSpec): surf_measure_file = File( exists=True, - argstr='--meas %s', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), - desc='file necessary for surfmeas') + argstr="--meas %s", + xor=("surf_measure", "surf_measure_file", "surf_area"), + desc="file necessary for surfmeas", + ) surfreg_files = InputMultiPath( File(exists=True), argstr="--surfreg %s", - requires=['lh_surfreg_target', 'rh_surfreg_target'], - desc="lh and rh input surface registration files") + requires=["lh_surfreg_target", "rh_surfreg_target"], + desc="lh and rh input surface registration files", + ) lh_surfreg_target = File( - desc="Implicit target surface registration file", - requires=['surfreg_files']) + desc="Implicit target surface registration file", requires=["surfreg_files"] + ) rh_surfreg_target = File( - desc="Implicit target surface registration file", - requires=['surfreg_files']) + desc="Implicit target surface registration file", requires=["surfreg_files"] + ) subject_id = traits.String( - 'subject_id', - argstr='--s %s', + "subject_id", + argstr="--s %s", usedefault=True, - xor=('subjects', 'fsgd_file', 'subject_file', 'subject_id'), - desc='subject from whom measures are calculated') + 
xor=("subjects", "fsgd_file", "subject_file", "subject_id"), + desc="subject from whom measures are calculated", + ) copy_inputs = traits.Bool( - desc="If running as a node, set this to True " + - "this will copy some implicit inputs to the " + "node directory.") + desc="If running as a node, set this to True " + + "this will copy some implicit inputs to the " + + "node directory." + ) class MRISPreprocReconAll(MRISPreproc): @@ -174,30 +201,32 @@ class MRISPreprocReconAll(MRISPreproc): def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir if isdefined(self.inputs.surf_dir): folder = self.inputs.surf_dir else: - folder = 'surf' + folder = "surf" if isdefined(self.inputs.surfreg_files): for surfreg in self.inputs.surfreg_files: basename = os.path.basename(surfreg) copy2subjdir(self, surfreg, folder, basename) - if basename.startswith('lh.'): + if basename.startswith("lh."): copy2subjdir( self, self.inputs.lh_surfreg_target, folder, basename, - subject_id=self.inputs.target) + subject_id=self.inputs.target, + ) else: copy2subjdir( self, self.inputs.rh_surfreg_target, folder, basename, - subject_id=self.inputs.target) + subject_id=self.inputs.target, + ) if isdefined(self.inputs.surf_measure_file): copy2subjdir(self, self.inputs.surf_measure_file, folder) @@ -206,173 +235,194 @@ def run(self, **inputs): def _format_arg(self, name, spec, value): # mris_preproc looks for these files in the surf dir - if name == 'surfreg_files': + if name == "surfreg_files": basename = os.path.basename(value[0]) - return spec.argstr % basename.lstrip('rh.').lstrip('lh.') + return spec.argstr % basename.lstrip("rh.").lstrip("lh.") if name == "surf_measure_file": basename = os.path.basename(value) - return spec.argstr % basename.lstrip('rh.').lstrip('lh.') + return spec.argstr % basename.lstrip("rh.").lstrip("lh.") return super(MRISPreprocReconAll, self)._format_arg(name, spec, value) class GLMFitInputSpec(FSTraitedSpec): - glm_dir = traits.Str( - argstr='--glmdir %s', desc='save outputs to dir', genfile=True) + glm_dir = traits.Str(argstr="--glmdir %s", desc="save outputs to dir", genfile=True) in_file = File( - desc='input 4D file', argstr='--y %s', mandatory=True, copyfile=False) - _design_xor = ('fsgd', 'design', 'one_sample') + desc="input 4D file", argstr="--y %s", mandatory=True, copyfile=False + ) + _design_xor = ("fsgd", "design", "one_sample") fsgd = traits.Tuple( File(exists=True), - traits.Enum('doss', 'dods'), - argstr='--fsgd %s %s', + traits.Enum("doss", "dods"), + argstr="--fsgd %s %s", xor=_design_xor, - desc='freesurfer descriptor file') + desc="freesurfer descriptor file", + ) design = File( - exists=True, - argstr='--X %s', - xor=_design_xor, - desc='design matrix file') + exists=True, argstr="--X %s", xor=_design_xor, desc="design matrix file" + ) contrast = InputMultiPath( - File(exists=True), argstr='--C %s...', desc='contrast file') + File(exists=True), argstr="--C %s...", desc="contrast file" + ) one_sample = traits.Bool( - argstr='--osgm', - xor=('one_sample', 'fsgd', 'design', 'contrast'), - desc='construct X and C as a one-sample group mean') + argstr="--osgm", + xor=("one_sample", "fsgd", "design", "contrast"), + desc="construct X and C as a one-sample group mean", + ) no_contrast_ok = traits.Bool( - argstr='--no-contrasts-ok', - desc='do not fail if no contrasts specified') + 
argstr="--no-contrasts-ok", desc="do not fail if no contrasts specified" + ) per_voxel_reg = InputMultiPath( - File(exists=True), argstr='--pvr %s...', desc='per-voxel regressors') + File(exists=True), argstr="--pvr %s...", desc="per-voxel regressors" + ) self_reg = traits.Tuple( traits.Int, traits.Int, traits.Int, - argstr='--selfreg %d %d %d', - desc='self-regressor from index col row slice') + argstr="--selfreg %d %d %d", + desc="self-regressor from index col row slice", + ) weighted_ls = File( exists=True, - argstr='--wls %s', - xor=('weight_file', 'weight_inv', 'weight_sqrt'), - desc='weighted least squares') + argstr="--wls %s", + xor=("weight_file", "weight_inv", "weight_sqrt"), + desc="weighted least squares", + ) fixed_fx_var = File( - exists=True, argstr='--yffxvar %s', desc='for fixed effects analysis') + exists=True, argstr="--yffxvar %s", desc="for fixed effects analysis" + ) fixed_fx_dof = traits.Int( - argstr='--ffxdof %d', - xor=['fixed_fx_dof_file'], - desc='dof for fixed effects analysis') + argstr="--ffxdof %d", + xor=["fixed_fx_dof_file"], + desc="dof for fixed effects analysis", + ) fixed_fx_dof_file = File( - argstr='--ffxdofdat %d', - xor=['fixed_fx_dof'], - desc='text file with dof for fixed effects analysis') + argstr="--ffxdofdat %d", + xor=["fixed_fx_dof"], + desc="text file with dof for fixed effects analysis", + ) weight_file = File( - exists=True, - xor=['weighted_ls'], - desc='weight for each input at each voxel') + exists=True, xor=["weighted_ls"], desc="weight for each input at each voxel" + ) weight_inv = traits.Bool( - argstr='--w-inv', desc='invert weights', xor=['weighted_ls']) + argstr="--w-inv", desc="invert weights", xor=["weighted_ls"] + ) weight_sqrt = traits.Bool( - argstr='--w-sqrt', desc='sqrt of weights', xor=['weighted_ls']) - fwhm = traits.Range( - low=0.0, argstr='--fwhm %f', desc='smooth input by fwhm') + argstr="--w-sqrt", desc="sqrt of weights", xor=["weighted_ls"] + ) + fwhm = traits.Range(low=0.0, argstr="--fwhm %f", desc="smooth input by fwhm") var_fwhm = traits.Range( - low=0.0, argstr='--var-fwhm %f', desc='smooth variance by fwhm') + low=0.0, argstr="--var-fwhm %f", desc="smooth variance by fwhm" + ) no_mask_smooth = traits.Bool( - argstr='--no-mask-smooth', desc='do not mask when smoothing') + argstr="--no-mask-smooth", desc="do not mask when smoothing" + ) no_est_fwhm = traits.Bool( - argstr='--no-est-fwhm', desc='turn off FWHM output estimation') - mask_file = File(exists=True, argstr='--mask %s', desc='binary mask') + argstr="--no-est-fwhm", desc="turn off FWHM output estimation" + ) + mask_file = File(exists=True, argstr="--mask %s", desc="binary mask") label_file = File( exists=True, - argstr='--label %s', - xor=['cortex'], - desc='use label as mask, surfaces only') + argstr="--label %s", + xor=["cortex"], + desc="use label as mask, surfaces only", + ) cortex = traits.Bool( - argstr='--cortex', - xor=['label_file'], - desc='use subjects ?h.cortex.label as label') - invert_mask = traits.Bool(argstr='--mask-inv', desc='invert mask') + argstr="--cortex", + xor=["label_file"], + desc="use subjects ?h.cortex.label as label", + ) + invert_mask = traits.Bool(argstr="--mask-inv", desc="invert mask") prune = traits.Bool( - argstr='--prune', - desc= - 'remove voxels that do not have a non-zero value at each frame (def)') + argstr="--prune", + desc="remove voxels that do not have a non-zero value at each frame (def)", + ) no_prune = traits.Bool( - argstr='--no-prune', xor=['prunethresh'], desc='do not prune') + argstr="--no-prune", 
xor=["prunethresh"], desc="do not prune" + ) prune_thresh = traits.Float( - argstr='--prune_thr %f', - xor=['noprune'], - desc='prune threshold. Default is FLT_MIN') + argstr="--prune_thr %f", + xor=["noprune"], + desc="prune threshold. Default is FLT_MIN", + ) compute_log_y = traits.Bool( - argstr='--logy', desc='compute natural log of y prior to analysis') + argstr="--logy", desc="compute natural log of y prior to analysis" + ) save_estimate = traits.Bool( - argstr='--yhat-save', desc='save signal estimate (yhat)') - save_residual = traits.Bool( - argstr='--eres-save', desc='save residual error (eres)') + argstr="--yhat-save", desc="save signal estimate (yhat)" + ) + save_residual = traits.Bool(argstr="--eres-save", desc="save residual error (eres)") save_res_corr_mtx = traits.Bool( - argstr='--eres-scm', - desc='save residual error spatial correlation matrix (eres.scm). Big!') + argstr="--eres-scm", + desc="save residual error spatial correlation matrix (eres.scm). Big!", + ) surf = traits.Bool( argstr="--surf %s %s %s", requires=["subject_id", "hemi"], - desc="analysis is on a surface mesh") + desc="analysis is on a surface mesh", + ) subject_id = traits.Str(desc="subject id for surface geometry") hemi = traits.Enum("lh", "rh", desc="surface hemisphere") surf_geo = traits.Str( - "white", - usedefault=True, - desc="surface geometry name (e.g. white, pial)") + "white", usedefault=True, desc="surface geometry name (e.g. white, pial)" + ) simulation = traits.Tuple( - traits.Enum('perm', 'mc-full', 'mc-z'), + traits.Enum("perm", "mc-full", "mc-z"), traits.Int(min=1), traits.Float, traits.Str, - argstr='--sim %s %d %f %s', - desc='nulltype nsim thresh csdbasename') + argstr="--sim %s %d %f %s", + desc="nulltype nsim thresh csdbasename", + ) sim_sign = traits.Enum( - 'abs', 'pos', 'neg', argstr='--sim-sign %s', desc='abs, pos, or neg') + "abs", "pos", "neg", argstr="--sim-sign %s", desc="abs, pos, or neg" + ) uniform = traits.Tuple( traits.Float, traits.Float, - argstr='--uniform %f %f', - desc='use uniform distribution instead of gaussian') - pca = traits.Bool( - argstr='--pca', desc='perform pca/svd analysis on residual') + argstr="--uniform %f %f", + desc="use uniform distribution instead of gaussian", + ) + pca = traits.Bool(argstr="--pca", desc="perform pca/svd analysis on residual") calc_AR1 = traits.Bool( - argstr='--tar1', desc='compute and save temporal AR1 of residual') + argstr="--tar1", desc="compute and save temporal AR1 of residual" + ) save_cond = traits.Bool( - argstr='--save-cond', - desc='flag to save design matrix condition at each voxel') + argstr="--save-cond", desc="flag to save design matrix condition at each voxel" + ) vox_dump = traits.Tuple( traits.Int, traits.Int, traits.Int, - argstr='--voxdump %d %d %d', - desc='dump voxel GLM and exit') - seed = traits.Int(argstr='--seed %d', desc='used for synthesizing noise') - synth = traits.Bool(argstr='--synth', desc='replace input with gaussian') - resynth_test = traits.Int( - argstr='--resynthtest %d', desc='test GLM by resynthsis') - profile = traits.Int(argstr='--profile %d', desc='niters : test speed') + argstr="--voxdump %d %d %d", + desc="dump voxel GLM and exit", + ) + seed = traits.Int(argstr="--seed %d", desc="used for synthesizing noise") + synth = traits.Bool(argstr="--synth", desc="replace input with gaussian") + resynth_test = traits.Int(argstr="--resynthtest %d", desc="test GLM by resynthsis") + profile = traits.Int(argstr="--profile %d", desc="niters : test speed") force_perm = traits.Bool( - 
argstr='--perm-force', - desc='force perumtation test, even when design matrix is not orthog') - diag = traits.Int(argstr='--diag %d', desc='Gdiag_no : set diagnositc level') + argstr="--perm-force", + desc="force permutation test, even when design matrix is not orthog", + ) + diag = traits.Int(argstr="--diag %d", desc="Gdiag_no : set diagnostic level") diag_cluster = traits.Bool( - argstr='--diag-cluster', - desc='save sig volume and exit from first sim loop') + argstr="--diag-cluster", desc="save sig volume and exit from first sim loop" + ) - debug = traits.Bool(argstr='--debug', desc='turn on debugging') + debug = traits.Bool(argstr="--debug", desc="turn on debugging") check_opts = traits.Bool( - argstr='--checkopts', - desc="don't run anything, just check options and exit") + argstr="--checkopts", desc="don't run anything, just check options and exit" + ) allow_repeated_subjects = traits.Bool( - argstr='--allowsubjrep', - desc= - 'allow subject names to repeat in the fsgd file (must appear before --fsgd' + argstr="--allowsubjrep", + desc="allow subject names to repeat in the fsgd file (must appear before --fsgd", ) allow_ill_cond = traits.Bool( - argstr='--illcond', desc='allow ill-conditioned design matrices') + argstr="--illcond", desc="allow ill-conditioned design matrices" + ) sim_done_file = File( - argstr='--sim-done %s', desc='create file when simulation finished') + argstr="--sim-done %s", desc="create file when simulation finished" + ) class GLMFitOutputSpec(TraitedSpec): @@ -385,18 +435,13 @@ class GLMFitOutputSpec(TraitedSpec): estimate_file = File(desc="map of the estimated Y values") mask_file = File(desc="map of the mask used in the analysis") fwhm_file = File(desc="text file with estimated smoothness") - dof_file = File( - desc="text file with effective degrees-of-freedom for the analysis") - gamma_file = OutputMultiPath( - desc="map of contrast of regression coefficients") - gamma_var_file = OutputMultiPath( - desc="map of regression contrast variance") + dof_file = File(desc="text file with effective degrees-of-freedom for the analysis") + gamma_file = OutputMultiPath(desc="map of contrast of regression coefficients") + gamma_var_file = OutputMultiPath(desc="map of regression contrast variance") sig_file = OutputMultiPath(desc="map of F-test significance (in -log10p)") ftest_file = OutputMultiPath(desc="map of test statistic values") - spatial_eigenvectors = File( - desc="map of spatial eigenvectors from residual PCA") - frame_eigenvectors = File( - desc="matrix of frame eigenvectors from residual PCA") + spatial_eigenvectors = File(desc="map of spatial eigenvectors from residual PCA") + frame_eigenvectors = File(desc="matrix of frame eigenvectors from residual PCA") singular_values = File(desc="matrix singular values from residual PCA") svd_stats_file = File(desc="text file summarizing the residual PCA") @@ -415,7 +460,7 @@ class GLMFit(FSCommand): """ - _cmd = 'mri_glmfit' + _cmd = "mri_glmfit" input_spec = GLMFitInputSpec output_spec = GLMFitOutputSpec @@ -459,12 +504,8 @@ def _list_outputs(self): contrasts = ["osgm"] # Add in the contrast images - outputs["sig_file"] = [ - os.path.join(glmdir, c, "sig.mgh") for c in contrasts - ] - outputs["ftest_file"] = [ - os.path.join(glmdir, c, "F.mgh") for c in contrasts - ] + outputs["sig_file"] = [os.path.join(glmdir, c, "sig.mgh") for c in contrasts] + outputs["ftest_file"] = [os.path.join(glmdir, c, "F.mgh") for c in contrasts] outputs["gamma_file"] = [ os.path.join(glmdir, c, "gamma.mgh") for c in contrasts ] @@ -483,7 
+524,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name == 'glm_dir': + if name == "glm_dir": return os.getcwd() return None @@ -497,76 +538,75 @@ def __init__(self, **kwargs): class BinarizeInputSpec(FSTraitedSpec): in_file = File( exists=True, - argstr='--i %s', + argstr="--i %s", mandatory=True, copyfile=False, - desc='input volume') - min = traits.Float( - argstr='--min %f', xor=['wm_ven_csf'], desc='min thresh') - max = traits.Float( - argstr='--max %f', xor=['wm_ven_csf'], desc='max thresh') - rmin = traits.Float( - argstr='--rmin %f', desc='compute min based on rmin*globalmean') - rmax = traits.Float( - argstr='--rmax %f', desc='compute max based on rmax*globalmean') + desc="input volume", + ) + min = traits.Float(argstr="--min %f", xor=["wm_ven_csf"], desc="min thresh") + max = traits.Float(argstr="--max %f", xor=["wm_ven_csf"], desc="max thresh") + rmin = traits.Float(argstr="--rmin %f", desc="compute min based on rmin*globalmean") + rmax = traits.Float(argstr="--rmax %f", desc="compute max based on rmax*globalmean") match = traits.List( - traits.Int, argstr='--match %d...', desc='match instead of threshold') + traits.Int, argstr="--match %d...", desc="match instead of threshold" + ) wm = traits.Bool( - argstr='--wm', - desc='set match vals to 2 and 41 (aseg for cerebral WM)') + argstr="--wm", desc="set match vals to 2 and 41 (aseg for cerebral WM)" + ) ventricles = traits.Bool( - argstr='--ventricles', - desc='set match vals those for aseg ventricles+choroid (not 4th)') + argstr="--ventricles", + desc="set match vals to those for aseg ventricles+choroid (not 4th)", + ) wm_ven_csf = traits.Bool( - argstr='--wm+vcsf', - xor=['min', 'max'], - desc='WM and ventricular CSF, including choroid (not 4th)') - binary_file = File( - argstr='--o %s', genfile=True, desc='binary output volume') - out_type = traits.Enum( - 'nii', 'nii.gz', 'mgz', argstr='', desc='output file type') + argstr="--wm+vcsf", + xor=["min", "max"], + desc="WM and ventricular CSF, including choroid (not 4th)", + ) + binary_file = File(argstr="--o %s", genfile=True, desc="binary output volume") + out_type = traits.Enum("nii", "nii.gz", "mgz", argstr="", desc="output file type") count_file = traits.Either( traits.Bool, File, - argstr='--count %s', - desc='save number of hits in ascii file (hits, ntotvox, pct)') + argstr="--count %s", + desc="save number of hits in ascii file (hits, ntotvox, pct)", + ) bin_val = traits.Int( - argstr='--binval %d', - desc='set vox within thresh to val (default is 1)') + argstr="--binval %d", desc="set vox within thresh to val (default is 1)" + ) bin_val_not = traits.Int( - argstr='--binvalnot %d', - desc='set vox outside range to val (default is 0)') - invert = traits.Bool(argstr='--inv', desc='set binval=0, binvalnot=1') + argstr="--binvalnot %d", desc="set vox outside range to val (default is 0)" + ) + invert = traits.Bool(argstr="--inv", desc="set binval=0, binvalnot=1") frame_no = traits.Int( - argstr='--frame %s', desc='use 0-based frame of input (default is 0)') - merge_file = File( - exists=True, argstr='--merge %s', desc='merge with mergevol') - mask_file = File( - exists=True, argstr='--mask maskvol', desc='must be within mask') - mask_thresh = traits.Float( - argstr='--mask-thresh %f', desc='set thresh for mask') + argstr="--frame %s", desc="use 0-based frame of input (default is 0)" + ) + merge_file = File(exists=True, argstr="--merge %s", desc="merge with mergevol") + mask_file = File(exists=True, argstr="--mask %s", desc="must be within 
mask") + mask_thresh = traits.Float(argstr="--mask-thresh %f", desc="set thresh for mask") abs = traits.Bool( - argstr='--abs', desc='take abs of invol first (ie, make unsigned)') + argstr="--abs", desc="take abs of invol first (ie, make unsigned)" + ) bin_col_num = traits.Bool( - argstr='--bincol', - desc='set binarized voxel value to its column number') - zero_edges = traits.Bool( - argstr='--zero-edges', desc='zero the edge voxels') + argstr="--bincol", desc="set binarized voxel value to its column number" + ) + zero_edges = traits.Bool(argstr="--zero-edges", desc="zero the edge voxels") zero_slice_edge = traits.Bool( - argstr='--zero-slice-edges', desc='zero the edge slice voxels') - dilate = traits.Int( - argstr='--dilate %d', desc='niters: dilate binarization in 3D') + argstr="--zero-slice-edges", desc="zero the edge slice voxels" + ) + dilate = traits.Int(argstr="--dilate %d", desc="niters: dilate binarization in 3D") erode = traits.Int( - argstr='--erode %d', - desc='nerode: erode binarization in 3D (after any dilation)') + argstr="--erode %d", + desc="nerode: erode binarization in 3D (after any dilation)", + ) erode2d = traits.Int( - argstr='--erode2d %d', - desc='nerode2d: erode binarization in 2D (after any 3D erosion)') + argstr="--erode2d %d", + desc="nerode2d: erode binarization in 2D (after any 3D erosion)", + ) class BinarizeOutputSpec(TraitedSpec): - binary_file = File(exists=True, desc='binarized output volume') - count_file = File(desc='ascii file containing number of hits') + binary_file = File(exists=True, desc="binarized output volume") + count_file = File(desc="ascii file containing number of hits") class Binarize(FSCommand): @@ -581,7 +621,7 @@ class Binarize(FSCommand): """ - _cmd = 'mri_binarize' + _cmd = "mri_binarize" input_spec = BinarizeInputSpec output_spec = BinarizeOutputSpec @@ -593,38 +633,41 @@ def _list_outputs(self): outfile = fname_presuffix( self.inputs.in_file, newpath=os.getcwd(), - suffix='.'.join(('_thresh', self.inputs.out_type)), - use_ext=False) + suffix=".".join(("_thresh", self.inputs.out_type)), + use_ext=False, + ) else: outfile = fname_presuffix( - self.inputs.in_file, newpath=os.getcwd(), suffix='_thresh') - outputs['binary_file'] = os.path.abspath(outfile) + self.inputs.in_file, newpath=os.getcwd(), suffix="_thresh" + ) + outputs["binary_file"] = os.path.abspath(outfile) value = self.inputs.count_file if isdefined(value): if isinstance(value, bool): if value: - outputs['count_file'] = fname_presuffix( + outputs["count_file"] = fname_presuffix( self.inputs.in_file, - suffix='_count.txt', + suffix="_count.txt", newpath=os.getcwd(), - use_ext=False) + use_ext=False, + ) else: - outputs['count_file'] = value + outputs["count_file"] = value return outputs def _format_arg(self, name, spec, value): - if name == 'count_file': + if name == "count_file": if isinstance(value, bool): fname = self._list_outputs()[name] else: fname = value return spec.argstr % fname - if name == 'out_type': - return '' + if name == "out_type": + return "" return super(Binarize, self)._format_arg(name, spec, value) def _gen_filename(self, name): - if name == 'binary_file': + if name == "binary_file": return self._list_outputs()[name] return None @@ -632,73 +675,77 @@ def _gen_filename(self, name): class ConcatenateInputSpec(FSTraitedSpec): in_files = InputMultiPath( File(exists=True), - desc='Individual volumes to be concatenated', - argstr='--i %s...', - mandatory=True) - concatenated_file = File( - desc='Output volume', argstr='--o %s', genfile=True) + 
desc="Individual volumes to be concatenated", + argstr="--i %s...", + mandatory=True, + ) + concatenated_file = File(desc="Output volume", argstr="--o %s", genfile=True) sign = traits.Enum( - 'abs', - 'pos', - 'neg', - argstr='--%s', - desc='Take only pos or neg voxles from input, or take abs') + "abs", + "pos", + "neg", + argstr="--%s", + desc="Take only pos or neg voxles from input, or take abs", + ) stats = traits.Enum( - 'sum', - 'var', - 'std', - 'max', - 'min', - 'mean', - argstr='--%s', - desc='Compute the sum, var, std, max, min or mean of the input volumes' + "sum", + "var", + "std", + "max", + "min", + "mean", + argstr="--%s", + desc="Compute the sum, var, std, max, min or mean of the input volumes", ) paired_stats = traits.Enum( - 'sum', - 'avg', - 'diff', - 'diff-norm', - 'diff-norm1', - 'diff-norm2', - argstr='--paired-%s', - desc='Compute paired sum, avg, or diff') + "sum", + "avg", + "diff", + "diff-norm", + "diff-norm1", + "diff-norm2", + argstr="--paired-%s", + desc="Compute paired sum, avg, or diff", + ) gmean = traits.Int( - argstr='--gmean %d', - desc='create matrix to average Ng groups, Nper=Ntot/Ng') + argstr="--gmean %d", desc="create matrix to average Ng groups, Nper=Ntot/Ng" + ) mean_div_n = traits.Bool( - argstr='--mean-div-n', desc='compute mean/nframes (good for var)') + argstr="--mean-div-n", desc="compute mean/nframes (good for var)" + ) multiply_by = traits.Float( - argstr='--mul %f', desc='Multiply input volume by some amount') + argstr="--mul %f", desc="Multiply input volume by some amount" + ) add_val = traits.Float( - argstr='--add %f', desc='Add some amount to the input volume') + argstr="--add %f", desc="Add some amount to the input volume" + ) multiply_matrix_file = File( - exists=True, - argstr='--mtx %s', - desc='Multiply input by an ascii matrix in file') + exists=True, argstr="--mtx %s", desc="Multiply input by an ascii matrix in file" + ) combine = traits.Bool( - argstr='--combine', - desc='Combine non-zero values into single frame volume') + argstr="--combine", desc="Combine non-zero values into single frame volume" + ) keep_dtype = traits.Bool( - argstr='--keep-datatype', - desc='Keep voxelwise precision type (default is float') + argstr="--keep-datatype", desc="Keep voxelwise precision type (default is float" + ) max_bonfcor = traits.Bool( - argstr='--max-bonfcor', - desc='Compute max and bonferroni correct (assumes -log10(ps))') + argstr="--max-bonfcor", + desc="Compute max and bonferroni correct (assumes -log10(ps))", + ) max_index = traits.Bool( - argstr='--max-index', - desc='Compute the index of max voxel in concatenated volumes') - mask_file = File( - exists=True, argstr='--mask %s', desc='Mask input with a volume') + argstr="--max-index", + desc="Compute the index of max voxel in concatenated volumes", + ) + mask_file = File(exists=True, argstr="--mask %s", desc="Mask input with a volume") vote = traits.Bool( - argstr='--vote', - desc='Most frequent value at each voxel and fraction of occurances') - sort = traits.Bool( - argstr='--sort', desc='Sort each voxel by ascending frame value') + argstr="--vote", + desc="Most frequent value at each voxel and fraction of occurances", + ) + sort = traits.Bool(argstr="--sort", desc="Sort each voxel by ascending frame value") class ConcatenateOutputSpec(TraitedSpec): - concatenated_file = File( - exists=True, desc='Path/name of the output volume') + concatenated_file = File(exists=True, desc="Path/name of the output volume") class Concatenate(FSCommand): @@ -719,7 +766,7 @@ class 
Concatenate(FSCommand): """ - _cmd = 'mri_concat' + _cmd = "mri_concat" input_spec = ConcatenateInputSpec output_spec = ConcatenateOutputSpec @@ -728,181 +775,198 @@ def _list_outputs(self): fname = self.inputs.concatenated_file if not isdefined(fname): - fname = 'concat_output.nii.gz' - outputs['concatenated_file'] = os.path.join(os.getcwd(), fname) + fname = "concat_output.nii.gz" + outputs["concatenated_file"] = os.path.join(os.getcwd(), fname) return outputs def _gen_filename(self, name): - if name == 'concatenated_file': + if name == "concatenated_file": return self._list_outputs()[name] return None class SegStatsInputSpec(FSTraitedSpec): - _xor_inputs = ('segmentation_file', 'annot', 'surf_label') + _xor_inputs = ("segmentation_file", "annot", "surf_label") segmentation_file = File( exists=True, - argstr='--seg %s', + argstr="--seg %s", xor=_xor_inputs, mandatory=True, - desc='segmentation volume path') + desc="segmentation volume path", + ) annot = traits.Tuple( traits.Str, - traits.Enum('lh', 'rh'), + traits.Enum("lh", "rh"), traits.Str, - argstr='--annot %s %s %s', + argstr="--annot %s %s %s", xor=_xor_inputs, mandatory=True, - desc='subject hemi parc : use surface parcellation') + desc="subject hemi parc : use surface parcellation", + ) surf_label = traits.Tuple( traits.Str, - traits.Enum('lh', 'rh'), + traits.Enum("lh", "rh"), traits.Str, - argstr='--slabel %s %s %s', + argstr="--slabel %s %s %s", xor=_xor_inputs, mandatory=True, - desc='subject hemi label : use surface label') + desc="subject hemi label : use surface label", + ) summary_file = File( - argstr='--sum %s', + argstr="--sum %s", genfile=True, position=-1, - desc='Segmentation stats summary table file') + desc="Segmentation stats summary table file", + ) partial_volume_file = File( - exists=True, argstr='--pv %s', desc='Compensate for partial voluming') + exists=True, argstr="--pv %s", desc="Compensate for partial voluming" + ) in_file = File( exists=True, - argstr='--i %s', - desc='Use the segmentation to report stats on this volume') + argstr="--i %s", + desc="Use the segmentation to report stats on this volume", + ) frame = traits.Int( - argstr='--frame %d', desc='Report stats on nth frame of input volume') - multiply = traits.Float(argstr='--mul %f', desc='multiply input by val') + argstr="--frame %d", desc="Report stats on nth frame of input volume" + ) + multiply = traits.Float(argstr="--mul %f", desc="multiply input by val") calc_snr = traits.Bool( - argstr='--snr', desc='save mean/std as extra column in output table') + argstr="--snr", desc="save mean/std as extra column in output table" + ) calc_power = traits.Enum( - 'sqr', - 'sqrt', - argstr='--%s', - desc='Compute either the sqr or the sqrt of the input') - _ctab_inputs = ('color_table_file', 'default_color_table', - 'gca_color_table') + "sqr", + "sqrt", + argstr="--%s", + desc="Compute either the sqr or the sqrt of the input", + ) + _ctab_inputs = ("color_table_file", "default_color_table", "gca_color_table") color_table_file = File( exists=True, - argstr='--ctab %s', + argstr="--ctab %s", xor=_ctab_inputs, - desc='color table file with seg id names') + desc="color table file with seg id names", + ) default_color_table = traits.Bool( - argstr='--ctab-default', + argstr="--ctab-default", xor=_ctab_inputs, - desc='use $FREESURFER_HOME/FreeSurferColorLUT.txt') + desc="use $FREESURFER_HOME/FreeSurferColorLUT.txt", + ) gca_color_table = File( exists=True, - argstr='--ctab-gca %s', + argstr="--ctab-gca %s", xor=_ctab_inputs, - desc='get color table from GCA 
(CMA)') + desc="get color table from GCA (CMA)", + ) segment_id = traits.List( - argstr='--id %s...', desc='Manually specify segmentation ids') - exclude_id = traits.Int( - argstr='--excludeid %d', desc='Exclude seg id from report') + argstr="--id %s...", desc="Manually specify segmentation ids" + ) + exclude_id = traits.Int(argstr="--excludeid %d", desc="Exclude seg id from report") exclude_ctx_gm_wm = traits.Bool( - argstr='--excl-ctxgmwm', desc='exclude cortical gray and white matter') + argstr="--excl-ctxgmwm", desc="exclude cortical gray and white matter" + ) wm_vol_from_surf = traits.Bool( - argstr='--surf-wm-vol', desc='Compute wm volume from surf') + argstr="--surf-wm-vol", desc="Compute wm volume from surf" + ) cortex_vol_from_surf = traits.Bool( - argstr='--surf-ctx-vol', desc='Compute cortex volume from surf') + argstr="--surf-ctx-vol", desc="Compute cortex volume from surf" + ) non_empty_only = traits.Bool( - argstr='--nonempty', desc='Only report nonempty segmentations') + argstr="--nonempty", desc="Only report nonempty segmentations" + ) empty = traits.Bool( - argstr="--empty", - desc="Report on segmentations listed in the color table") + argstr="--empty", desc="Report on segmentations listed in the color table" + ) mask_file = File( - exists=True, argstr='--mask %s', desc='Mask volume (same size as seg') + exists=True, argstr="--mask %s", desc="Mask volume (same size as seg)" + ) mask_thresh = traits.Float( - argstr='--maskthresh %f', - desc='binarize mask with this threshold <0.5>') + argstr="--maskthresh %f", desc="binarize mask with this threshold <0.5>" + ) mask_sign = traits.Enum( - 'abs', - 'pos', - 'neg', - '--masksign %s', - desc='Sign for mask threshold: pos, neg, or abs') + "abs", + "pos", + "neg", + argstr="--masksign %s", + desc="Sign for mask threshold: pos, neg, or abs", + ) mask_frame = traits.Int( - '--maskframe %d', - requires=['mask_file'], - desc='Mask with this (0 based) frame of the mask volume') + argstr="--maskframe %d", + requires=["mask_file"], + desc="Mask with this (0 based) frame of the mask volume", + ) mask_invert = traits.Bool( - argstr='--maskinvert', desc='Invert binarized mask volume') - mask_erode = traits.Int( - argstr='--maskerode %d', desc='Erode mask by some amount') + argstr="--maskinvert", desc="Invert binarized mask volume" + ) + mask_erode = traits.Int(argstr="--maskerode %d", desc="Erode mask by some amount") brain_vol = traits.Enum( - 'brain-vol-from-seg', - 'brainmask', - argstr='--%s', - desc= - 'Compute brain volume either with ``brainmask`` or ``brain-vol-from-seg``' ) + "brain-vol-from-seg", + "brainmask", + argstr="--%s", + desc="Compute brain volume either with ``brainmask`` or ``brain-vol-from-seg``", ) brainmask_file = File( argstr="--brainmask %s", exists=True, - desc= - "Load brain mask and compute the volume of the brain as the non-zero voxels in this volume" + desc="Load brain mask and compute the volume of the brain as the non-zero voxels in this volume", ) - etiv = traits.Bool( - argstr='--etiv', desc='Compute ICV from talairach transform') + etiv = traits.Bool(argstr="--etiv", desc="Compute ICV from talairach transform") etiv_only = traits.Enum( - 'etiv', - 'old-etiv', - '--%s-only', - desc='Compute etiv and exit. Use ``etiv`` or ``old-etiv``') + "etiv", + "old-etiv", + argstr="--%s-only", + desc="Compute etiv and exit. 
Use ``etiv`` or ``old-etiv``", + ) avgwf_txt_file = traits.Either( traits.Bool, File, - argstr='--avgwf %s', - desc='Save average waveform into file (bool or filename)') + argstr="--avgwf %s", + desc="Save average waveform into file (bool or filename)", + ) avgwf_file = traits.Either( traits.Bool, File, - argstr='--avgwfvol %s', - desc='Save as binary volume (bool or filename)') + argstr="--avgwfvol %s", + desc="Save as binary volume (bool or filename)", + ) sf_avg_file = traits.Either( - traits.Bool, - File, - argstr='--sfavg %s', - desc='Save mean across space and time') + traits.Bool, File, argstr="--sfavg %s", desc="Save mean across space and time" + ) vox = traits.List( traits.Int, - argstr='--vox %s', - desc='Replace seg with all 0s except at C R S (three int inputs)') - supratent = traits.Bool( - argstr="--supratent", desc="Undocumented input flag") + argstr="--vox %s", + desc="Replace seg with all 0s except at C R S (three int inputs)", + ) + supratent = traits.Bool(argstr="--supratent", desc="Undocumented input flag") subcort_gm = traits.Bool( - argstr="--subcortgray", - desc="Compute volume of subcortical gray matter") + argstr="--subcortgray", desc="Compute volume of subcortical gray matter" + ) total_gray = traits.Bool( - argstr="--totalgray", desc="Compute volume of total gray matter") + argstr="--totalgray", desc="Compute volume of total gray matter" + ) euler = traits.Bool( argstr="--euler", - desc= - "Write out number of defect holes in orig.nofix based on the euler number" + desc="Write out number of defect holes in orig.nofix based on the euler number", ) in_intensity = File( - argstr="--in %s --in-intensity-name %s", - desc="Undocumented input norm.mgz file") + argstr="--in %s --in-intensity-name %s", desc="Undocumented input norm.mgz file" + ) intensity_units = traits.Enum( - 'MR', + "MR", argstr="--in-intensity-units %s", requires=["in_intensity"], - desc="Intensity units") + desc="Intensity units", + ) class SegStatsOutputSpec(TraitedSpec): - summary_file = File( - exists=True, desc='Segmentation summary statistics table') + summary_file = File(exists=True, desc="Segmentation summary statistics table") avgwf_txt_file = File( - desc='Text file with functional statistics averaged over segs') + desc="Text file with functional statistics averaged over segs" + ) avgwf_file = File( - desc='Volume with functional statistics averaged over segs') + avgwf_file = File(desc="Volume with functional statistics averaged over segs") sf_avg_file = File( - desc='Text file with func statistics averaged over segs and framss') + desc="Text file with func statistics averaged over segs and frames" + ) class SegStats(FSCommand): @@ -923,56 +987,58 @@ class SegStats(FSCommand): """ - _cmd = 'mri_segstats' + _cmd = "mri_segstats" input_spec = SegStatsInputSpec output_spec = SegStatsOutputSpec def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.summary_file): - outputs['summary_file'] = os.path.abspath(self.inputs.summary_file) + outputs["summary_file"] = os.path.abspath(self.inputs.summary_file) else: - outputs['summary_file'] = os.path.join(os.getcwd(), - 'summary.stats') + outputs["summary_file"] = os.path.join(os.getcwd(), "summary.stats") suffices = dict( - avgwf_txt_file='_avgwf.txt', - avgwf_file='_avgwf.nii.gz', - sf_avg_file='sfavg.txt') + avgwf_txt_file="_avgwf.txt", + avgwf_file="_avgwf.nii.gz", + sf_avg_file="sfavg.txt", + ) if isdefined(self.inputs.segmentation_file): _, src = os.path.split(self.inputs.segmentation_file) if 
isdefined(self.inputs.annot): - src = '_'.join(self.inputs.annot) + src = "_".join(self.inputs.annot) if isdefined(self.inputs.surf_label): - src = '_'.join(self.inputs.surf_label) + src = "_".join(self.inputs.surf_label) for name, suffix in list(suffices.items()): value = getattr(self.inputs, name) if isdefined(value): if isinstance(value, bool): outputs[name] = fname_presuffix( - src, suffix=suffix, newpath=os.getcwd(), use_ext=False) + src, suffix=suffix, newpath=os.getcwd(), use_ext=False + ) else: outputs[name] = os.path.abspath(value) return outputs def _format_arg(self, name, spec, value): - if name in ('summary_file', 'avgwf_txt_file'): + if name in ("summary_file", "avgwf_txt_file"): if not isinstance(value, bool): if not os.path.isabs(value): - value = os.path.join('.', value) - if name in ['avgwf_txt_file', 'avgwf_file', 'sf_avg_file']: + value = os.path.join(".", value) + if name in ["avgwf_txt_file", "avgwf_file", "sf_avg_file"]: if isinstance(value, bool): fname = self._list_outputs()[name] else: fname = value return spec.argstr % fname - elif name == 'in_intensity': - intensity_name = os.path.basename( - self.inputs.in_intensity).replace('.mgz', '') + elif name == "in_intensity": + intensity_name = os.path.basename(self.inputs.in_intensity).replace( + ".mgz", "" + ) return spec.argstr % (value, intensity_name) return super(SegStats, self)._format_arg(name, spec, value) def _gen_filename(self, name): - if name == 'summary_file': + if name == "summary_file": return self._list_outputs()[name] return None @@ -980,40 +1046,40 @@ def _gen_filename(self, name): class SegStatsReconAllInputSpec(SegStatsInputSpec): # recon-all input requirements subject_id = traits.String( - 'subject_id', + "subject_id", usedefault=True, argstr="--subject %s", mandatory=True, - desc="Subject id being processed") + desc="Subject id being processed", + ) # implicit - ribbon = File( - mandatory=True, exists=True, desc="Input file mri/ribbon.mgz") + ribbon = File(mandatory=True, exists=True, desc="Input file mri/ribbon.mgz") presurf_seg = File(exists=True, desc="Input segmentation volume") transform = File(mandatory=True, exists=True, desc="Input transform file") - lh_orig_nofix = File( - mandatory=True, exists=True, desc="Input lh.orig.nofix") - rh_orig_nofix = File( - mandatory=True, exists=True, desc="Input rh.orig.nofix") + lh_orig_nofix = File(mandatory=True, exists=True, desc="Input lh.orig.nofix") + rh_orig_nofix = File(mandatory=True, exists=True, desc="Input rh.orig.nofix") lh_white = File( mandatory=True, exists=True, - desc="Input file must be <subject_id>/surf/lh.white") + desc="Input file must be <subject_id>/surf/lh.white", + ) rh_white = File( mandatory=True, exists=True, - desc="Input file must be <subject_id>/surf/rh.white") + desc="Input file must be <subject_id>/surf/rh.white", + ) lh_pial = File( - mandatory=True, - exists=True, - desc="Input file must be <subject_id>/surf/lh.pial") + mandatory=True, exists=True, desc="Input file must be <subject_id>/surf/lh.pial" + ) rh_pial = File( - mandatory=True, - exists=True, - desc="Input file must be <subject_id>/surf/rh.pial") + mandatory=True, exists=True, desc="Input file must be <subject_id>/surf/rh.pial" + ) aseg = File(exists=True, desc="Mandatory implicit input in 5.3") - copy_inputs = traits.Bool(desc="If running as a node, set this to True " + - "otherwise, this will copy the implicit inputs " - + "to the node directory.") + copy_inputs = traits.Bool( + desc="If running as a node, set this to True; " + + "this will copy the implicit inputs " + + "to the node directory." 
+ ) class SegStatsReconAll(SegStats): @@ -1053,117 +1119,128 @@ class SegStatsReconAll(SegStats): >>> segstatsreconall.cmdline 'mri_segstats --annot PWS04 lh aparc --avgwf ./avgwf.txt --brain-vol-from-seg --surf-ctx-vol --empty --etiv --euler --excl-ctxgmwm --excludeid 0 --subcortgray --subject 10335 --supratent --totalgray --surf-wm-vol --sum ./summary.stats' """ + input_spec = SegStatsReconAllInputSpec output_spec = SegStatsOutputSpec def _format_arg(self, name, spec, value): - if name == 'brainmask_file': + if name == "brainmask_file": return spec.argstr % os.path.basename(value) return super(SegStatsReconAll, self)._format_arg(name, spec, value) def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir - copy2subjdir(self, self.inputs.lh_orig_nofix, 'surf', - 'lh.orig.nofix') - copy2subjdir(self, self.inputs.rh_orig_nofix, 'surf', - 'rh.orig.nofix') - copy2subjdir(self, self.inputs.lh_white, 'surf', 'lh.white') - copy2subjdir(self, self.inputs.rh_white, 'surf', 'rh.white') - copy2subjdir(self, self.inputs.lh_pial, 'surf', 'lh.pial') - copy2subjdir(self, self.inputs.rh_pial, 'surf', 'rh.pial') - copy2subjdir(self, self.inputs.ribbon, 'mri', 'ribbon.mgz') - copy2subjdir(self, self.inputs.presurf_seg, 'mri', - 'aseg.presurf.mgz') - copy2subjdir(self, self.inputs.aseg, 'mri', 'aseg.mgz') - copy2subjdir(self, self.inputs.transform, - os.path.join('mri', 'transforms'), 'talairach.xfm') - copy2subjdir(self, self.inputs.in_intensity, 'mri') - copy2subjdir(self, self.inputs.brainmask_file, 'mri') + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir + copy2subjdir(self, self.inputs.lh_orig_nofix, "surf", "lh.orig.nofix") + copy2subjdir(self, self.inputs.rh_orig_nofix, "surf", "rh.orig.nofix") + copy2subjdir(self, self.inputs.lh_white, "surf", "lh.white") + copy2subjdir(self, self.inputs.rh_white, "surf", "rh.white") + copy2subjdir(self, self.inputs.lh_pial, "surf", "lh.pial") + copy2subjdir(self, self.inputs.rh_pial, "surf", "rh.pial") + copy2subjdir(self, self.inputs.ribbon, "mri", "ribbon.mgz") + copy2subjdir(self, self.inputs.presurf_seg, "mri", "aseg.presurf.mgz") + copy2subjdir(self, self.inputs.aseg, "mri", "aseg.mgz") + copy2subjdir( + self, + self.inputs.transform, + os.path.join("mri", "transforms"), + "talairach.xfm", + ) + copy2subjdir(self, self.inputs.in_intensity, "mri") + copy2subjdir(self, self.inputs.brainmask_file, "mri") return super(SegStatsReconAll, self).run(**inputs) class Label2VolInputSpec(FSTraitedSpec): label_file = InputMultiPath( File(exists=True), - argstr='--label %s...', - xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), + argstr="--label %s...", + xor=("label_file", "annot_file", "seg_file", "aparc_aseg"), copyfile=False, mandatory=True, - desc='list of label files') + desc="list of label files", + ) annot_file = File( exists=True, - argstr='--annot %s', - xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), - requires=('subject_id', 'hemi'), + argstr="--annot %s", + xor=("label_file", "annot_file", "seg_file", "aparc_aseg"), + requires=("subject_id", "hemi"), mandatory=True, copyfile=False, - desc='surface annotation file') + desc="surface annotation file", + ) seg_file = File( exists=True, - argstr='--seg %s', - xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), + argstr="--seg %s", + xor=("label_file", "annot_file", "seg_file", "aparc_aseg"), mandatory=True, copyfile=False, - 
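# ---- [Editor's sketch: illustrative only, not part of the patch] ----
# The SegStats hunks above change only quoting and wrapping, but the
# output-naming logic in _list_outputs/_format_arg is worth exercising.
# A minimal, hedged example; the file names are hypothetical:
from nipype.interfaces.freesurfer import SegStats

segstats = SegStats()
segstats.inputs.segmentation_file = "aseg.mgz"  # hypothetical input
segstats.inputs.summary_file = "summary.stats"
segstats.inputs.avgwf_txt_file = True  # a bool, so the name is derived
# With a boolean, _format_arg consults _list_outputs, which derives the
# waveform name via fname_presuffix(..., use_ext=False): ./aseg_avgwf.txt.
# A relative filename would instead be prefixed with './' by _format_arg.
# ---- [end sketch] ----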
desc='segmentation file') + desc="segmentation file", + ) aparc_aseg = traits.Bool( - argstr='--aparc+aseg', - xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), + argstr="--aparc+aseg", + xor=("label_file", "annot_file", "seg_file", "aparc_aseg"), mandatory=True, - desc='use aparc+aseg.mgz in subjectdir as seg') + desc="use aparc+aseg.mgz in subjectdir as seg", + ) template_file = File( - exists=True, - argstr='--temp %s', - mandatory=True, - desc='output template volume') + exists=True, argstr="--temp %s", mandatory=True, desc="output template volume" + ) reg_file = File( exists=True, - argstr='--reg %s', - xor=('reg_file', 'reg_header', 'identity'), - desc='tkregister style matrix VolXYZ = R*LabelXYZ') + argstr="--reg %s", + xor=("reg_file", "reg_header", "identity"), + desc="tkregister style matrix VolXYZ = R*LabelXYZ", + ) reg_header = File( exists=True, - argstr='--regheader %s', - xor=('reg_file', 'reg_header', 'identity'), - desc='label template volume') + argstr="--regheader %s", + xor=("reg_file", "reg_header", "identity"), + desc="label template volume", + ) identity = traits.Bool( - argstr='--identity', - xor=('reg_file', 'reg_header', 'identity'), - desc='set R=I') + argstr="--identity", xor=("reg_file", "reg_header", "identity"), desc="set R=I" + ) invert_mtx = traits.Bool( - argstr='--invertmtx', desc='Invert the registration matrix') + argstr="--invertmtx", desc="Invert the registration matrix" + ) fill_thresh = traits.Range( - 0., 1., argstr='--fillthresh %g', desc='thresh : between 0 and 1') + 0.0, 1.0, argstr="--fillthresh %g", desc="thresh : between 0 and 1" + ) label_voxel_volume = traits.Float( - argstr='--labvoxvol %f', desc='volume of each label point (def 1mm3)') + argstr="--labvoxvol %f", desc="volume of each label point (def 1mm3)" + ) proj = traits.Tuple( - traits.Enum('abs', 'frac'), + traits.Enum("abs", "frac"), traits.Float, traits.Float, traits.Float, - argstr='--proj %s %f %f %f', - requires=('subject_id', 'hemi'), - desc='project along surface normal') - subject_id = traits.Str(argstr='--subject %s', desc='subject id') + argstr="--proj %s %f %f %f", + requires=("subject_id", "hemi"), + desc="project along surface normal", + ) + subject_id = traits.Str(argstr="--subject %s", desc="subject id") hemi = traits.Enum( - 'lh', 'rh', argstr='--hemi %s', desc='hemisphere to use lh or rh') - surface = traits.Str( - argstr='--surf %s', desc='use surface instead of white') - vol_label_file = File(argstr='--o %s', genfile=True, desc='output volume') + "lh", "rh", argstr="--hemi %s", desc="hemisphere to use lh or rh" + ) + surface = traits.Str(argstr="--surf %s", desc="use surface instead of white") + vol_label_file = File(argstr="--o %s", genfile=True, desc="output volume") label_hit_file = File( - argstr='--hits %s', desc='file with each frame is nhits for a label') + argstr="--hits %s", desc="file with each frame is nhits for a label" + ) map_label_stat = File( - argstr='--label-stat %s', - desc='map the label stats field into the vol') + argstr="--label-stat %s", desc="map the label stats field into the vol" + ) native_vox2ras = traits.Bool( - argstr='--native-vox2ras', - desc='use native vox2ras xform instead of tkregister-style') + argstr="--native-vox2ras", + desc="use native vox2ras xform instead of tkregister-style", + ) class Label2VolOutputSpec(TraitedSpec): - vol_label_file = File(exists=True, desc='output volume') + vol_label_file = File(exists=True, desc="output volume") class Label2Vol(FSCommand): @@ -1178,7 +1255,7 @@ class Label2Vol(FSCommand): 
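# ---- [Editor's sketch: illustrative only, not part of the patch] ----
# For the Label2Vol spec above (its _list_outputs follows below), a
# minimal, hedged invocation; the label, template, and registration
# files are hypothetical placeholders:
from nipype.interfaces.freesurfer import Label2Vol

l2v = Label2Vol(
    label_file=["cortex.label"],  # hypothetical label (InputMultiPath)
    template_file="structural.nii",  # hypothetical template volume
    reg_file="register.dat",  # hypothetical tkregister-style matrix
)
# Leaving vol_label_file unset triggers the genfile path below, which
# derives ./cortex_vol.nii.gz from the label's basename.
# ---- [end sketch] ----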
""" - _cmd = 'mri_label2vol' + _cmd = "mri_label2vol" input_spec = Label2VolInputSpec output_spec = Label2VolOutputSpec @@ -1186,21 +1263,22 @@ def _list_outputs(self): outputs = self.output_spec().get() outfile = self.inputs.vol_label_file if not isdefined(outfile): - for key in ['label_file', 'annot_file', 'seg_file']: + for key in ["label_file", "annot_file", "seg_file"]: if isdefined(getattr(self.inputs, key)): path = getattr(self.inputs, key) if isinstance(path, list): path = path[0] _, src = os.path.split(path) if isdefined(self.inputs.aparc_aseg): - src = 'aparc+aseg.mgz' + src = "aparc+aseg.mgz" outfile = fname_presuffix( - src, suffix='_vol.nii.gz', newpath=os.getcwd(), use_ext=False) - outputs['vol_label_file'] = outfile + src, suffix="_vol.nii.gz", newpath=os.getcwd(), use_ext=False + ) + outputs["vol_label_file"] = outfile return outputs def _gen_filename(self, name): - if name == 'vol_label_file': + if name == "vol_label_file": return self._list_outputs()[name] return None @@ -1208,51 +1286,53 @@ def _gen_filename(self, name): class MS_LDAInputSpec(FSTraitedSpec): lda_labels = traits.List( traits.Int(), - argstr='-lda %s', + argstr="-lda %s", mandatory=True, minlen=2, maxlen=2, - sep=' ', - desc='pair of class labels to optimize') + sep=" ", + desc="pair of class labels to optimize", + ) weight_file = File( - argstr='-weight %s', + argstr="-weight %s", mandatory=True, - desc='filename for the LDA weights (input or output)') + desc="filename for the LDA weights (input or output)", + ) vol_synth_file = File( exists=False, - argstr='-synth %s', + argstr="-synth %s", mandatory=True, - desc=('filename for the synthesized output ' - 'volume')) + desc=("filename for the synthesized output " "volume"), + ) label_file = File( - exists=True, argstr='-label %s', desc='filename of the label volume') + exists=True, argstr="-label %s", desc="filename of the label volume" + ) mask_file = File( - exists=True, - argstr='-mask %s', - desc='filename of the brain mask volume') + exists=True, argstr="-mask %s", desc="filename of the brain mask volume" + ) shift = traits.Int( - argstr='-shift %d', - desc='shift all values equal to the given value to zero') + argstr="-shift %d", desc="shift all values equal to the given value to zero" + ) conform = traits.Bool( - argstr='-conform', - desc=('Conform the input volumes (brain mask ' - 'typically already conformed)')) + argstr="-conform", + desc=("Conform the input volumes (brain mask " "typically already conformed)"), + ) use_weights = traits.Bool( - argstr='-W', - desc=('Use the weights from a previously ' - 'generated weight file')) + argstr="-W", desc=("Use the weights from a previously " "generated weight file") + ) images = InputMultiPath( File(exists=True), - argstr='%s', + argstr="%s", mandatory=True, copyfile=False, - desc='list of input FLASH images', - position=-1) + desc="list of input FLASH images", + position=-1, + ) class MS_LDAOutputSpec(TraitedSpec): - weight_file = File(exists=True, desc='') - vol_synth_file = File(exists=True, desc='') + weight_file = File(exists=True, desc="") + vol_synth_file = File(exists=True, desc="") class MS_LDA(FSCommand): @@ -1273,34 +1353,32 @@ class MS_LDA(FSCommand): 'mri_ms_LDA -conform -label label.mgz -lda 2 3 -shift 1 -W -synth synth_out.mgz -weight weights.txt FLASH1.mgz FLASH2.mgz FLASH3.mgz' """ - _cmd = 'mri_ms_LDA' + _cmd = "mri_ms_LDA" input_spec = MS_LDAInputSpec output_spec = MS_LDAOutputSpec def _list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.output_synth): - 
outputs['vol_synth_file'] = os.path.abspath(
-                self.inputs.output_synth)
+            outputs["vol_synth_file"] = os.path.abspath(self.inputs.output_synth)
         else:
-            outputs['vol_synth_file'] = os.path.abspath(
-                self.inputs.vol_synth_file)
-        if not isdefined(
-                self.inputs.use_weights) or self.inputs.use_weights is False:
-            outputs['weight_file'] = os.path.abspath(self.inputs.weight_file)
+            outputs["vol_synth_file"] = os.path.abspath(self.inputs.vol_synth_file)
+        if not isdefined(self.inputs.use_weights) or self.inputs.use_weights is False:
+            outputs["weight_file"] = os.path.abspath(self.inputs.weight_file)
         return outputs

     def _verify_weights_file_exists(self):
         if not os.path.exists(os.path.abspath(self.inputs.weight_file)):
             raise traits.TraitError(
-                "MS_LDA: use_weights must accompany an existing weights file")
+                "MS_LDA: use_weights must accompany an existing weights file"
+            )

     def _format_arg(self, name, spec, value):
-        if name == 'use_weights':
+        if name == "use_weights":
             if self.inputs.use_weights is True:
                 self._verify_weights_file_exists()
             else:
-                return ''
+                return ""
         # TODO: Fix bug when boolean values are set explicitly to false
         return super(MS_LDA, self)._format_arg(name, spec, value)
@@ -1310,57 +1388,56 @@ def _gen_filename(self, name):

 class Label2LabelInputSpec(FSTraitedSpec):
     hemisphere = traits.Enum(
-        'lh',
-        'rh',
-        argstr="--hemi %s",
-        mandatory=True,
-        desc="Input hemisphere")
+        "lh", "rh", argstr="--hemi %s", mandatory=True, desc="Input hemisphere"
+    )
     subject_id = traits.String(
-        'subject_id',
+        "subject_id",
         usedefault=True,
         argstr="--trgsubject %s",
         mandatory=True,
-        desc="Target subject")
+        desc="Target subject",
+    )
     sphere_reg = File(
-        mandatory=True,
-        exists=True,
-        desc="Implicit input .sphere.reg")
+        mandatory=True, exists=True, desc="Implicit input .sphere.reg"
+    )
-    white = File(
-        mandatory=True, exists=True, desc="Implicit input .white")
+    white = File(mandatory=True, exists=True, desc="Implicit input .white")
     source_sphere_reg = File(
-        mandatory=True,
-        exists=True,
-        desc="Implicit input .sphere.reg")
+        mandatory=True, exists=True, desc="Implicit input .sphere.reg"
+    )
     source_white = File(
-        mandatory=True, exists=True, desc="Implicit input .white")
+        mandatory=True, exists=True, desc="Implicit input .white"
+    )
     source_label = File(
-        argstr="--srclabel %s",
-        mandatory=True,
-        exists=True,
-        desc="Source label")
+        argstr="--srclabel %s", mandatory=True, exists=True, desc="Source label"
+    )
     source_subject = traits.String(
-        argstr="--srcsubject %s", mandatory=True, desc="Source subject name")
+        argstr="--srcsubject %s", mandatory=True, desc="Source subject name"
+    )
     # optional
     out_file = File(
         argstr="--trglabel %s",
-        name_source=['source_label'],
-        name_template='%s_converted',
+        name_source=["source_label"],
+        name_template="%s_converted",
         hash_files=False,
         keep_extension=True,
-        desc="Target label")
+        desc="Target label",
+    )
     registration_method = traits.Enum(
-        'surface',
-        'volume',
+        "surface",
+        "volume",
         usedefault=True,
         argstr="--regmethod %s",
-        desc="Registration method")
+        desc="Registration method",
+    )
     copy_inputs = traits.Bool(
-        desc="If running as a node, set this to True." +
-        "This will copy the input files to the node " + "directory.")
+        desc="If running as a node, set this to True. "
+        + "This will copy the input files to the node "
+        + "directory."
+ ) class Label2LabelOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='Output label') + out_file = File(exists=True, desc="Output label") class Label2Label(FSCommand): @@ -1391,43 +1468,49 @@ class Label2Label(FSCommand): 'mri_label2label --hemi lh --trglabel lh-pial_converted.stl --regmethod surface --srclabel lh-pial.stl --srcsubject fsaverage --trgsubject 10335' """ - _cmd = 'mri_label2label' + _cmd = "mri_label2label" input_spec = Label2LabelInputSpec output_spec = Label2LabelOutputSpec def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'label', - self.inputs.out_file) + outputs["out_file"] = os.path.join( + self.inputs.subjects_dir, + self.inputs.subject_id, + "label", + self.inputs.out_file, + ) return outputs def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir hemi = self.inputs.hemisphere - copy2subjdir(self, self.inputs.sphere_reg, 'surf', - '{0}.sphere.reg'.format(hemi)) - copy2subjdir(self, self.inputs.white, 'surf', - '{0}.white'.format(hemi)) + copy2subjdir( + self, self.inputs.sphere_reg, "surf", "{0}.sphere.reg".format(hemi) + ) + copy2subjdir(self, self.inputs.white, "surf", "{0}.white".format(hemi)) copy2subjdir( self, self.inputs.source_sphere_reg, - 'surf', - '{0}.sphere.reg'.format(hemi), - subject_id=self.inputs.source_subject) + "surf", + "{0}.sphere.reg".format(hemi), + subject_id=self.inputs.source_subject, + ) copy2subjdir( self, self.inputs.source_white, - 'surf', - '{0}.white'.format(hemi), - subject_id=self.inputs.source_subject) + "surf", + "{0}.white".format(hemi), + subject_id=self.inputs.source_subject, + ) # label dir must exist in order for output file to be written - label_dir = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'label') + label_dir = os.path.join( + self.inputs.subjects_dir, self.inputs.subject_id, "label" + ) if not os.path.isdir(label_dir): os.makedirs(label_dir) @@ -1437,42 +1520,41 @@ def run(self, **inputs): class Label2AnnotInputSpec(FSTraitedSpec): # required hemisphere = traits.Enum( - 'lh', - 'rh', - argstr="--hemi %s", - mandatory=True, - desc="Input hemisphere") + "lh", "rh", argstr="--hemi %s", mandatory=True, desc="Input hemisphere" + ) subject_id = traits.String( - 'subject_id', + "subject_id", usedefault=True, argstr="--s %s", mandatory=True, - desc="Subject name/ID") + desc="Subject name/ID", + ) in_labels = traits.List( - argstr="--l %s...", mandatory=True, desc="List of input label files") + argstr="--l %s...", mandatory=True, desc="List of input label files" + ) out_annot = traits.String( - argstr="--a %s", - mandatory=True, - desc="Name of the annotation to create") + argstr="--a %s", mandatory=True, desc="Name of the annotation to create" + ) orig = File(exists=True, mandatory=True, desc="implicit {hemisphere}.orig") # optional keep_max = traits.Bool( - argstr="--maxstatwinner", desc="Keep label with highest 'stat' value") + argstr="--maxstatwinner", desc="Keep label with highest 'stat' value" + ) verbose_off = traits.Bool( - argstr="--noverbose", - desc="Turn off overlap and stat override messages") + argstr="--noverbose", desc="Turn off overlap and stat override messages" + ) color_table = File( argstr="--ctab %s", exists=True, - desc= - "File that defines the structure names, their indices, 
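# ---- [Editor's sketch: illustrative only, not part of the patch] ----
# Label2Label.run above stages the implicit surfaces with copy2subjdir
# and then pre-creates the label/ directory, because mri_label2label
# will not create it. The directory guard, reduced to its core (paths
# hypothetical):
import os


def ensure_label_dir(subjects_dir, subject_id):
    # Mirrors the guard in Label2Label.run: the output dir must pre-exist.
    label_dir = os.path.join(subjects_dir, subject_id, "label")
    if not os.path.isdir(label_dir):
        os.makedirs(label_dir)
    return label_dir
# ---- [end sketch] ----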
and their color" + desc="File that defines the structure names, their indices, and their color", ) copy_inputs = traits.Bool( - desc="copy implicit inputs and create a temp subjects_dir") + desc="copy implicit inputs and create a temp subjects_dir" + ) class Label2AnnotOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='Output annotation file') + out_file = File(exists=True, desc="Output annotation file") class Label2Annot(FSCommand): @@ -1492,23 +1574,25 @@ class Label2Annot(FSCommand): 'mris_label2annot --hemi lh --l lh.aparc.label --a test --s 10335' """ - _cmd = 'mris_label2annot' + _cmd = "mris_label2annot" input_spec = Label2AnnotInputSpec output_spec = Label2AnnotOutputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir copy2subjdir( self, self.inputs.orig, - folder='surf', - basename='{0}.orig'.format(self.inputs.hemisphere)) + folder="surf", + basename="{0}.orig".format(self.inputs.hemisphere), + ) # label dir must exist in order for output file to be written - label_dir = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'label') + label_dir = os.path.join( + self.inputs.subjects_dir, self.inputs.subject_id, "label" + ) if not os.path.isdir(label_dir): os.makedirs(label_dir) return super(Label2Annot, self).run(**inputs) @@ -1516,39 +1600,27 @@ def run(self, **inputs): def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.join( - str(self.inputs.subjects_dir), str(self.inputs.subject_id), - 'label', - str(self.inputs.hemisphere) + '.' + str(self.inputs.out_annot) + - '.annot') + str(self.inputs.subjects_dir), + str(self.inputs.subject_id), + "label", + str(self.inputs.hemisphere) + "." + str(self.inputs.out_annot) + ".annot", + ) return outputs class SphericalAverageInputSpec(FSTraitedSpec): out_file = File( - argstr="%s", - genfile=True, - exists=False, - position=-1, - desc="Output filename") + argstr="%s", genfile=True, exists=False, position=-1, desc="Output filename" + ) in_average = Directory( - argstr="%s", - exists=True, - genfile=True, - position=-2, - desc="Average subject") + argstr="%s", exists=True, genfile=True, position=-2, desc="Average subject" + ) in_surf = File( - argstr="%s", - mandatory=True, - exists=True, - position=-3, - desc="Input surface file") + argstr="%s", mandatory=True, exists=True, position=-3, desc="Input surface file" + ) hemisphere = traits.Enum( - 'lh', - 'rh', - argstr="%s", - mandatory=True, - position=-4, - desc="Input hemisphere") + "lh", "rh", argstr="%s", mandatory=True, position=-4, desc="Input hemisphere" + ) fname = traits.String( argstr="%s", mandatory=True, @@ -1558,28 +1630,28 @@ class SphericalAverageInputSpec(FSTraitedSpec): filename, set fname to 'rh.entorhinal' and which to 'label'. 
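# ---- [Editor's sketch: illustrative only, not part of the patch] ----
# Label2Annot's output path is assembled rather than reported by the
# tool; per _list_outputs above it is
# <subjects_dir>/<subject_id>/label/<hemisphere>.<out_annot>.annot.
# A hedged check with hypothetical values (POSIX paths assumed):
import os

out_file = os.path.join("/subjects", "10335", "label", "lh" + "." + "test" + ".annot")
assert out_file == "/subjects/10335/label/lh.test.annot"
# ---- [end sketch] ----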
The program will then search for '{in_average}/label/rh.entorhinal.label' - """) + """, + ) which = traits.Enum( - 'coords', - 'label', - 'vals', - 'curv', - 'area', + "coords", + "label", + "vals", + "curv", + "area", argstr="%s", mandatory=True, position=-6, - desc="No documentation") - subject_id = traits.String( - argstr="-o %s", mandatory=True, desc="Output subject id") + desc="No documentation", + ) + subject_id = traits.String(argstr="-o %s", mandatory=True, desc="Output subject id") # optional erode = traits.Int(argstr="-erode %d", desc="Undocumented") - in_orig = File( - argstr="-orig %s", exists=True, desc="Original surface filename") + in_orig = File(argstr="-orig %s", exists=True, desc="Original surface filename") threshold = traits.Float(argstr="-t %.1f", desc="Undocumented") class SphericalAverageOutputSpec(TraitedSpec): - out_file = File(exists=False, desc='Output label') + out_file = File(exists=False, desc="Output label") class SphericalAverage(FSCommand): @@ -1603,26 +1675,26 @@ class SphericalAverage(FSCommand): 'mris_spherical_average -erode 2 -o 10335 -t 5.0 label lh.entorhinal lh pial . test.out' """ - _cmd = 'mris_spherical_average' + _cmd = "mris_spherical_average" input_spec = SphericalAverageInputSpec output_spec = SphericalAverageOutputSpec def _format_arg(self, name, spec, value): - if name == 'in_orig' or name == 'in_surf': + if name == "in_orig" or name == "in_surf": surf = os.path.basename(value) - for item in ['lh.', 'rh.']: - surf = surf.replace(item, '') + for item in ["lh.", "rh."]: + surf = surf.replace(item, "") return spec.argstr % surf return super(SphericalAverage, self)._format_arg(name, spec, value) def _gen_filename(self, name): - if name == 'in_average': - avg_subject = str(self.inputs.hemisphere) + '.EC_average' + if name == "in_average": + avg_subject = str(self.inputs.hemisphere) + ".EC_average" avg_directory = os.path.join(self.inputs.subjects_dir, avg_subject) if not os.path.isdir(avg_directory): - fs_home = os.path.abspath(os.environ.get('FREESURFER_HOME')) + fs_home = os.path.abspath(os.environ.get("FREESURFER_HOME")) return avg_subject - elif name == 'out_file': + elif name == "out_file": return self._list_outputs()[name] else: return None @@ -1630,15 +1702,15 @@ def _gen_filename(self, name): def _list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.out_file): - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) else: - out_dir = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'label') + out_dir = os.path.join( + self.inputs.subjects_dir, self.inputs.subject_id, "label" + ) if isdefined(self.inputs.in_average): basename = os.path.basename(self.inputs.in_average) - basename = basename.replace('_', '_exvivo_') + '.label' + basename = basename.replace("_", "_exvivo_") + ".label" else: - basename = str( - self.inputs.hemisphere) + '.EC_exvivo_average.label' - outputs['out_file'] = os.path.join(out_dir, basename) + basename = str(self.inputs.hemisphere) + ".EC_exvivo_average.label" + outputs["out_file"] = os.path.join(out_dir, basename) return outputs diff --git a/nipype/interfaces/freesurfer/preprocess.py b/nipype/interfaces/freesurfer/preprocess.py index 020d1b7899..f7e09f7629 100644 --- a/nipype/interfaces/freesurfer/preprocess.py +++ b/nipype/interfaces/freesurfer/preprocess.py @@ -14,15 +14,22 @@ from ... 
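# ---- [Editor's sketch: illustrative only, not part of the patch] ----
# In SphericalAverage above, _format_arg strips any 'lh.'/'rh.' prefix
# from in_surf and in_orig before substituting them into the command
# line. The same normalization, isolated (file name hypothetical):
import os


def strip_hemi_prefix(path):
    # Mirrors SphericalAverage._format_arg for in_surf / in_orig.
    surf = os.path.basename(path)
    for item in ["lh.", "rh."]:
        surf = surf.replace(item, "")
    return surf


assert strip_hemi_prefix("/surf/lh.pial") == "pial"
# ---- [end sketch] ----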
import logging, LooseVersion from ...utils.filemanip import fname_presuffix, check_depends from ..io import FreeSurferSource -from ..base import (TraitedSpec, File, traits, Directory, InputMultiPath, - OutputMultiPath, CommandLine, CommandLineInputSpec, - isdefined) -from .base import (FSCommand, FSTraitedSpec, FSTraitedSpecOpenMP, - FSCommandOpenMP, Info) +from ..base import ( + TraitedSpec, + File, + traits, + Directory, + InputMultiPath, + OutputMultiPath, + CommandLine, + CommandLineInputSpec, + isdefined, +) +from .base import FSCommand, FSTraitedSpec, FSTraitedSpecOpenMP, FSCommandOpenMP, Info from .utils import copy2subjdir -__docformat__ = 'restructuredtext' -iflogger = logging.getLogger('nipype.interface') +__docformat__ = "restructuredtext" +iflogger = logging.getLogger("nipype.interface") # Keeping this to avoid breaking external programs that depend on it, but # this should not be used internally @@ -32,22 +39,24 @@ class ParseDICOMDirInputSpec(FSTraitedSpec): dicom_dir = Directory( exists=True, - argstr='--d %s', + argstr="--d %s", mandatory=True, - desc='path to siemens dicom directory') + desc="path to siemens dicom directory", + ) dicom_info_file = File( - 'dicominfo.txt', - argstr='--o %s', + "dicominfo.txt", + argstr="--o %s", usedefault=True, - desc='file to which results are written') - sortbyrun = traits.Bool(argstr='--sortbyrun', desc='assign run numbers') + desc="file to which results are written", + ) + sortbyrun = traits.Bool(argstr="--sortbyrun", desc="assign run numbers") summarize = traits.Bool( - argstr='--summarize', desc='only print out info for run leaders') + argstr="--summarize", desc="only print out info for run leaders" + ) class ParseDICOMDirOutputSpec(TraitedSpec): - dicom_info_file = File( - exists=True, desc='text file containing dicom information') + dicom_info_file = File(exists=True, desc="text file containing dicom information") class ParseDICOMDir(FSCommand): @@ -66,66 +75,72 @@ class ParseDICOMDir(FSCommand): """ - _cmd = 'mri_parse_sdcmdir' + _cmd = "mri_parse_sdcmdir" input_spec = ParseDICOMDirInputSpec output_spec = ParseDICOMDirOutputSpec def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.dicom_info_file): - outputs['dicom_info_file'] = os.path.join( - os.getcwd(), self.inputs.dicom_info_file) + outputs["dicom_info_file"] = os.path.join( + os.getcwd(), self.inputs.dicom_info_file + ) return outputs class UnpackSDICOMDirInputSpec(FSTraitedSpec): source_dir = Directory( exists=True, - argstr='-src %s', + argstr="-src %s", mandatory=True, - desc='directory with the DICOM files') + desc="directory with the DICOM files", + ) output_dir = Directory( - argstr='-targ %s', - desc='top directory into which the files will be unpacked') + argstr="-targ %s", desc="top directory into which the files will be unpacked" + ) run_info = traits.Tuple( traits.Int, traits.Str, traits.Str, traits.Str, mandatory=True, - argstr='-run %d %s %s %s', - xor=('run_info', 'config', 'seq_config'), - desc='runno subdir format name : spec unpacking rules on cmdline') + argstr="-run %d %s %s %s", + xor=("run_info", "config", "seq_config"), + desc="runno subdir format name : spec unpacking rules on cmdline", + ) config = File( exists=True, - argstr='-cfg %s', + argstr="-cfg %s", mandatory=True, - xor=('run_info', 'config', 'seq_config'), - desc='specify unpacking rules in file') + xor=("run_info", "config", "seq_config"), + desc="specify unpacking rules in file", + ) seq_config = File( exists=True, - argstr='-seqcfg %s', + argstr="-seqcfg 
%s", mandatory=True, - xor=('run_info', 'config', 'seq_config'), - desc='specify unpacking rules based on sequence') + xor=("run_info", "config", "seq_config"), + desc="specify unpacking rules based on sequence", + ) dir_structure = traits.Enum( - 'fsfast', - 'generic', - argstr='-%s', - desc='unpack to specified directory structures') - no_info_dump = traits.Bool( - argstr='-noinfodump', desc='do not create infodump file') + "fsfast", + "generic", + argstr="-%s", + desc="unpack to specified directory structures", + ) + no_info_dump = traits.Bool(argstr="-noinfodump", desc="do not create infodump file") scan_only = File( exists=True, - argstr='-scanonly %s', - desc='only scan the directory and put result in file') - log_file = File( - exists=True, argstr='-log %s', desc='explicilty set log file') + argstr="-scanonly %s", + desc="only scan the directory and put result in file", + ) + log_file = File(exists=True, argstr="-log %s", desc="explicilty set log file") spm_zeropad = traits.Int( - argstr='-nspmzeropad %d', - desc='set frame number zero padding width for SPM') + argstr="-nspmzeropad %d", desc="set frame number zero padding width for SPM" + ) no_unpack_err = traits.Bool( - argstr='-no-unpackerr', desc='do not try to unpack runs with errors') + argstr="-no-unpackerr", desc="do not try to unpack runs with errors" + ) class UnpackSDICOMDir(FSCommand): @@ -146,272 +161,344 @@ class UnpackSDICOMDir(FSCommand): >>> unpack.cmdline 'unpacksdcmdir -generic -targ . -run 5 mprage nii struct -src .' """ - _cmd = 'unpacksdcmdir' + + _cmd = "unpacksdcmdir" input_spec = UnpackSDICOMDirInputSpec class MRIConvertInputSpec(FSTraitedSpec): - read_only = traits.Bool(argstr='--read_only', desc='read the input volume') - no_write = traits.Bool(argstr='--no_write', desc='do not write output') - in_info = traits.Bool(argstr='--in_info', desc='display input info') - out_info = traits.Bool(argstr='--out_info', desc='display output info') - in_stats = traits.Bool(argstr='--in_stats', desc='display input stats') - out_stats = traits.Bool(argstr='--out_stats', desc='display output stats') - in_matrix = traits.Bool(argstr='--in_matrix', desc='display input matrix') - out_matrix = traits.Bool( - argstr='--out_matrix', desc='display output matrix') - in_i_size = traits.Int(argstr='--in_i_size %d', desc='input i size') - in_j_size = traits.Int(argstr='--in_j_size %d', desc='input j size') - in_k_size = traits.Int(argstr='--in_k_size %d', desc='input k size') + read_only = traits.Bool(argstr="--read_only", desc="read the input volume") + no_write = traits.Bool(argstr="--no_write", desc="do not write output") + in_info = traits.Bool(argstr="--in_info", desc="display input info") + out_info = traits.Bool(argstr="--out_info", desc="display output info") + in_stats = traits.Bool(argstr="--in_stats", desc="display input stats") + out_stats = traits.Bool(argstr="--out_stats", desc="display output stats") + in_matrix = traits.Bool(argstr="--in_matrix", desc="display input matrix") + out_matrix = traits.Bool(argstr="--out_matrix", desc="display output matrix") + in_i_size = traits.Int(argstr="--in_i_size %d", desc="input i size") + in_j_size = traits.Int(argstr="--in_j_size %d", desc="input j size") + in_k_size = traits.Int(argstr="--in_k_size %d", desc="input k size") force_ras = traits.Bool( - argstr='--force_ras_good', - desc='use default when orientation info absent') + argstr="--force_ras_good", desc="use default when orientation info absent" + ) in_i_dir = traits.Tuple( traits.Float, traits.Float, traits.Float, - 
argstr='--in_i_direction %f %f %f', - desc=' ') + argstr="--in_i_direction %f %f %f", + desc=" ", + ) in_j_dir = traits.Tuple( traits.Float, traits.Float, traits.Float, - argstr='--in_j_direction %f %f %f', - desc=' ') + argstr="--in_j_direction %f %f %f", + desc=" ", + ) in_k_dir = traits.Tuple( traits.Float, traits.Float, traits.Float, - argstr='--in_k_direction %f %f %f', - desc=' ') + argstr="--in_k_direction %f %f %f", + desc=" ", + ) _orientations = [ - 'LAI', 'LIA', 'ALI', 'AIL', 'ILA', 'IAL', 'LAS', 'LSA', 'ALS', 'ASL', - 'SLA', 'SAL', 'LPI', 'LIP', 'PLI', 'PIL', 'ILP', 'IPL', 'LPS', 'LSP', - 'PLS', 'PSL', 'SLP', 'SPL', 'RAI', 'RIA', 'ARI', 'AIR', 'IRA', 'IAR', - 'RAS', 'RSA', 'ARS', 'ASR', 'SRA', 'SAR', 'RPI', 'RIP', 'PRI', 'PIR', - 'IRP', 'IPR', 'RPS', 'RSP', 'PRS', 'PSR', 'SRP', 'SPR' + "LAI", + "LIA", + "ALI", + "AIL", + "ILA", + "IAL", + "LAS", + "LSA", + "ALS", + "ASL", + "SLA", + "SAL", + "LPI", + "LIP", + "PLI", + "PIL", + "ILP", + "IPL", + "LPS", + "LSP", + "PLS", + "PSL", + "SLP", + "SPL", + "RAI", + "RIA", + "ARI", + "AIR", + "IRA", + "IAR", + "RAS", + "RSA", + "ARS", + "ASR", + "SRA", + "SAR", + "RPI", + "RIP", + "PRI", + "PIR", + "IRP", + "IPR", + "RPS", + "RSP", + "PRS", + "PSR", + "SRP", + "SPR", ] # _orientations = [comb for comb in itertools.chain(*[[''.join(c) for c in itertools.permutations(s)] for s in [a+b+c for a in 'LR' for b in 'AP' for c in 'IS']])] in_orientation = traits.Enum( _orientations, - argstr='--in_orientation %s', - desc='specify the input orientation') + argstr="--in_orientation %s", + desc="specify the input orientation", + ) in_center = traits.List( traits.Float, maxlen=3, - argstr='--in_center %s', - desc=' ') - sphinx = traits.Bool( - argstr='--sphinx', desc='change orientation info to sphinx') + argstr="--in_center %s", + desc=" ", + ) + sphinx = traits.Bool(argstr="--sphinx", desc="change orientation info to sphinx") out_i_count = traits.Int( - argstr='--out_i_count %d', desc='some count ?? in i direction') + argstr="--out_i_count %d", desc="some count ?? in i direction" + ) out_j_count = traits.Int( - argstr='--out_j_count %d', desc='some count ?? in j direction') + argstr="--out_j_count %d", desc="some count ?? in j direction" + ) out_k_count = traits.Int( - argstr='--out_k_count %d', desc='some count ?? in k direction') + argstr="--out_k_count %d", desc="some count ?? 
in k direction" + ) vox_size = traits.Tuple( traits.Float, traits.Float, traits.Float, - argstr='-voxsize %f %f %f', - desc= - ' specify the size (mm) - useful for upsampling or downsampling' + argstr="-voxsize %f %f %f", + desc=" specify the size (mm) - useful for upsampling or downsampling", ) - out_i_size = traits.Int(argstr='--out_i_size %d', desc='output i size') - out_j_size = traits.Int(argstr='--out_j_size %d', desc='output j size') - out_k_size = traits.Int(argstr='--out_k_size %d', desc='output k size') + out_i_size = traits.Int(argstr="--out_i_size %d", desc="output i size") + out_j_size = traits.Int(argstr="--out_j_size %d", desc="output j size") + out_k_size = traits.Int(argstr="--out_k_size %d", desc="output k size") out_i_dir = traits.Tuple( traits.Float, traits.Float, traits.Float, - argstr='--out_i_direction %f %f %f', - desc=' ') + argstr="--out_i_direction %f %f %f", + desc=" ", + ) out_j_dir = traits.Tuple( traits.Float, traits.Float, traits.Float, - argstr='--out_j_direction %f %f %f', - desc=' ') + argstr="--out_j_direction %f %f %f", + desc=" ", + ) out_k_dir = traits.Tuple( traits.Float, traits.Float, traits.Float, - argstr='--out_k_direction %f %f %f', - desc=' ') + argstr="--out_k_direction %f %f %f", + desc=" ", + ) out_orientation = traits.Enum( _orientations, - argstr='--out_orientation %s', - desc='specify the output orientation') + argstr="--out_orientation %s", + desc="specify the output orientation", + ) out_center = traits.Tuple( traits.Float, traits.Float, traits.Float, - argstr='--out_center %f %f %f', - desc=' ') + argstr="--out_center %f %f %f", + desc=" ", + ) out_datatype = traits.Enum( - 'uchar', - 'short', - 'int', - 'float', - argstr='--out_data_type %s', - desc='output data type ') + "uchar", + "short", + "int", + "float", + argstr="--out_data_type %s", + desc="output data type ", + ) resample_type = traits.Enum( - 'interpolate', - 'weighted', - 'nearest', - 'sinc', - 'cubic', - argstr='--resample_type %s', - desc= - ' (default is interpolate)') - no_scale = traits.Bool( - argstr='--no_scale 1', desc='dont rescale values for COR') + "interpolate", + "weighted", + "nearest", + "sinc", + "cubic", + argstr="--resample_type %s", + desc=" (default is interpolate)", + ) + no_scale = traits.Bool(argstr="--no_scale 1", desc="dont rescale values for COR") no_change = traits.Bool( - argstr='--nochange', - desc="don't change type of input to that of template") - tr = traits.Int(argstr='-tr %d', desc='TR in msec') - te = traits.Int(argstr='-te %d', desc='TE in msec') - ti = traits.Int(argstr='-ti %d', desc='TI in msec (note upper case flag)') + argstr="--nochange", desc="don't change type of input to that of template" + ) + tr = traits.Int(argstr="-tr %d", desc="TR in msec") + te = traits.Int(argstr="-te %d", desc="TE in msec") + ti = traits.Int(argstr="-ti %d", desc="TI in msec (note upper case flag)") autoalign_matrix = File( - exists=True, - argstr='--autoalign %s', - desc='text file with autoalign matrix') + exists=True, argstr="--autoalign %s", desc="text file with autoalign matrix" + ) unwarp_gradient = traits.Bool( - argstr='--unwarp_gradient_nonlinearity', - desc='unwarp gradient nonlinearity') + argstr="--unwarp_gradient_nonlinearity", desc="unwarp gradient nonlinearity" + ) apply_transform = File( - exists=True, argstr='--apply_transform %s', desc='apply xfm file') + exists=True, argstr="--apply_transform %s", desc="apply xfm file" + ) apply_inv_transform = File( exists=True, - argstr='--apply_inverse_transform %s', - desc='apply inverse 
-        desc='apply inverse transformation xfm file')
+        argstr="--apply_inverse_transform %s",
+        desc="apply inverse transformation xfm file",
+    )
-    devolve_transform = traits.Str(argstr='--devolvexfm %s', desc='subject id')
+    devolve_transform = traits.Str(argstr="--devolvexfm %s", desc="subject id")
     crop_center = traits.Tuple(
         traits.Int,
         traits.Int,
         traits.Int,
-        argstr='--crop %d %d %d',
-        desc=' crop to 256 around center (x, y, z)')
+        argstr="--crop %d %d %d",
+        desc=" crop to 256 around center (x, y, z)",
+    )
     crop_size = traits.Tuple(
         traits.Int,
         traits.Int,
         traits.Int,
-        argstr='--cropsize %d %d %d',
-        desc=' crop to size ')
+        argstr="--cropsize %d %d %d",
+        desc=" crop to size ",
+    )
     cut_ends = traits.Int(
-        argstr='--cutends %d', desc='remove ncut slices from the ends')
+        argstr="--cutends %d", desc="remove ncut slices from the ends"
+    )
     slice_crop = traits.Tuple(
         traits.Int,
         traits.Int,
-        argstr='--slice-crop %d %d',
-        desc='s_start s_end : keep slices s_start to s_end')
+        argstr="--slice-crop %d %d",
+        desc="s_start s_end : keep slices s_start to s_end",
+    )
     slice_reverse = traits.Bool(
-        argstr='--slice-reverse',
-        desc='reverse order of slices, update vox2ras')
+        argstr="--slice-reverse", desc="reverse order of slices, update vox2ras"
+    )
     slice_bias = traits.Float(
-        argstr='--slice-bias %f', desc='apply half-cosine bias field')
-    fwhm = traits.Float(
-        argstr='--fwhm %f', desc='smooth input volume by fwhm mm')
+        argstr="--slice-bias %f", desc="apply half-cosine bias field"
+    )
+    fwhm = traits.Float(argstr="--fwhm %f", desc="smooth input volume by fwhm mm")
     _filetypes = [
-        'cor', 'mgh', 'mgz', 'minc', 'analyze', 'analyze4d', 'spm', 'afni',
-        'brik', 'bshort', 'bfloat', 'sdt', 'outline', 'otl', 'gdf', 'nifti1',
-        'nii', 'niigz'
-    ]
-    _infiletypes = [
-        'ge', 'gelx', 'lx', 'ximg', 'siemens', 'dicom', 'siemens_dicom'
+        "cor",
+        "mgh",
+        "mgz",
+        "minc",
+        "analyze",
+        "analyze4d",
+        "spm",
+        "afni",
+        "brik",
+        "bshort",
+        "bfloat",
+        "sdt",
+        "outline",
+        "otl",
+        "gdf",
+        "nifti1",
+        "nii",
+        "niigz",
     ]
+    _infiletypes = ["ge", "gelx", "lx", "ximg", "siemens", "dicom", "siemens_dicom"]
     in_type = traits.Enum(
-        _filetypes + _infiletypes,
-        argstr='--in_type %s',
-        desc='input file type')
-    out_type = traits.Enum(
-        _filetypes, argstr='--out_type %s', desc='output file type')
+        _filetypes + _infiletypes, argstr="--in_type %s", desc="input file type"
+    )
+    out_type = traits.Enum(_filetypes, argstr="--out_type %s", desc="output file type")
     ascii = traits.Bool(
-        argstr='--ascii', desc='save output as ascii col>row>slice>frame')
+        argstr="--ascii", desc="save output as ascii col>row>slice>frame"
+    )
     reorder = traits.Tuple(
         traits.Int,
         traits.Int,
         traits.Int,
-        argstr='--reorder %d %d %d',
-        desc='olddim1 olddim2 olddim3')
+        argstr="--reorder %d %d %d",
+        desc="olddim1 olddim2 olddim3",
+    )
     invert_contrast = traits.Float(
-        argstr='--invert_contrast %f',
-        desc='threshold for inversting contrast')
+        argstr="--invert_contrast %f", desc="threshold for inverting contrast"
+    )
     in_file = File(
         exists=True,
         mandatory=True,
         position=-2,
-        argstr='--input_volume %s',
-        desc='File to read/convert')
+        argstr="--input_volume %s",
+        desc="File to read/convert",
+    )
     out_file = File(
-        argstr='--output_volume %s',
+        argstr="--output_volume %s",
         position=-1,
         genfile=True,
-        desc='output filename or True to generate one')
+        desc="output filename or True to generate one",
+    )
     conform = traits.Bool(
-        argstr='--conform',
-        desc=
-        'conform to 1mm voxel size in coronal slice direction with 256^3 or more'
+        argstr="--conform",
desc="conform to 1mm voxel size in coronal slice direction with 256^3 or more", ) - conform_min = traits.Bool( - argstr='--conform_min', desc='conform to smallest size') + conform_min = traits.Bool(argstr="--conform_min", desc="conform to smallest size") conform_size = traits.Float( - argstr='--conform_size %s', desc='conform to size_in_mm') - cw256 = traits.Bool( - argstr='--cw256', desc='confrom to dimensions of 256^3') - parse_only = traits.Bool(argstr='--parse_only', desc='parse input only') - subject_name = traits.Str( - argstr='--subject_name %s', desc='subject name ???') + argstr="--conform_size %s", desc="conform to size_in_mm" + ) + cw256 = traits.Bool(argstr="--cw256", desc="confrom to dimensions of 256^3") + parse_only = traits.Bool(argstr="--parse_only", desc="parse input only") + subject_name = traits.Str(argstr="--subject_name %s", desc="subject name ???") reslice_like = File( - exists=True, - argstr='--reslice_like %s', - desc='reslice output to match file') + exists=True, argstr="--reslice_like %s", desc="reslice output to match file" + ) template_type = traits.Enum( _filetypes + _infiletypes, - argstr='--template_type %s', - desc='template file type') + argstr="--template_type %s", + desc="template file type", + ) split = traits.Bool( - argstr='--split', - desc='split output frames into separate output files.') - frame = traits.Int( - argstr='--frame %d', desc='keep only 0-based frame number') - midframe = traits.Bool( - argstr='--mid-frame', desc='keep only the middle frame') - skip_n = traits.Int(argstr='--nskip %d', desc='skip the first n frames') - drop_n = traits.Int(argstr='--ndrop %d', desc='drop the last n frames') + argstr="--split", desc="split output frames into separate output files." + ) + frame = traits.Int(argstr="--frame %d", desc="keep only 0-based frame number") + midframe = traits.Bool(argstr="--mid-frame", desc="keep only the middle frame") + skip_n = traits.Int(argstr="--nskip %d", desc="skip the first n frames") + drop_n = traits.Int(argstr="--ndrop %d", desc="drop the last n frames") frame_subsample = traits.Tuple( traits.Int, traits.Int, traits.Int, - argstr='--fsubsample %d %d %d', - desc='start delta end : frame subsampling (end = -1 for end)') - in_scale = traits.Float( - argstr='--scale %f', desc='input intensity scale factor') + argstr="--fsubsample %d %d %d", + desc="start delta end : frame subsampling (end = -1 for end)", + ) + in_scale = traits.Float(argstr="--scale %f", desc="input intensity scale factor") out_scale = traits.Float( - argstr='--out-scale %d', desc='output intensity scale factor') - in_like = File(exists=True, argstr='--in_like %s', desc='input looks like') + argstr="--out-scale %d", desc="output intensity scale factor" + ) + in_like = File(exists=True, argstr="--in_like %s", desc="input looks like") fill_parcellation = traits.Bool( - argstr='--fill_parcellation', desc='fill parcellation') + argstr="--fill_parcellation", desc="fill parcellation" + ) smooth_parcellation = traits.Bool( - argstr='--smooth_parcellation', desc='smooth parcellation') - zero_outlines = traits.Bool(argstr='--zero_outlines', desc='zero outlines') - color_file = File(exists=True, argstr='--color_file %s', desc='color file') - no_translate = traits.Bool(argstr='--no_translate', desc='???') - status_file = File( - argstr='--status %s', desc='status file for DICOM conversion') + argstr="--smooth_parcellation", desc="smooth parcellation" + ) + zero_outlines = traits.Bool(argstr="--zero_outlines", desc="zero outlines") + color_file = File(exists=True, 
argstr="--color_file %s", desc="color file") + no_translate = traits.Bool(argstr="--no_translate", desc="???") + status_file = File(argstr="--status %s", desc="status file for DICOM conversion") sdcm_list = File( - exists=True, - argstr='--sdcmlist %s', - desc='list of DICOM files for conversion') + exists=True, argstr="--sdcmlist %s", desc="list of DICOM files for conversion" + ) template_info = traits.Bool( - argstr='--template_info', desc='dump info about template') - crop_gdf = traits.Bool(argstr='--crop_gdf', desc='apply GDF cropping') + argstr="--template_info", desc="dump info about template" + ) + crop_gdf = traits.Bool(argstr="--crop_gdf", desc="apply GDF cropping") zero_ge_z_offset = traits.Bool( - argstr='--zero_ge_z_offset', desc='zero ge z offset ???') + argstr="--zero_ge_z_offset", desc="zero ge z offset ???" + ) class MRIConvertOutputSpec(TraitedSpec): - out_file = OutputMultiPath(File(exists=True), desc='converted output file') + out_file = OutputMultiPath(File(exists=True), desc="converted output file") class MRIConvert(FSCommand): @@ -431,44 +518,44 @@ class MRIConvert(FSCommand): 'mri_convert --out_type mgz --input_volume structural.nii --output_volume outfile.mgz' """ - _cmd = 'mri_convert' + + _cmd = "mri_convert" input_spec = MRIConvertInputSpec output_spec = MRIConvertOutputSpec filemap = dict( - cor='cor', - mgh='mgh', - mgz='mgz', - minc='mnc', - afni='brik', - brik='brik', - bshort='bshort', - spm='img', - analyze='img', - analyze4d='img', - bfloat='bfloat', - nifti1='img', - nii='nii', - niigz='nii.gz') + cor="cor", + mgh="mgh", + mgz="mgz", + minc="mnc", + afni="brik", + brik="brik", + bshort="bshort", + spm="img", + analyze="img", + analyze4d="img", + bfloat="bfloat", + nifti1="img", + nii="nii", + niigz="nii.gz", + ) def _format_arg(self, name, spec, value): - if name in ['in_type', 'out_type', 'template_type']: - if value == 'niigz': - return spec.argstr % 'nii' + if name in ["in_type", "out_type", "template_type"]: + if value == "niigz": + return spec.argstr % "nii" return super(MRIConvert, self)._format_arg(name, spec, value) def _get_outfilename(self): outfile = self.inputs.out_file if not isdefined(outfile): if isdefined(self.inputs.out_type): - suffix = '_out.' + self.filemap[self.inputs.out_type] + suffix = "_out." + self.filemap[self.inputs.out_type] else: - suffix = '_out.nii.gz' + suffix = "_out.nii.gz" outfile = fname_presuffix( - self.inputs.in_file, - newpath=os.getcwd(), - suffix=suffix, - use_ext=False) + self.inputs.in_file, newpath=os.getcwd(), suffix=suffix, use_ext=False + ) return os.path.abspath(outfile) def _list_outputs(self): @@ -480,20 +567,20 @@ def _list_outputs(self): tp = 1 else: tp = size[-1] - if outfile.endswith('.mgz'): - stem = outfile.split('.mgz')[0] - ext = '.mgz' - elif outfile.endswith('.nii.gz'): - stem = outfile.split('.nii.gz')[0] - ext = '.nii.gz' + if outfile.endswith(".mgz"): + stem = outfile.split(".mgz")[0] + ext = ".mgz" + elif outfile.endswith(".nii.gz"): + stem = outfile.split(".nii.gz")[0] + ext = ".nii.gz" else: - stem = '.'.join(outfile.split('.')[:-1]) - ext = '.' + outfile.split('.')[-1] + stem = ".".join(outfile.split(".")[:-1]) + ext = "." 
+ outfile.split(".")[-1] outfile = [] for idx in range(0, tp): - outfile.append(stem + '%04d' % idx + ext) + outfile.append(stem + "%04d" % idx + ext) if isdefined(self.inputs.out_type): - if self.inputs.out_type in ['spm', 'analyze']: + if self.inputs.out_type in ["spm", "analyze"]: # generate all outputs size = load(self.inputs.in_file).shape if len(size) == 3: @@ -502,19 +589,18 @@ def _list_outputs(self): tp = size[-1] # have to take care of all the frame manipulations raise Exception( - 'Not taking frame manipulations into account- please warn the developers' + "Not taking frame manipulations into account- please warn the developers" ) outfiles = [] outfile = self._get_outfilename() for i in range(tp): - outfiles.append( - fname_presuffix(outfile, suffix='%03d' % (i + 1))) + outfiles.append(fname_presuffix(outfile, suffix="%03d" % (i + 1))) outfile = outfiles - outputs['out_file'] = outfile + outputs["out_file"] = outfile return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._get_outfilename() return None @@ -523,31 +609,36 @@ class DICOMConvertInputSpec(FSTraitedSpec): dicom_dir = Directory( exists=True, mandatory=True, - desc='dicom directory from which to convert dicom files') + desc="dicom directory from which to convert dicom files", + ) base_output_dir = Directory( - mandatory=True, - desc='directory in which subject directories are created') + mandatory=True, desc="directory in which subject directories are created" + ) subject_dir_template = traits.Str( - 'S.%04d', usedefault=True, desc='template for subject directory name') - subject_id = traits.Any(desc='subject identifier to insert into template') + "S.%04d", usedefault=True, desc="template for subject directory name" + ) + subject_id = traits.Any(desc="subject identifier to insert into template") file_mapping = traits.List( traits.Tuple(traits.Str, traits.Str), - desc='defines the output fields of interface') + desc="defines the output fields of interface", + ) out_type = traits.Enum( - 'niigz', + "niigz", MRIConvertInputSpec._filetypes, usedefault=True, - desc='defines the type of output file produced') + desc="defines the type of output file produced", + ) dicom_info = File( - exists=True, - desc='File containing summary information from mri_parse_sdcmdir') + exists=True, desc="File containing summary information from mri_parse_sdcmdir" + ) seq_list = traits.List( traits.Str, - requires=['dicom_info'], - desc='list of pulse sequence names to be converted.') + requires=["dicom_info"], + desc="list of pulse sequence names to be converted.", + ) ignore_single_slice = traits.Bool( - requires=['dicom_info'], - desc='ignore volumes containing a single slice') + requires=["dicom_info"], desc="ignore volumes containing a single slice" + ) class DICOMConvert(FSCommand): @@ -562,27 +653,27 @@ class DICOMConvert(FSCommand): >>> cvt.inputs.file_mapping = [('nifti', '*.nii'), ('info', 'dicom*.txt'), ('dti', '*dti.bv*')] """ - _cmd = 'mri_convert' + + _cmd = "mri_convert" input_spec = DICOMConvertInputSpec def _get_dicomfiles(self): """validate fsl bet options if set to None ignore """ - return glob( - os.path.abspath(os.path.join(self.inputs.dicom_dir, '*-1.dcm'))) + return glob(os.path.abspath(os.path.join(self.inputs.dicom_dir, "*-1.dcm"))) def _get_outdir(self): """returns output directory""" subjid = self.inputs.subject_id if not isdefined(subjid): path, fname = os.path.split(self._get_dicomfiles()[0]) - subjid = int(fname.split('-')[0]) + subjid = int(fname.split("-")[0]) if 
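# ---- [Editor's sketch: illustrative only, not part of the patch] ----
# The MRIConvert._list_outputs hunk above enumerates one file per frame,
# inserting a zero-padded index between stem and extension. The stem/ext
# split and naming, isolated (names hypothetical; .mgz branch omitted):
def frame_names(outfile, tp):
    if outfile.endswith(".nii.gz"):
        stem, ext = outfile[: -len(".nii.gz")], ".nii.gz"
    else:
        stem = ".".join(outfile.split(".")[:-1])
        ext = "." + outfile.split(".")[-1]
    return [stem + "%04d" % idx + ext for idx in range(0, tp)]


assert frame_names("out.nii.gz", 2) == ["out0000.nii.gz", "out0001.nii.gz"]
# ---- [end sketch] ----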
isdefined(self.inputs.subject_dir_template): subjid = self.inputs.subject_dir_template % subjid basedir = self.inputs.base_output_dir if not isdefined(basedir): - basedir = os.path.abspath('.') + basedir = os.path.abspath(".") outdir = os.path.abspath(os.path.join(basedir, subjid)) return outdir @@ -598,11 +689,11 @@ def _get_runs(self): if self.inputs.seq_list: if self.inputs.ignore_single_slice: if (int(s[8]) > 1) and any( - [s[12].startswith(sn) for sn in self.inputs.seq_list]): + [s[12].startswith(sn) for sn in self.inputs.seq_list] + ): runs.append(int(s[2])) else: - if any( - [s[12].startswith(sn) for sn in self.inputs.seq_list]): + if any([s[12].startswith(sn) for sn in self.inputs.seq_list]): runs.append(int(s[2])) else: runs.append(int(s[2])) @@ -614,11 +705,12 @@ def _get_filelist(self, outdir): for f in self._get_dicomfiles(): head, fname = os.path.split(f) fname, ext = os.path.splitext(fname) - fileparts = fname.split('-') + fileparts = fname.split("-") runno = int(fileparts[1]) out_type = MRIConvert.filemap[self.inputs.out_type] - outfile = os.path.join(outdir, '.'.join( - ('%s-%02d' % (fileparts[0], runno), out_type))) + outfile = os.path.join( + outdir, ".".join(("%s-%02d" % (fileparts[0], runno), out_type)) + ) filemap[runno] = (f, outfile) if self.inputs.dicom_info: files = [filemap[r] for r in self._get_runs()] @@ -634,43 +726,48 @@ def cmdline(self): outdir = self._get_outdir() cmd = [] if not os.path.exists(outdir): - cmdstr = 'python -c "import os; os.makedirs(\'%s\')"' % outdir + cmdstr = "python -c \"import os; os.makedirs('%s')\"" % outdir cmd.extend([cmdstr]) - infofile = os.path.join(outdir, 'shortinfo.txt') + infofile = os.path.join(outdir, "shortinfo.txt") if not os.path.exists(infofile): - cmdstr = 'dcmdir-info-mgh %s > %s' % (self.inputs.dicom_dir, - infofile) + cmdstr = "dcmdir-info-mgh %s > %s" % (self.inputs.dicom_dir, infofile) cmd.extend([cmdstr]) files = self._get_filelist(outdir) for infile, outfile in files: if not os.path.exists(outfile): - single_cmd = '%s%s %s %s' % (self._cmd_prefix, self.cmd, - infile, os.path.join(outdir, - outfile)) + single_cmd = "%s%s %s %s" % ( + self._cmd_prefix, + self.cmd, + infile, + os.path.join(outdir, outfile), + ) cmd.extend([single_cmd]) - return '; '.join(cmd) + return "; ".join(cmd) class ResampleInputSpec(FSTraitedSpec): in_file = File( exists=True, - argstr='-i %s', + argstr="-i %s", mandatory=True, - desc='file to resample', - position=-2) + desc="file to resample", + position=-2, + ) resampled_file = File( - argstr='-o %s', desc='output filename', genfile=True, position=-1) + argstr="-o %s", desc="output filename", genfile=True, position=-1 + ) voxel_size = traits.Tuple( traits.Float, traits.Float, traits.Float, - argstr='-vs %.2f %.2f %.2f', - desc='triplet of output voxel sizes', - mandatory=True) + argstr="-vs %.2f %.2f %.2f", + desc="triplet of output voxel sizes", + mandatory=True, + ) class ResampleOutputSpec(TraitedSpec): - resampled_file = File(exists=True, desc='output filename') + resampled_file = File(exists=True, desc="output filename") class Resample(FSCommand): @@ -689,7 +786,7 @@ class Resample(FSCommand): """ - _cmd = 'mri_convert' + _cmd = "mri_convert" input_spec = ResampleInputSpec output_spec = ResampleOutputSpec @@ -698,179 +795,212 @@ def _get_outfilename(self): outfile = self.inputs.resampled_file else: outfile = fname_presuffix( - self.inputs.in_file, newpath=os.getcwd(), suffix='_resample') + self.inputs.in_file, newpath=os.getcwd(), suffix="_resample" + ) return outfile def 
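# ---- [Editor's sketch: illustrative only, not part of the patch] ----
# DICOMConvert.cmdline above assembles several shell commands joined by
# '; ' (a mkdir via inline python -c, an info dump, then one mri_convert
# per run). A hedged reduction of that assembly; the output directory
# and DICOM file are hypothetical:
cmds = [
    "python -c \"import os; os.makedirs('%s')\"" % "./S.0001",
    "dcmdir-info-mgh %s > %s" % (".", "./S.0001/shortinfo.txt"),
    "mri_convert 001-1.dcm ./S.0001/001-01.nii.gz",
]
cmdline = "; ".join(cmds)
# ---- [end sketch] ----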
_list_outputs(self): outputs = self.output_spec().get() - outputs['resampled_file'] = self._get_outfilename() + outputs["resampled_file"] = self._get_outfilename() return outputs def _gen_filename(self, name): - if name == 'resampled_file': + if name == "resampled_file": return self._get_outfilename() return None class ReconAllInputSpec(CommandLineInputSpec): subject_id = traits.Str( - "recon_all", argstr='-subjid %s', desc='subject name', usedefault=True) + "recon_all", argstr="-subjid %s", desc="subject name", usedefault=True + ) directive = traits.Enum( - 'all', - 'autorecon1', + "all", + "autorecon1", # autorecon2 variants - 'autorecon2', - 'autorecon2-volonly', - 'autorecon2-perhemi', - 'autorecon2-inflate1', - 'autorecon2-cp', - 'autorecon2-wm', + "autorecon2", + "autorecon2-volonly", + "autorecon2-perhemi", + "autorecon2-inflate1", + "autorecon2-cp", + "autorecon2-wm", # autorecon3 variants - 'autorecon3', - 'autorecon3-T2pial', + "autorecon3", + "autorecon3-T2pial", # Mix of autorecon2 and autorecon3 steps - 'autorecon-pial', - 'autorecon-hemi', + "autorecon-pial", + "autorecon-hemi", # Not "multi-stage flags" - 'localGI', - 'qcache', - argstr='-%s', - desc='process directive', + "localGI", + "qcache", + argstr="-%s", + desc="process directive", usedefault=True, - position=0) - hemi = traits.Enum( - 'lh', 'rh', desc='hemisphere to process', argstr="-hemi %s") + position=0, + ) + hemi = traits.Enum("lh", "rh", desc="hemisphere to process", argstr="-hemi %s") T1_files = InputMultiPath( - File(exists=True), - argstr='-i %s...', - desc='name of T1 file to process') + File(exists=True), argstr="-i %s...", desc="name of T1 file to process" + ) T2_file = File( exists=True, argstr="-T2 %s", - min_ver='5.3.0', - desc='Convert T2 image to orig directory') + min_ver="5.3.0", + desc="Convert T2 image to orig directory", + ) FLAIR_file = File( exists=True, argstr="-FLAIR %s", - min_ver='5.3.0', - desc='Convert FLAIR image to orig directory') + min_ver="5.3.0", + desc="Convert FLAIR image to orig directory", + ) use_T2 = traits.Bool( argstr="-T2pial", - min_ver='5.3.0', - xor=['use_FLAIR'], - desc='Use T2 image to refine the pial surface') + min_ver="5.3.0", + xor=["use_FLAIR"], + desc="Use T2 image to refine the pial surface", + ) use_FLAIR = traits.Bool( argstr="-FLAIRpial", - min_ver='5.3.0', - xor=['use_T2'], - desc='Use FLAIR image to refine the pial surface') + min_ver="5.3.0", + xor=["use_T2"], + desc="Use FLAIR image to refine the pial surface", + ) openmp = traits.Int( - argstr="-openmp %d", desc="Number of processors to use in parallel") - parallel = traits.Bool( - argstr="-parallel", desc="Enable parallel execution") + argstr="-openmp %d", desc="Number of processors to use in parallel" + ) + parallel = traits.Bool(argstr="-parallel", desc="Enable parallel execution") hires = traits.Bool( argstr="-hires", - min_ver='6.0.0', - desc="Conform to minimum voxel size (for voxels < 1mm)") + min_ver="6.0.0", + desc="Conform to minimum voxel size (for voxels < 1mm)", + ) mprage = traits.Bool( - argstr='-mprage', - desc=('Assume scan parameters are MGH MP-RAGE ' - 'protocol, which produces darker gray matter')) + argstr="-mprage", + desc=( + "Assume scan parameters are MGH MP-RAGE " + "protocol, which produces darker gray matter" + ), + ) big_ventricles = traits.Bool( - argstr='-bigventricles', - desc=('For use in subjects with enlarged ' - 'ventricles')) + argstr="-bigventricles", + desc=("For use in subjects with enlarged " "ventricles"), + ) brainstem = traits.Bool( - 
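# ---- [Editor's sketch: illustrative only, not part of the patch] ----
# Resample (above) shells out to mri_convert with -vs; when
# resampled_file is left unset, _get_outfilename appends '_resample' to
# the input name. Hedged example with hypothetical file names:
from nipype.interfaces.freesurfer import Resample

resample = Resample(in_file="structural.nii", voxel_size=(2.1, 2.1, 2.1))
# resample.cmdline is expected to resemble
# 'mri_convert -vs 2.10 2.10 2.10 -i structural.nii -o structural_resample.nii'
# ---- [end sketch] ----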
argstr='-brainstem-structures', desc='Segment brainstem structures') + argstr="-brainstem-structures", desc="Segment brainstem structures" + ) hippocampal_subfields_T1 = traits.Bool( - argstr='-hippocampal-subfields-T1', - min_ver='6.0.0', - desc='segment hippocampal subfields using input T1 scan') + argstr="-hippocampal-subfields-T1", + min_ver="6.0.0", + desc="segment hippocampal subfields using input T1 scan", + ) hippocampal_subfields_T2 = traits.Tuple( File(exists=True), traits.Str(), - argstr='-hippocampal-subfields-T2 %s %s', - min_ver='6.0.0', - desc=('segment hippocampal subfields using T2 scan, identified by ' - 'ID (may be combined with hippocampal_subfields_T1)')) + argstr="-hippocampal-subfields-T2 %s %s", + min_ver="6.0.0", + desc=( + "segment hippocampal subfields using T2 scan, identified by " + "ID (may be combined with hippocampal_subfields_T1)" + ), + ) expert = File( - exists=True, - argstr='-expert %s', - desc="Set parameters using expert file") + exists=True, argstr="-expert %s", desc="Set parameters using expert file" + ) xopts = traits.Enum( "use", "clean", "overwrite", - argstr='-xopts-%s', - desc="Use, delete or overwrite existing expert options file") + argstr="-xopts-%s", + desc="Use, delete or overwrite existing expert options file", + ) subjects_dir = Directory( exists=True, - argstr='-sd %s', + argstr="-sd %s", hash_files=False, - desc='path to subjects directory', - genfile=True) - flags = InputMultiPath( - traits.Str, argstr='%s', desc='additional parameters') + desc="path to subjects directory", + genfile=True, + ) + flags = InputMultiPath(traits.Str, argstr="%s", desc="additional parameters") # Expert options - talairach = traits.Str( - desc="Flags to pass to talairach commands", xor=['expert']) + talairach = traits.Str(desc="Flags to pass to talairach commands", xor=["expert"]) mri_normalize = traits.Str( - desc="Flags to pass to mri_normalize commands", xor=['expert']) + desc="Flags to pass to mri_normalize commands", xor=["expert"] + ) mri_watershed = traits.Str( - desc="Flags to pass to mri_watershed commands", xor=['expert']) + desc="Flags to pass to mri_watershed commands", xor=["expert"] + ) mri_em_register = traits.Str( - desc="Flags to pass to mri_em_register commands", xor=['expert']) + desc="Flags to pass to mri_em_register commands", xor=["expert"] + ) mri_ca_normalize = traits.Str( - desc="Flags to pass to mri_ca_normalize commands", xor=['expert']) + desc="Flags to pass to mri_ca_normalize commands", xor=["expert"] + ) mri_ca_register = traits.Str( - desc="Flags to pass to mri_ca_register commands", xor=['expert']) + desc="Flags to pass to mri_ca_register commands", xor=["expert"] + ) mri_remove_neck = traits.Str( - desc="Flags to pass to mri_remove_neck commands", xor=['expert']) + desc="Flags to pass to mri_remove_neck commands", xor=["expert"] + ) mri_ca_label = traits.Str( - desc="Flags to pass to mri_ca_label commands", xor=['expert']) + desc="Flags to pass to mri_ca_label commands", xor=["expert"] + ) mri_segstats = traits.Str( - desc="Flags to pass to mri_segstats commands", xor=['expert']) - mri_mask = traits.Str( - desc="Flags to pass to mri_mask commands", xor=['expert']) + desc="Flags to pass to mri_segstats commands", xor=["expert"] + ) + mri_mask = traits.Str(desc="Flags to pass to mri_mask commands", xor=["expert"]) mri_segment = traits.Str( - desc="Flags to pass to mri_segment commands", xor=['expert']) + desc="Flags to pass to mri_segment commands", xor=["expert"] + ) mri_edit_wm_with_aseg = traits.Str( - desc="Flags to pass 
to mri_edit_wm_with_aseg commands", xor=['expert']) + desc="Flags to pass to mri_edit_wm_with_aseg commands", xor=["expert"] + ) mri_pretess = traits.Str( - desc="Flags to pass to mri_pretess commands", xor=['expert']) - mri_fill = traits.Str( - desc="Flags to pass to mri_fill commands", xor=['expert']) + desc="Flags to pass to mri_pretess commands", xor=["expert"] + ) + mri_fill = traits.Str(desc="Flags to pass to mri_fill commands", xor=["expert"]) mri_tessellate = traits.Str( - desc="Flags to pass to mri_tessellate commands", xor=['expert']) + desc="Flags to pass to mri_tessellate commands", xor=["expert"] + ) mris_smooth = traits.Str( - desc="Flags to pass to mri_smooth commands", xor=['expert']) + desc="Flags to pass to mris_smooth commands", xor=["expert"] + ) mris_inflate = traits.Str( - desc="Flags to pass to mri_inflate commands", xor=['expert']) + desc="Flags to pass to mris_inflate commands", xor=["expert"] + ) mris_sphere = traits.Str( - desc="Flags to pass to mris_sphere commands", xor=['expert']) + desc="Flags to pass to mris_sphere commands", xor=["expert"] + ) mris_fix_topology = traits.Str( - desc="Flags to pass to mris_fix_topology commands", xor=['expert']) + desc="Flags to pass to mris_fix_topology commands", xor=["expert"] + ) mris_make_surfaces = traits.Str( - desc="Flags to pass to mris_make_surfaces commands", xor=['expert']) + desc="Flags to pass to mris_make_surfaces commands", xor=["expert"] + ) mris_surf2vol = traits.Str( - desc="Flags to pass to mris_surf2vol commands", xor=['expert']) + desc="Flags to pass to mris_surf2vol commands", xor=["expert"] + ) mris_register = traits.Str( - desc="Flags to pass to mris_register commands", xor=['expert']) + desc="Flags to pass to mris_register commands", xor=["expert"] + ) mrisp_paint = traits.Str( - desc="Flags to pass to mrisp_paint commands", xor=['expert']) + desc="Flags to pass to mrisp_paint commands", xor=["expert"] + ) mris_ca_label = traits.Str( - desc="Flags to pass to mris_ca_label commands", xor=['expert']) + desc="Flags to pass to mris_ca_label commands", xor=["expert"] + ) mris_anatomical_stats = traits.Str( - desc="Flags to pass to mris_anatomical_stats commands", xor=['expert']) + desc="Flags to pass to mris_anatomical_stats commands", xor=["expert"] + ) mri_aparc2aseg = traits.Str( - desc="Flags to pass to mri_aparc2aseg commands", xor=['expert']) + desc="Flags to pass to mri_aparc2aseg commands", xor=["expert"] + ) class ReconAllOutputSpec(FreeSurferSource.output_spec): - subjects_dir = Directory( - exists=True, desc='Freesurfer subjects directory.') - subject_id = traits.Str(desc='Subject name for whom to retrieve data') + subjects_dir = Directory(exists=True, desc="Freesurfer subjects directory.") + subject_id = traits.Str(desc="Subject name for whom to retrieve data") class ReconAll(CommandLine): @@ -928,8 +1058,8 @@ class ReconAll(CommandLine): 'recon-all -all -i structural.nii -hippocampal-subfields-T2 structural.nii test -subjid foo -sd .'
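A sketch of the doctest invocation quoted above, assuming structural.nii exists in the working directory:

    from nipype.interfaces.freesurfer import ReconAll

    reconall = ReconAll()
    reconall.inputs.subject_id = "foo"
    reconall.inputs.directive = "all"
    reconall.inputs.subjects_dir = "."
    reconall.inputs.T1_files = "structural.nii"
    reconall.inputs.hippocampal_subfields_T2 = ("structural.nii", "test")
    print(reconall.cmdline)
    # 'recon-all -all -i structural.nii -hippocampal-subfields-T2 structural.nii test -subjid foo -sd .'
    # With hippocampal_subfields_T1 also set, _format_arg rewrites the flag to
    # -hippocampal-subfields-T1T2.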
""" - _cmd = 'recon-all' - _additional_metadata = ['loc', 'altkey'] + _cmd = "recon-all" + _additional_metadata = ["loc", "altkey"] input_spec = ReconAllInputSpec output_spec = ReconAllOutputSpec _can_resume = True @@ -948,214 +1078,309 @@ class ReconAll(CommandLine): # [0] https://surfer.nmr.mgh.harvard.edu/fswiki/ReconAllTableStableV5.3 # [1] https://surfer.nmr.mgh.harvard.edu/fswiki/ReconAllTableStableV6.0 _autorecon1_steps = [ - ('motioncor', ['mri/rawavg.mgz', 'mri/orig.mgz'], []), + ("motioncor", ["mri/rawavg.mgz", "mri/orig.mgz"], []), ( - 'talairach', + "talairach", [ - 'mri/orig_nu.mgz', - 'mri/transforms/talairach.auto.xfm', - 'mri/transforms/talairach.xfm', + "mri/orig_nu.mgz", + "mri/transforms/talairach.auto.xfm", + "mri/transforms/talairach.xfm", # 'mri/transforms/talairach_avi.log', ], - []), - ('nuintensitycor', ['mri/nu.mgz'], []), - ('normalization', ['mri/T1.mgz'], []), - ('skullstrip', [ - 'mri/transforms/talairach_with_skull.lta', - 'mri/brainmask.auto.mgz', 'mri/brainmask.mgz' - ], []), + [], + ), + ("nuintensitycor", ["mri/nu.mgz"], []), + ("normalization", ["mri/T1.mgz"], []), + ( + "skullstrip", + [ + "mri/transforms/talairach_with_skull.lta", + "mri/brainmask.auto.mgz", + "mri/brainmask.mgz", + ], + [], + ), ] if Info.looseversion() < LooseVersion("6.0.0"): _autorecon2_volonly_steps = [ - ('gcareg', ['mri/transforms/talairach.lta'], []), - ('canorm', ['mri/norm.mgz'], []), - ('careg', ['mri/transforms/talairach.m3z'], []), - ('careginv', [ - 'mri/transforms/talairach.m3z.inv.x.mgz', - 'mri/transforms/talairach.m3z.inv.y.mgz', - 'mri/transforms/talairach.m3z.inv.z.mgz', - ], []), - ('rmneck', ['mri/nu_noneck.mgz'], []), - ('skull-lta', ['mri/transforms/talairach_with_skull_2.lta'], []), - ('calabel', [ - 'mri/aseg.auto_noCCseg.mgz', 'mri/aseg.auto.mgz', - 'mri/aseg.mgz' - ], []), - ('normalization2', ['mri/brain.mgz'], []), - ('maskbfs', ['mri/brain.finalsurfs.mgz'], []), - ('segmentation', - ['mri/wm.seg.mgz', 'mri/wm.asegedit.mgz', 'mri/wm.mgz'], []), + ("gcareg", ["mri/transforms/talairach.lta"], []), + ("canorm", ["mri/norm.mgz"], []), + ("careg", ["mri/transforms/talairach.m3z"], []), + ( + "careginv", + [ + "mri/transforms/talairach.m3z.inv.x.mgz", + "mri/transforms/talairach.m3z.inv.y.mgz", + "mri/transforms/talairach.m3z.inv.z.mgz", + ], + [], + ), + ("rmneck", ["mri/nu_noneck.mgz"], []), + ("skull-lta", ["mri/transforms/talairach_with_skull_2.lta"], []), + ( + "calabel", + ["mri/aseg.auto_noCCseg.mgz", "mri/aseg.auto.mgz", "mri/aseg.mgz"], + [], + ), + ("normalization2", ["mri/brain.mgz"], []), + ("maskbfs", ["mri/brain.finalsurfs.mgz"], []), + ( + "segmentation", + ["mri/wm.seg.mgz", "mri/wm.asegedit.mgz", "mri/wm.mgz"], + [], + ), ( - 'fill', + "fill", [ - 'mri/filled.mgz', + "mri/filled.mgz", # 'scripts/ponscc.cut.log', ], - []), + [], + ), ] _autorecon2_lh_steps = [ - ('tessellate', ['surf/lh.orig.nofix'], []), - ('smooth1', ['surf/lh.smoothwm.nofix'], []), - ('inflate1', ['surf/lh.inflated.nofix'], []), - ('qsphere', ['surf/lh.qsphere.nofix'], []), - ('fix', ['surf/lh.orig'], []), - ('white', [ - 'surf/lh.white', 'surf/lh.curv', 'surf/lh.area', - 'label/lh.cortex.label' - ], []), - ('smooth2', ['surf/lh.smoothwm'], []), - ('inflate2', [ - 'surf/lh.inflated', 'surf/lh.sulc', 'surf/lh.inflated.H', - 'surf/lh.inflated.K' - ], []), + ("tessellate", ["surf/lh.orig.nofix"], []), + ("smooth1", ["surf/lh.smoothwm.nofix"], []), + ("inflate1", ["surf/lh.inflated.nofix"], []), + ("qsphere", ["surf/lh.qsphere.nofix"], []), + ("fix", ["surf/lh.orig"], []), + ( + 
"white", + [ + "surf/lh.white", + "surf/lh.curv", + "surf/lh.area", + "label/lh.cortex.label", + ], + [], + ), + ("smooth2", ["surf/lh.smoothwm"], []), + ( + "inflate2", + [ + "surf/lh.inflated", + "surf/lh.sulc", + "surf/lh.inflated.H", + "surf/lh.inflated.K", + ], + [], + ), # Undocumented in ReconAllTableStableV5.3 - ('curvstats', ['stats/lh.curv.stats'], []), + ("curvstats", ["stats/lh.curv.stats"], []), ] _autorecon3_lh_steps = [ - ('sphere', ['surf/lh.sphere'], []), - ('surfreg', ['surf/lh.sphere.reg'], []), - ('jacobian_white', ['surf/lh.jacobian_white'], []), - ('avgcurv', ['surf/lh.avg_curv'], []), - ('cortparc', ['label/lh.aparc.annot'], []), - ('pial', [ - 'surf/lh.pial', 'surf/lh.curv.pial', 'surf/lh.area.pial', - 'surf/lh.thickness' - ], []), + ("sphere", ["surf/lh.sphere"], []), + ("surfreg", ["surf/lh.sphere.reg"], []), + ("jacobian_white", ["surf/lh.jacobian_white"], []), + ("avgcurv", ["surf/lh.avg_curv"], []), + ("cortparc", ["label/lh.aparc.annot"], []), + ( + "pial", + [ + "surf/lh.pial", + "surf/lh.curv.pial", + "surf/lh.area.pial", + "surf/lh.thickness", + ], + [], + ), # Misnamed outputs in ReconAllTableStableV5.3: ?h.w-c.pct.mgz - ('pctsurfcon', ['surf/lh.w-g.pct.mgh'], []), - ('parcstats', ['stats/lh.aparc.stats'], []), - ('cortparc2', ['label/lh.aparc.a2009s.annot'], []), - ('parcstats2', ['stats/lh.aparc.a2009s.stats'], []), + ("pctsurfcon", ["surf/lh.w-g.pct.mgh"], []), + ("parcstats", ["stats/lh.aparc.stats"], []), + ("cortparc2", ["label/lh.aparc.a2009s.annot"], []), + ("parcstats2", ["stats/lh.aparc.a2009s.stats"], []), # Undocumented in ReconAllTableStableV5.3 - ('cortparc3', ['label/lh.aparc.DKTatlas40.annot'], []), + ("cortparc3", ["label/lh.aparc.DKTatlas40.annot"], []), # Undocumented in ReconAllTableStableV5.3 - ('parcstats3', ['stats/lh.aparc.a2009s.stats'], []), - ('label-exvivo-ec', ['label/lh.entorhinal_exvivo.label'], []), + ("parcstats3", ["stats/lh.aparc.a2009s.stats"], []), + ("label-exvivo-ec", ["label/lh.entorhinal_exvivo.label"], []), ] _autorecon3_added_steps = [ - ('cortribbon', - ['mri/lh.ribbon.mgz', 'mri/rh.ribbon.mgz', 'mri/ribbon.mgz'], []), - ('segstats', ['stats/aseg.stats'], []), - ('aparc2aseg', ['mri/aparc+aseg.mgz', 'mri/aparc.a2009s+aseg.mgz'], - []), - ('wmparc', ['mri/wmparc.mgz', 'stats/wmparc.stats'], []), - ('balabels', ['label/BA.ctab', 'label/BA.thresh.ctab'], []), + ( + "cortribbon", + ["mri/lh.ribbon.mgz", "mri/rh.ribbon.mgz", "mri/ribbon.mgz"], + [], + ), + ("segstats", ["stats/aseg.stats"], []), + ("aparc2aseg", ["mri/aparc+aseg.mgz", "mri/aparc.a2009s+aseg.mgz"], []), + ("wmparc", ["mri/wmparc.mgz", "stats/wmparc.stats"], []), + ("balabels", ["label/BA.ctab", "label/BA.thresh.ctab"], []), ] else: _autorecon2_volonly_steps = [ - ('gcareg', ['mri/transforms/talairach.lta'], []), - ('canorm', ['mri/norm.mgz'], []), - ('careg', ['mri/transforms/talairach.m3z'], []), - ('calabel', [ - 'mri/aseg.auto_noCCseg.mgz', 'mri/aseg.auto.mgz', - 'mri/aseg.mgz' - ], []), - ('normalization2', ['mri/brain.mgz'], []), - ('maskbfs', ['mri/brain.finalsurfs.mgz'], []), - ('segmentation', - ['mri/wm.seg.mgz', 'mri/wm.asegedit.mgz', 'mri/wm.mgz'], []), + ("gcareg", ["mri/transforms/talairach.lta"], []), + ("canorm", ["mri/norm.mgz"], []), + ("careg", ["mri/transforms/talairach.m3z"], []), + ( + "calabel", + ["mri/aseg.auto_noCCseg.mgz", "mri/aseg.auto.mgz", "mri/aseg.mgz"], + [], + ), + ("normalization2", ["mri/brain.mgz"], []), + ("maskbfs", ["mri/brain.finalsurfs.mgz"], []), + ( + "segmentation", + ["mri/wm.seg.mgz", 
"mri/wm.asegedit.mgz", "mri/wm.mgz"], + [], + ), ( - 'fill', + "fill", [ - 'mri/filled.mgz', + "mri/filled.mgz", # 'scripts/ponscc.cut.log', ], - []), + [], + ), ] _autorecon2_lh_steps = [ - ('tessellate', ['surf/lh.orig.nofix'], []), - ('smooth1', ['surf/lh.smoothwm.nofix'], []), - ('inflate1', ['surf/lh.inflated.nofix'], []), - ('qsphere', ['surf/lh.qsphere.nofix'], []), - ('fix', ['surf/lh.orig'], []), - ('white', [ - 'surf/lh.white.preaparc', 'surf/lh.curv', 'surf/lh.area', - 'label/lh.cortex.label' - ], []), - ('smooth2', ['surf/lh.smoothwm'], []), - ('inflate2', ['surf/lh.inflated', 'surf/lh.sulc'], []), - ('curvHK', [ - 'surf/lh.white.H', 'surf/lh.white.K', 'surf/lh.inflated.H', - 'surf/lh.inflated.K' - ], []), - ('curvstats', ['stats/lh.curv.stats'], []), + ("tessellate", ["surf/lh.orig.nofix"], []), + ("smooth1", ["surf/lh.smoothwm.nofix"], []), + ("inflate1", ["surf/lh.inflated.nofix"], []), + ("qsphere", ["surf/lh.qsphere.nofix"], []), + ("fix", ["surf/lh.orig"], []), + ( + "white", + [ + "surf/lh.white.preaparc", + "surf/lh.curv", + "surf/lh.area", + "label/lh.cortex.label", + ], + [], + ), + ("smooth2", ["surf/lh.smoothwm"], []), + ("inflate2", ["surf/lh.inflated", "surf/lh.sulc"], []), + ( + "curvHK", + [ + "surf/lh.white.H", + "surf/lh.white.K", + "surf/lh.inflated.H", + "surf/lh.inflated.K", + ], + [], + ), + ("curvstats", ["stats/lh.curv.stats"], []), ] _autorecon3_lh_steps = [ - ('sphere', ['surf/lh.sphere'], []), - ('surfreg', ['surf/lh.sphere.reg'], []), - ('jacobian_white', ['surf/lh.jacobian_white'], []), - ('avgcurv', ['surf/lh.avg_curv'], []), - ('cortparc', ['label/lh.aparc.annot'], []), - ('pial', [ - 'surf/lh.pial', 'surf/lh.curv.pial', 'surf/lh.area.pial', - 'surf/lh.thickness', 'surf/lh.white' - ], []), - ('parcstats', ['stats/lh.aparc.stats'], []), - ('cortparc2', ['label/lh.aparc.a2009s.annot'], []), - ('parcstats2', ['stats/lh.aparc.a2009s.stats'], []), - ('cortparc3', ['label/lh.aparc.DKTatlas.annot'], []), - ('parcstats3', ['stats/lh.aparc.DKTatlas.stats'], []), - ('pctsurfcon', ['surf/lh.w-g.pct.mgh'], []), + ("sphere", ["surf/lh.sphere"], []), + ("surfreg", ["surf/lh.sphere.reg"], []), + ("jacobian_white", ["surf/lh.jacobian_white"], []), + ("avgcurv", ["surf/lh.avg_curv"], []), + ("cortparc", ["label/lh.aparc.annot"], []), + ( + "pial", + [ + "surf/lh.pial", + "surf/lh.curv.pial", + "surf/lh.area.pial", + "surf/lh.thickness", + "surf/lh.white", + ], + [], + ), + ("parcstats", ["stats/lh.aparc.stats"], []), + ("cortparc2", ["label/lh.aparc.a2009s.annot"], []), + ("parcstats2", ["stats/lh.aparc.a2009s.stats"], []), + ("cortparc3", ["label/lh.aparc.DKTatlas.annot"], []), + ("parcstats3", ["stats/lh.aparc.DKTatlas.stats"], []), + ("pctsurfcon", ["surf/lh.w-g.pct.mgh"], []), ] _autorecon3_added_steps = [ - ('cortribbon', - ['mri/lh.ribbon.mgz', 'mri/rh.ribbon.mgz', 'mri/ribbon.mgz'], []), - ('hyporelabel', ['mri/aseg.presurf.hypos.mgz'], []), - ('aparc2aseg', [ - 'mri/aparc+aseg.mgz', 'mri/aparc.a2009s+aseg.mgz', - 'mri/aparc.DKTatlas+aseg.mgz' - ], []), - ('apas2aseg', ['mri/aseg.mgz'], ['mri/aparc+aseg.mgz']), - ('segstats', ['stats/aseg.stats'], []), - ('wmparc', ['mri/wmparc.mgz', 'stats/wmparc.stats'], []), + ( + "cortribbon", + ["mri/lh.ribbon.mgz", "mri/rh.ribbon.mgz", "mri/ribbon.mgz"], + [], + ), + ("hyporelabel", ["mri/aseg.presurf.hypos.mgz"], []), + ( + "aparc2aseg", + [ + "mri/aparc+aseg.mgz", + "mri/aparc.a2009s+aseg.mgz", + "mri/aparc.DKTatlas+aseg.mgz", + ], + [], + ), + ("apas2aseg", ["mri/aseg.mgz"], ["mri/aparc+aseg.mgz"]), + ("segstats", 
["stats/aseg.stats"], []), + ("wmparc", ["mri/wmparc.mgz", "stats/wmparc.stats"], []), # Note that this is a very incomplete list; however the ctab # files are last to be touched, so this should be reasonable - ('balabels', [ - 'label/BA_exvivo.ctab', 'label/BA_exvivo.thresh.ctab', - 'label/lh.entorhinal_exvivo.label', - 'label/rh.entorhinal_exvivo.label' - ], []), + ( + "balabels", + [ + "label/BA_exvivo.ctab", + "label/BA_exvivo.thresh.ctab", + "label/lh.entorhinal_exvivo.label", + "label/rh.entorhinal_exvivo.label", + ], + [], + ), ] # Fill out autorecon2 steps - _autorecon2_rh_steps = [(step, [out.replace('lh', 'rh') - for out in outs], ins) - for step, outs, ins in _autorecon2_lh_steps] - _autorecon2_perhemi_steps = [(step, [ - of for out in outs for of in (out, out.replace('lh', 'rh')) - ], ins) for step, outs, ins in _autorecon2_lh_steps] + _autorecon2_rh_steps = [ + (step, [out.replace("lh", "rh") for out in outs], ins) + for step, outs, ins in _autorecon2_lh_steps + ] + _autorecon2_perhemi_steps = [ + (step, [of for out in outs for of in (out, out.replace("lh", "rh"))], ins) + for step, outs, ins in _autorecon2_lh_steps + ] _autorecon2_steps = _autorecon2_volonly_steps + _autorecon2_perhemi_steps # Fill out autorecon3 steps - _autorecon3_rh_steps = [(step, [out.replace('lh', 'rh') - for out in outs], ins) - for step, outs, ins in _autorecon3_lh_steps] - _autorecon3_perhemi_steps = [(step, [ - of for out in outs for of in (out, out.replace('lh', 'rh')) - ], ins) for step, outs, ins in _autorecon3_lh_steps] + _autorecon3_rh_steps = [ + (step, [out.replace("lh", "rh") for out in outs], ins) + for step, outs, ins in _autorecon3_lh_steps + ] + _autorecon3_perhemi_steps = [ + (step, [of for out in outs for of in (out, out.replace("lh", "rh"))], ins) + for step, outs, ins in _autorecon3_lh_steps + ] _autorecon3_steps = _autorecon3_perhemi_steps + _autorecon3_added_steps # Fill out autorecon-hemi lh/rh steps - _autorecon_lh_steps = (_autorecon2_lh_steps + _autorecon3_lh_steps) - _autorecon_rh_steps = (_autorecon2_rh_steps + _autorecon3_rh_steps) + _autorecon_lh_steps = _autorecon2_lh_steps + _autorecon3_lh_steps + _autorecon_rh_steps = _autorecon2_rh_steps + _autorecon3_rh_steps _steps = _autorecon1_steps + _autorecon2_steps + _autorecon3_steps _binaries = [ - 'talairach', 'mri_normalize', 'mri_watershed', 'mri_em_register', - 'mri_ca_normalize', 'mri_ca_register', 'mri_remove_neck', - 'mri_ca_label', 'mri_segstats', 'mri_mask', 'mri_segment', - 'mri_edit_wm_with_aseg', 'mri_pretess', 'mri_fill', 'mri_tessellate', - 'mris_smooth', 'mris_inflate', 'mris_sphere', 'mris_fix_topology', - 'mris_make_surfaces', 'mris_surf2vol', 'mris_register', 'mrisp_paint', - 'mris_ca_label', 'mris_anatomical_stats', 'mri_aparc2aseg' + "talairach", + "mri_normalize", + "mri_watershed", + "mri_em_register", + "mri_ca_normalize", + "mri_ca_register", + "mri_remove_neck", + "mri_ca_label", + "mri_segstats", + "mri_mask", + "mri_segment", + "mri_edit_wm_with_aseg", + "mri_pretess", + "mri_fill", + "mri_tessellate", + "mris_smooth", + "mris_inflate", + "mris_sphere", + "mris_fix_topology", + "mris_make_surfaces", + "mris_surf2vol", + "mris_register", + "mrisp_paint", + "mris_ca_label", + "mris_anatomical_stats", + "mri_aparc2aseg", ] def _gen_subjects_dir(self): return os.getcwd() def _gen_filename(self, name): - if name == 'subjects_dir': + if name == "subjects_dir": return self._gen_subjects_dir() return None @@ -1171,47 +1396,57 @@ def _list_outputs(self): if isdefined(self.inputs.hemi): hemi = self.inputs.hemi 
else: - hemi = 'both' + hemi = "both" outputs = self._outputs().get() outputs.update( FreeSurferSource( - subject_id=self.inputs.subject_id, - subjects_dir=subjects_dir, - hemi=hemi)._list_outputs()) - outputs['subject_id'] = self.inputs.subject_id - outputs['subjects_dir'] = subjects_dir + subject_id=self.inputs.subject_id, subjects_dir=subjects_dir, hemi=hemi + )._list_outputs() + ) + outputs["subject_id"] = self.inputs.subject_id + outputs["subjects_dir"] = subjects_dir return outputs def _is_resuming(self): subjects_dir = self.inputs.subjects_dir if not isdefined(subjects_dir): subjects_dir = self._gen_subjects_dir() - if os.path.isdir( - os.path.join(subjects_dir, self.inputs.subject_id, 'mri')): + if os.path.isdir(os.path.join(subjects_dir, self.inputs.subject_id, "mri")): return True return False def _format_arg(self, name, trait_spec, value): - if name == 'T1_files': + if name == "T1_files": if self._is_resuming(): return None - if name == 'hippocampal_subfields_T1' and \ - isdefined(self.inputs.hippocampal_subfields_T2): + if name == "hippocampal_subfields_T1" and isdefined( + self.inputs.hippocampal_subfields_T2 + ): return None - if all((name == 'hippocampal_subfields_T2', + if all( + ( + name == "hippocampal_subfields_T2", isdefined(self.inputs.hippocampal_subfields_T1) - and self.inputs.hippocampal_subfields_T1)): - argstr = trait_spec.argstr.replace('T2', 'T1T2') + and self.inputs.hippocampal_subfields_T1, + ) + ): + argstr = trait_spec.argstr.replace("T2", "T1T2") return argstr % value - if name == 'directive' and value == 'autorecon-hemi': + if name == "directive" and value == "autorecon-hemi": if not isdefined(self.inputs.hemi): - raise ValueError("Directive 'autorecon-hemi' requires hemi " - "input to be set") - value += ' ' + self.inputs.hemi - if all((name == 'hemi', isdefined(self.inputs.directive) - and self.inputs.directive == 'autorecon-hemi')): + raise ValueError( + "Directive 'autorecon-hemi' requires hemi " "input to be set" + ) + value += " " + self.inputs.hemi + if all( + ( + name == "hemi", + isdefined(self.inputs.directive) + and self.inputs.directive == "autorecon-hemi", + ) + ): return None return super(ReconAll, self)._format_arg(name, trait_spec, value) @@ -1233,28 +1468,26 @@ def cmdline(self): directive = self.inputs.directive if not isdefined(directive): steps = [] - elif directive == 'autorecon1': + elif directive == "autorecon1": steps = self._autorecon1_steps - elif directive == 'autorecon2-volonly': + elif directive == "autorecon2-volonly": steps = self._autorecon2_volonly_steps - elif directive == 'autorecon2-perhemi': + elif directive == "autorecon2-perhemi": steps = self._autorecon2_perhemi_steps - elif directive.startswith('autorecon2'): + elif directive.startswith("autorecon2"): if isdefined(self.inputs.hemi): - if self.inputs.hemi == 'lh': - steps = (self._autorecon2_volonly_steps + - self._autorecon2_lh_steps) + if self.inputs.hemi == "lh": + steps = self._autorecon2_volonly_steps + self._autorecon2_lh_steps else: - steps = (self._autorecon2_volonly_steps + - self._autorecon2_rh_steps) + steps = self._autorecon2_volonly_steps + self._autorecon2_rh_steps else: steps = self._autorecon2_steps - elif directive == 'autorecon-hemi': - if self.inputs.hemi == 'lh': + elif directive == "autorecon-hemi": + if self.inputs.hemi == "lh": steps = self._autorecon_lh_steps else: steps = self._autorecon_rh_steps - elif directive == 'autorecon3': + elif directive == "autorecon3": steps = self._autorecon3_steps else: steps = self._steps @@ -1262,8 +1495,8 @@ 
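The cmdline hunk that follows decides, step by step, whether a -no<step> flag can be appended; check_depends (imported from nipype.utils.filemanip) encapsulates the freshness test. A simplified standalone sketch of that rule, not the exact implementation:

    import os

    def check_depends_sketch(targets, dependencies):
        """True when every target exists and none is older than any dependency."""
        return all(os.path.exists(t) for t in targets) and all(
            os.path.getmtime(t) >= os.path.getmtime(d)
            for t in targets
            for d in dependencies
        )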
def cmdline(self): no_run = True flags = [] for step, outfiles, infiles in steps: - flag = '-{}'.format(step) - noflag = '-no{}'.format(step) + flag = "-{}".format(step) + noflag = "-no{}".format(step) if noflag in cmd: continue elif flag in cmd: @@ -1271,42 +1504,43 @@ def cmdline(self): continue subj_dir = os.path.join(subjects_dir, self.inputs.subject_id) - if check_depends([os.path.join(subj_dir, f) for f in outfiles], - [os.path.join(subj_dir, f) for f in infiles]): + if check_depends( + [os.path.join(subj_dir, f) for f in outfiles], + [os.path.join(subj_dir, f) for f in infiles], + ): flags.append(noflag) else: no_run = False if no_run and not self.force_run: - iflogger.info('recon-all complete : Not running') + iflogger.info("recon-all complete : Not running") return "echo recon-all: nothing to do" - cmd += ' ' + ' '.join(flags) - iflogger.info('resume recon-all : %s', cmd) + cmd += " " + " ".join(flags) + iflogger.info("resume recon-all : %s", cmd) return cmd def _prep_expert_file(self): if isdefined(self.inputs.expert): - return '' + return "" lines = [] for binary in self._binaries: args = getattr(self.inputs, binary) if isdefined(args): - lines.append('{} {}\n'.format(binary, args)) + lines.append("{} {}\n".format(binary, args)) if lines == []: - return '' + return "" - contents = ''.join(lines) - if not isdefined(self.inputs.xopts) and \ - self._get_expert_file() == contents: - return ' -xopts-use' + contents = "".join(lines) + if not isdefined(self.inputs.xopts) and self._get_expert_file() == contents: + return " -xopts-use" - expert_fname = os.path.abspath('expert.opts') - with open(expert_fname, 'w') as fobj: + expert_fname = os.path.abspath("expert.opts") + with open(expert_fname, "w") as fobj: fobj.write(contents) - return ' -expert {}'.format(expert_fname) + return " -expert {}".format(expert_fname) def _get_expert_file(self): # Read pre-existing options file, if it exists @@ -1315,11 +1549,12 @@ def _get_expert_file(self): else: subjects_dir = self._gen_subjects_dir() - xopts_file = os.path.join(subjects_dir, self.inputs.subject_id, - 'scripts', 'expert-options') + xopts_file = os.path.join( + subjects_dir, self.inputs.subject_id, "scripts", "expert-options" + ) if not os.path.exists(xopts_file): - return '' - with open(xopts_file, 'r') as fobj: + return "" + with open(xopts_file, "r") as fobj: return fobj.read() @property @@ -1331,110 +1566,123 @@ def version(self): class BBRegisterInputSpec(FSTraitedSpec): subject_id = traits.Str( - argstr='--s %s', desc='freesurfer subject id', mandatory=True) + argstr="--s %s", desc="freesurfer subject id", mandatory=True + ) source_file = File( - argstr='--mov %s', - desc='source file to be registered', + argstr="--mov %s", + desc="source file to be registered", mandatory=True, - copyfile=False) + copyfile=False, + ) init = traits.Enum( - 'spm', - 'fsl', - 'header', - argstr='--init-%s', + "spm", + "fsl", + "header", + argstr="--init-%s", mandatory=True, - xor=['init_reg_file'], - desc='initialize registration spm, fsl, header') + xor=["init_reg_file"], + desc="initialize registration spm, fsl, header", + ) init_reg_file = File( exists=True, - argstr='--init-reg %s', - desc='existing registration file', - xor=['init'], - mandatory=True) + argstr="--init-reg %s", + desc="existing registration file", + xor=["init"], + mandatory=True, + ) contrast_type = traits.Enum( - 't1', - 't2', - 'bold', - 'dti', - argstr='--%s', - desc='contrast type of image', - mandatory=True) + "t1", + "t2", + "bold", + "dti", + argstr="--%s", + desc="contrast 
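_prep_expert_file above serializes any of the per-binary flag inputs into an expert-options file, one "binary args" line per entry; a standalone sketch of the file it writes (binary names and flag values are illustrative):

    expert_args = {"mri_normalize": "-g 1", "mris_inflate": "-n 15"}

    lines = ["{} {}\n".format(binary, args) for binary, args in expert_args.items()]
    with open("expert.opts", "w") as fobj:
        fobj.write("".join(lines))
    # recon-all then receives ' -expert /abs/path/expert.opts', or ' -xopts-use'
    # when an identical scripts/expert-options file already exists for the subject.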
type of image", + mandatory=True, + ) intermediate_file = File( exists=True, argstr="--int %s", - desc="Intermediate image, e.g. in case of partial FOV") + desc="Intermediate image, e.g. in case of partial FOV", + ) reg_frame = traits.Int( argstr="--frame %d", xor=["reg_middle_frame"], - desc="0-based frame index for 4D source file") + desc="0-based frame index for 4D source file", + ) reg_middle_frame = traits.Bool( argstr="--mid-frame", xor=["reg_frame"], - desc="Register middle frame of 4D source file") + desc="Register middle frame of 4D source file", + ) out_reg_file = File( - argstr='--reg %s', desc='output registration file', genfile=True) + argstr="--reg %s", desc="output registration file", genfile=True + ) spm_nifti = traits.Bool( - argstr="--spm-nii", - desc="force use of nifti rather than analyze with SPM") + argstr="--spm-nii", desc="force use of nifti rather than analyze with SPM" + ) epi_mask = traits.Bool( - argstr="--epi-mask", desc="mask out B0 regions in stages 1 and 2") + argstr="--epi-mask", desc="mask out B0 regions in stages 1 and 2" + ) dof = traits.Enum( - 6, 9, 12, argstr='--%d', desc='number of transform degrees of freedom') + 6, 9, 12, argstr="--%d", desc="number of transform degrees of freedom" + ) fsldof = traits.Int( - argstr='--fsl-dof %d', - desc='degrees of freedom for initial registration (FSL)') + argstr="--fsl-dof %d", desc="degrees of freedom for initial registration (FSL)" + ) out_fsl_file = traits.Either( traits.Bool, File, argstr="--fslmat %s", - desc="write the transformation matrix in FSL FLIRT format") + desc="write the transformation matrix in FSL FLIRT format", + ) out_lta_file = traits.Either( traits.Bool, File, argstr="--lta %s", - min_ver='5.2.0', - desc="write the transformation matrix in LTA format") + min_ver="5.2.0", + desc="write the transformation matrix in LTA format", + ) registered_file = traits.Either( traits.Bool, File, - argstr='--o %s', - desc='output warped sourcefile either True or filename') + argstr="--o %s", + desc="output warped sourcefile either True or filename", + ) init_cost_file = traits.Either( traits.Bool, File, - argstr='--initcost %s', - desc='output initial registration cost file') + argstr="--initcost %s", + desc="output initial registration cost file", + ) class BBRegisterInputSpec6(BBRegisterInputSpec): init = traits.Enum( - 'coreg', - 'rr', - 'spm', - 'fsl', - 'header', - 'best', - argstr='--init-%s', - xor=['init_reg_file'], - desc='initialize registration with mri_coreg, spm, fsl, or header') + "coreg", + "rr", + "spm", + "fsl", + "header", + "best", + argstr="--init-%s", + xor=["init_reg_file"], + desc="initialize registration with mri_coreg, spm, fsl, or header", + ) init_reg_file = File( exists=True, - argstr='--init-reg %s', - desc='existing registration file', - xor=['init']) + argstr="--init-reg %s", + desc="existing registration file", + xor=["init"], + ) class BBRegisterOutputSpec(TraitedSpec): - out_reg_file = File(exists=True, desc='Output registration file') - out_fsl_file = File( - exists=True, desc='Output FLIRT-style registration file') - out_lta_file = File(exists=True, desc='Output LTA-style registration file') - min_cost_file = File( - exists=True, desc='Output registration minimum cost file') - init_cost_file = File( - exists=True, desc='Output initial registration cost file') - registered_file = File( - exists=True, desc='Registered and resampled source file') + out_reg_file = File(exists=True, desc="Output registration file") + out_fsl_file = File(exists=True, desc="Output FLIRT-style 
registration file") + out_lta_file = File(exists=True, desc="Output LTA-style registration file") + min_cost_file = File(exists=True, desc="Output registration minimum cost file") + init_cost_file = File(exists=True, desc="Output initial registration cost file") + registered_file = File(exists=True, desc="Registered and resampled source file") class BBRegister(FSCommand): @@ -1454,8 +1702,8 @@ class BBRegister(FSCommand): """ - _cmd = 'bbregister' - if LooseVersion('0.0.0') < Info.looseversion() < LooseVersion("6.0.0"): + _cmd = "bbregister" + if LooseVersion("0.0.0") < Info.looseversion() < LooseVersion("6.0.0"): input_spec = BBRegisterInputSpec else: input_spec = BBRegisterInputSpec6 @@ -1467,56 +1715,63 @@ def _list_outputs(self): _in = self.inputs if isdefined(_in.out_reg_file): - outputs['out_reg_file'] = op.abspath(_in.out_reg_file) + outputs["out_reg_file"] = op.abspath(_in.out_reg_file) elif _in.source_file: - suffix = '_bbreg_%s.dat' % _in.subject_id - outputs['out_reg_file'] = fname_presuffix( - _in.source_file, suffix=suffix, use_ext=False) + suffix = "_bbreg_%s.dat" % _in.subject_id + outputs["out_reg_file"] = fname_presuffix( + _in.source_file, suffix=suffix, use_ext=False + ) if isdefined(_in.registered_file): if isinstance(_in.registered_file, bool): - outputs['registered_file'] = fname_presuffix( - _in.source_file, suffix='_bbreg') + outputs["registered_file"] = fname_presuffix( + _in.source_file, suffix="_bbreg" + ) else: - outputs['registered_file'] = op.abspath(_in.registered_file) + outputs["registered_file"] = op.abspath(_in.registered_file) if isdefined(_in.out_lta_file): if isinstance(_in.out_lta_file, bool): - suffix = '_bbreg_%s.lta' % _in.subject_id + suffix = "_bbreg_%s.lta" % _in.subject_id out_lta_file = fname_presuffix( - _in.source_file, suffix=suffix, use_ext=False) - outputs['out_lta_file'] = out_lta_file + _in.source_file, suffix=suffix, use_ext=False + ) + outputs["out_lta_file"] = out_lta_file else: - outputs['out_lta_file'] = op.abspath(_in.out_lta_file) + outputs["out_lta_file"] = op.abspath(_in.out_lta_file) if isdefined(_in.out_fsl_file): if isinstance(_in.out_fsl_file, bool): - suffix = '_bbreg_%s.mat' % _in.subject_id + suffix = "_bbreg_%s.mat" % _in.subject_id out_fsl_file = fname_presuffix( - _in.source_file, suffix=suffix, use_ext=False) - outputs['out_fsl_file'] = out_fsl_file + _in.source_file, suffix=suffix, use_ext=False + ) + outputs["out_fsl_file"] = out_fsl_file else: - outputs['out_fsl_file'] = op.abspath(_in.out_fsl_file) + outputs["out_fsl_file"] = op.abspath(_in.out_fsl_file) if isdefined(_in.init_cost_file): if isinstance(_in.out_fsl_file, bool): - outputs[ - 'init_cost_file'] = outputs['out_reg_file'] + '.initcost' + outputs["init_cost_file"] = outputs["out_reg_file"] + ".initcost" else: - outputs['init_cost_file'] = op.abspath(_in.init_cost_file) + outputs["init_cost_file"] = op.abspath(_in.init_cost_file) - outputs['min_cost_file'] = outputs['out_reg_file'] + '.mincost' + outputs["min_cost_file"] = outputs["out_reg_file"] + ".mincost" return outputs def _format_arg(self, name, spec, value): - if name in ('registered_file', 'out_fsl_file', 'out_lta_file', - 'init_cost_file') and isinstance(value, bool): + if name in ( + "registered_file", + "out_fsl_file", + "out_lta_file", + "init_cost_file", + ) and isinstance(value, bool): value = self._list_outputs()[name] return super(BBRegister, self)._format_arg(name, spec, value) def _gen_filename(self, name): - if name == 'out_reg_file': + if name == "out_reg_file": return 
self._list_outputs()[name] return None @@ -1524,123 +1779,150 @@ def _gen_filename(self, name): class ApplyVolTransformInputSpec(FSTraitedSpec): source_file = File( exists=True, - argstr='--mov %s', + argstr="--mov %s", copyfile=False, mandatory=True, - desc='Input volume you wish to transform') - transformed_file = File( - desc='Output volume', argstr='--o %s', genfile=True) - _targ_xor = ('target_file', 'tal', 'fs_target') + desc="Input volume you wish to transform", + ) + transformed_file = File(desc="Output volume", argstr="--o %s", genfile=True) + _targ_xor = ("target_file", "tal", "fs_target") target_file = File( exists=True, - argstr='--targ %s', + argstr="--targ %s", xor=_targ_xor, - desc='Output template volume', - mandatory=True) + desc="Output template volume", + mandatory=True, + ) tal = traits.Bool( - argstr='--tal', + argstr="--tal", xor=_targ_xor, mandatory=True, - desc='map to a sub FOV of MNI305 (with --reg only)') + desc="map to a sub FOV of MNI305 (with --reg only)", + ) tal_resolution = traits.Float( - argstr="--talres %.10f", desc="Resolution to sample when using tal") + argstr="--talres %.10f", desc="Resolution to sample when using tal" + ) fs_target = traits.Bool( - argstr='--fstarg', + argstr="--fstarg", xor=_targ_xor, mandatory=True, - requires=['reg_file'], - desc='use orig.mgz from subject in regfile as target') - _reg_xor = ('reg_file', 'lta_file', 'lta_inv_file', 'fsl_reg_file', - 'xfm_reg_file', 'reg_header', 'mni_152_reg', 'subject') + requires=["reg_file"], + desc="use orig.mgz from subject in regfile as target", + ) + _reg_xor = ( + "reg_file", + "lta_file", + "lta_inv_file", + "fsl_reg_file", + "xfm_reg_file", + "reg_header", + "mni_152_reg", + "subject", + ) reg_file = File( exists=True, xor=_reg_xor, - argstr='--reg %s', + argstr="--reg %s", mandatory=True, - desc='tkRAS-to-tkRAS matrix (tkregister2 format)') + desc="tkRAS-to-tkRAS matrix (tkregister2 format)", + ) lta_file = File( exists=True, xor=_reg_xor, - argstr='--lta %s', + argstr="--lta %s", mandatory=True, - desc='Linear Transform Array file') + desc="Linear Transform Array file", + ) lta_inv_file = File( exists=True, xor=_reg_xor, - argstr='--lta-inv %s', + argstr="--lta-inv %s", mandatory=True, - desc='LTA, invert') + desc="LTA, invert", + ) reg_file = File( exists=True, xor=_reg_xor, - argstr='--reg %s', + argstr="--reg %s", mandatory=True, - desc='tkRAS-to-tkRAS matrix (tkregister2 format)') + desc="tkRAS-to-tkRAS matrix (tkregister2 format)", + ) fsl_reg_file = File( exists=True, xor=_reg_xor, - argstr='--fsl %s', + argstr="--fsl %s", mandatory=True, - desc='fslRAS-to-fslRAS matrix (FSL format)') + desc="fslRAS-to-fslRAS matrix (FSL format)", + ) xfm_reg_file = File( exists=True, xor=_reg_xor, - argstr='--xfm %s', + argstr="--xfm %s", mandatory=True, - desc='ScannerRAS-to-ScannerRAS matrix (MNI format)') + desc="ScannerRAS-to-ScannerRAS matrix (MNI format)", + ) reg_header = traits.Bool( xor=_reg_xor, - argstr='--regheader', + argstr="--regheader", mandatory=True, - desc='ScannerRAS-to-ScannerRAS matrix = identity') + desc="ScannerRAS-to-ScannerRAS matrix = identity", + ) mni_152_reg = traits.Bool( - xor=_reg_xor, - argstr='--regheader', - mandatory=True, - desc='target MNI152 space') + xor=_reg_xor, argstr="--regheader", mandatory=True, desc="target MNI152 space" + ) subject = traits.Str( xor=_reg_xor, - argstr='--s %s', + argstr="--s %s", mandatory=True, - desc='set matrix = identity and use subject for any templates') - inverse = traits.Bool(desc='sample from target to source', 
argstr='--inv') + desc="set matrix = identity and use subject for any templates", + ) + inverse = traits.Bool(desc="sample from target to source", argstr="--inv") interp = traits.Enum( - 'trilin', - 'nearest', - 'cubic', - argstr='--interp %s', - desc='Interpolation method (<trilin> or nearest)') + "trilin", + "nearest", + "cubic", + argstr="--interp %s", + desc="Interpolation method (<trilin> or nearest)", + ) no_resample = traits.Bool( - desc='Do not resample; just change vox2ras matrix', - argstr='--no-resample') + desc="Do not resample; just change vox2ras matrix", argstr="--no-resample" + ) m3z_file = File( argstr="--m3z %s", - desc=('This is the morph to be applied to the volume. ' - 'Unless the morph is in mri/transforms (eg.: for ' - 'talairach.m3z computed by reconall), you will need ' - 'to specify the full path to this morph and use the ' - '--noDefM3zPath flag.')) + desc=( + "This is the morph to be applied to the volume. " + "Unless the morph is in mri/transforms (eg.: for " + "talairach.m3z computed by reconall), you will need " + "to specify the full path to this morph and use the " + "--noDefM3zPath flag." + ), + ) no_ded_m3z_path = traits.Bool( argstr="--noDefM3zPath", - requires=['m3z_file'], - desc=('To be used with the m3z flag. ' - 'Instructs the code not to look for the' - 'm3z morph in the default location ' - '(SUBJECTS_DIR/subj/mri/transforms), ' - 'but instead just use the path ' - 'indicated in --m3z.')) + requires=["m3z_file"], + desc=( + "To be used with the m3z flag. " + "Instructs the code not to look for the " + "m3z morph in the default location " + "(SUBJECTS_DIR/subj/mri/transforms), " + "but instead just use the path " + "indicated in --m3z." + ), + ) invert_morph = traits.Bool( argstr="--inv-morph", - requires=['m3z_file'], - desc=('Compute and use the inverse of the ' - 'non-linear morph to resample the input ' - 'volume. To be used by --m3z.')) + requires=["m3z_file"], + desc=( + "Compute and use the inverse of the " + "non-linear morph to resample the input " + "volume. To be used by --m3z."
+ ), + ) class ApplyVolTransformOutputSpec(TraitedSpec): - transformed_file = File( - exists=True, desc='Path to output file if used normally') + transformed_file = File(exists=True, desc="Path to output file if used normally") class ApplyVolTransform(FSCommand): @@ -1660,7 +1942,7 @@ class ApplyVolTransform(FSCommand): """ - _cmd = 'mri_vol2vol' + _cmd = "mri_vol2vol" input_spec = ApplyVolTransformInputSpec output_spec = ApplyVolTransformOutputSpec @@ -1669,67 +1951,69 @@ def _get_outfile(self): if not isdefined(outfile): if self.inputs.inverse is True: if self.inputs.fs_target is True: - src = 'orig.mgz' + src = "orig.mgz" else: src = self.inputs.target_file else: src = self.inputs.source_file - outfile = fname_presuffix( - src, newpath=os.getcwd(), suffix='_warped') + outfile = fname_presuffix(src, newpath=os.getcwd(), suffix="_warped") return outfile def _list_outputs(self): outputs = self.output_spec().get() - outputs['transformed_file'] = os.path.abspath(self._get_outfile()) + outputs["transformed_file"] = os.path.abspath(self._get_outfile()) return outputs def _gen_filename(self, name): - if name == 'transformed_file': + if name == "transformed_file": return self._get_outfile() return None class SmoothInputSpec(FSTraitedSpec): - in_file = File( - exists=True, desc='source volume', argstr='--i %s', mandatory=True) + in_file = File(exists=True, desc="source volume", argstr="--i %s", mandatory=True) reg_file = File( - desc='registers volume to surface anatomical ', - argstr='--reg %s', + desc="registers volume to surface anatomical ", + argstr="--reg %s", mandatory=True, - exists=True) - smoothed_file = File(desc='output volume', argstr='--o %s', genfile=True) + exists=True, + ) + smoothed_file = File(desc="output volume", argstr="--o %s", genfile=True) proj_frac_avg = traits.Tuple( traits.Float, traits.Float, traits.Float, - xor=['proj_frac'], - desc='average a long normal min max delta', - argstr='--projfrac-avg %.2f %.2f %.2f') + xor=["proj_frac"], + desc="average along normal min max delta", + argstr="--projfrac-avg %.2f %.2f %.2f", + ) proj_frac = traits.Float( - desc='project frac of thickness a long surface normal', - xor=['proj_frac_avg'], - argstr='--projfrac %s') + desc="project frac of thickness along surface normal", + xor=["proj_frac_avg"], + argstr="--projfrac %s", + ) surface_fwhm = traits.Range( low=0.0, - requires=['reg_file'], + requires=["reg_file"], mandatory=True, - xor=['num_iters'], - desc='surface FWHM in mm', - argstr='--fwhm %f') + xor=["num_iters"], + desc="surface FWHM in mm", + argstr="--fwhm %f", + ) num_iters = traits.Range( low=1, - xor=['surface_fwhm'], + xor=["surface_fwhm"], mandatory=True, - argstr='--niters %d', - desc='number of iterations instead of fwhm') + argstr="--niters %d", + desc="number of iterations instead of fwhm", + ) vol_fwhm = traits.Range( - low=0.0, - argstr='--vol-fwhm %f', - desc='volume smoothing outside of surface') + low=0.0, argstr="--vol-fwhm %f", desc="volume smoothing outside of surface" + ) class SmoothOutputSpec(TraitedSpec): - smoothed_file = File(exists=True, desc='smoothed input volume') + smoothed_file = File(exists=True, desc="smoothed input volume") class Smooth(FSCommand): @@ -1754,7 +2038,7 @@ class Smooth(FSCommand): """ - _cmd = 'mris_volsmooth' + _cmd = "mris_volsmooth" input_spec = SmoothInputSpec output_spec = SmoothOutputSpec @@ -1762,12 +2046,12 @@ def _list_outputs(self): outputs = self.output_spec().get() outfile = self.inputs.smoothed_file if not isdefined(outfile): - outfile =
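End to end, _get_outfile above yields the default "_warped" name; a sketch mirroring the ApplyVolTransform docstring (files assumed to exist):

    from nipype.interfaces.freesurfer import ApplyVolTransform

    applyreg = ApplyVolTransform()
    applyreg.inputs.source_file = "structural.nii"
    applyreg.inputs.reg_file = "register.dat"
    applyreg.inputs.fs_target = True
    print(applyreg.cmdline)
    # 'mri_vol2vol --fstarg --mov structural.nii --reg register.dat --o structural_warped.nii'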
self._gen_fname(self.inputs.in_file, suffix='_smooth') - outputs['smoothed_file'] = outfile + outfile = self._gen_fname(self.inputs.in_file, suffix="_smooth") + outputs["smoothed_file"] = outfile return outputs def _gen_filename(self, name): - if name == 'smoothed_file': + if name == "smoothed_file": return self._list_outputs()[name] return None @@ -1775,130 +2059,139 @@ def _gen_filename(self, name): class RobustRegisterInputSpec(FSTraitedSpec): source_file = File( - exists=True, - mandatory=True, - argstr='--mov %s', - desc='volume to be registered') + exists=True, mandatory=True, argstr="--mov %s", desc="volume to be registered" + ) target_file = File( exists=True, mandatory=True, - argstr='--dst %s', - desc='target volume for the registration') + argstr="--dst %s", + desc="target volume for the registration", + ) out_reg_file = traits.Either( True, File, default=True, usedefault=True, - argstr='--lta %s', - desc='registration file; either True or filename') + argstr="--lta %s", + desc="registration file; either True or filename", + ) registered_file = traits.Either( traits.Bool, File, - argstr='--warp %s', - desc='registered image; either True or filename') + argstr="--warp %s", + desc="registered image; either True or filename", + ) weights_file = traits.Either( traits.Bool, File, - argstr='--weights %s', - desc='weights image to write; either True or filename') + argstr="--weights %s", + desc="weights image to write; either True or filename", + ) est_int_scale = traits.Bool( - argstr='--iscale', - desc='estimate intensity scale (recommended for unnormalized images)') + argstr="--iscale", + desc="estimate intensity scale (recommended for unnormalized images)", + ) trans_only = traits.Bool( - argstr='--transonly', desc='find 3 parameter translation only') + argstr="--transonly", desc="find 3 parameter translation only" + ) in_xfm_file = File( - exists=True, - argstr='--transform', - desc='use initial transform on source') + exists=True, argstr="--transform", desc="use initial transform on source" + ) half_source = traits.Either( traits.Bool, File, - argstr='--halfmov %s', - desc="write source volume mapped to halfway space") + argstr="--halfmov %s", + desc="write source volume mapped to halfway space", + ) half_targ = traits.Either( traits.Bool, File, argstr="--halfdst %s", - desc="write target volume mapped to halfway space") + desc="write target volume mapped to halfway space", + ) half_weights = traits.Either( traits.Bool, File, argstr="--halfweights %s", - desc="write weights volume mapped to halfway space") + desc="write weights volume mapped to halfway space", + ) half_source_xfm = traits.Either( traits.Bool, File, argstr="--halfmovlta %s", - desc="write transform from source to halfway space") + desc="write transform from source to halfway space", + ) half_targ_xfm = traits.Either( traits.Bool, File, argstr="--halfdstlta %s", - desc="write transform from target to halfway space") + desc="write transform from target to halfway space", + ) auto_sens = traits.Bool( - argstr='--satit', - xor=['outlier_sens'], + argstr="--satit", + xor=["outlier_sens"], mandatory=True, - desc='auto-detect good sensitivity') + desc="auto-detect good sensitivity", + ) outlier_sens = traits.Float( - argstr='--sat %.4f', - xor=['auto_sens'], + argstr="--sat %.4f", + xor=["auto_sens"], mandatory=True, - desc='set outlier sensitivity explicitly') + desc="set outlier sensitivity explicitly", + ) least_squares = traits.Bool( - argstr='--leastsquares', - desc='use least squares instead of robust estimator') - 
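Several RobustRegister outputs above take either True or a filename; passing True lets _format_arg (further below) substitute a generated name. A usage sketch mirroring the interface docstring (input files assumed to exist; .../ marks the working directory):

    from nipype.interfaces.freesurfer import RobustRegister

    reg = RobustRegister()
    reg.inputs.source_file = "structural.nii"
    reg.inputs.target_file = "T1.mgz"
    reg.inputs.auto_sens = True
    reg.inputs.init_orient = True
    print(reg.cmdline)
    # 'mri_robust_register --satit --initorient --lta .../structural_robustreg.lta --mov structural.nii --dst T1.mgz'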
no_init = traits.Bool(argstr='--noinit', desc='skip transform init') + argstr="--leastsquares", desc="use least squares instead of robust estimator" + ) + no_init = traits.Bool(argstr="--noinit", desc="skip transform init") init_orient = traits.Bool( - argstr='--initorient', - desc='use moments for initial orient (recommended for stripped brains)' + argstr="--initorient", + desc="use moments for initial orient (recommended for stripped brains)", ) max_iterations = traits.Int( - argstr='--maxit %d', desc='maximum # of times on each resolution') + argstr="--maxit %d", desc="maximum # of times on each resolution" + ) high_iterations = traits.Int( - argstr='--highit %d', desc='max # of times on highest resolution') + argstr="--highit %d", desc="max # of times on highest resolution" + ) iteration_thresh = traits.Float( - argstr='--epsit %.3f', desc='stop iterations when below threshold') + argstr="--epsit %.3f", desc="stop iterations when below threshold" + ) subsample_thresh = traits.Int( - argstr='--subsample %d', - desc='subsample if dimension is above threshold size') + argstr="--subsample %d", desc="subsample if dimension is above threshold size" + ) outlier_limit = traits.Float( - argstr='--wlimit %.3f', desc='set maximal outlier limit in satit') + argstr="--wlimit %.3f", desc="set maximal outlier limit in satit" + ) write_vo2vox = traits.Bool( - argstr='--vox2vox', desc='output vox2vox matrix (default is RAS2RAS)') - no_multi = traits.Bool( - argstr='--nomulti', desc='work on highest resolution') + argstr="--vox2vox", desc="output vox2vox matrix (default is RAS2RAS)" + ) + no_multi = traits.Bool(argstr="--nomulti", desc="work on highest resolution") mask_source = File( - exists=True, - argstr='--maskmov %s', - desc='image to mask source volume with') + exists=True, argstr="--maskmov %s", desc="image to mask source volume with" + ) mask_target = File( - exists=True, - argstr='--maskdst %s', - desc='image to mask target volume with') + exists=True, argstr="--maskdst %s", desc="image to mask target volume with" + ) force_double = traits.Bool( - argstr='--doubleprec', desc='use double-precision intensities') - force_float = traits.Bool( - argstr='--floattype', desc='use float intensities') + argstr="--doubleprec", desc="use double-precision intensities" + ) + force_float = traits.Bool(argstr="--floattype", desc="use float intensities") class RobustRegisterOutputSpec(TraitedSpec): out_reg_file = File(exists=True, desc="output registration file") - registered_file = File( - exists=True, desc="output image with registration applied") + registered_file = File(exists=True, desc="output image with registration applied") weights_file = File(exists=True, desc="image of weights used") - half_source = File( - exists=True, desc="source image mapped to halfway space") + half_source = File(exists=True, desc="source image mapped to halfway space") half_targ = File(exists=True, desc="target image mapped to halfway space") - half_weights = File( - exists=True, desc="weights image mapped to halfway space") + half_weights = File(exists=True, desc="weights image mapped to halfway space") half_source_xfm = File( - exists=True, - desc="transform file to map source image to halfway space") + exists=True, desc="transform file to map source image to halfway space" + ) half_targ_xfm = File( - exists=True, - desc="transform file to map target image to halfway space") + exists=True, desc="transform file to map target image to halfway space" + ) class RobustRegister(FSCommand): @@ -1923,14 +2216,21 @@ class 
RobustRegister(FSCommand): """ - _cmd = 'mri_robust_register' + _cmd = "mri_robust_register" input_spec = RobustRegisterInputSpec output_spec = RobustRegisterOutputSpec def _format_arg(self, name, spec, value): - options = ("out_reg_file", "registered_file", "weights_file", - "half_source", "half_targ", "half_weights", - "half_source_xfm", "half_targ_xfm") + options = ( + "out_reg_file", + "registered_file", + "weights_file", + "half_source", + "half_targ", + "half_weights", + "half_source_xfm", + "half_targ_xfm", + ) if name in options and isinstance(value, bool): value = self._list_outputs()[name] return super(RobustRegister, self)._format_arg(name, spec, value) @@ -1938,8 +2238,7 @@ def _format_arg(self, name, spec, value): def _list_outputs(self): outputs = self.output_spec().get() cwd = os.getcwd() - prefices = dict( - src=self.inputs.source_file, trg=self.inputs.target_file) + prefices = dict(src=self.inputs.source_file, trg=self.inputs.target_file) suffices = dict( out_reg_file=("src", "_robustreg.lta", False), registered_file=("src", "_robustreg", True), @@ -1948,7 +2247,8 @@ def _list_outputs(self): half_targ=("trg", "_halfway", True), half_weights=("src", "_halfweights", True), half_source_xfm=("src", "_robustxfm.lta", False), - half_targ_xfm=("trg", "_robustxfm.lta", False)) + half_targ_xfm=("trg", "_robustxfm.lta", False), + ) for name, sufftup in list(suffices.items()): value = getattr(self.inputs, name) if value: @@ -1957,7 +2257,8 @@ def _list_outputs(self): prefices[sufftup[0]], suffix=sufftup[1], newpath=cwd, - use_ext=sufftup[2]) + use_ext=sufftup[2], + ) else: outputs[name] = os.path.abspath(value) return outputs @@ -1970,29 +2271,23 @@ class FitMSParamsInputSpec(FSTraitedSpec): argstr="%s", position=-2, mandatory=True, - desc="list of FLASH images (must be in mgh format)") - tr_list = traits.List( - traits.Int, desc="list of TRs of the input files (in msec)") - te_list = traits.List( - traits.Float, desc="list of TEs of the input files (in msec)") - flip_list = traits.List( - traits.Int, desc="list of flip angles of the input files") + desc="list of FLASH images (must be in mgh format)", + ) + tr_list = traits.List(traits.Int, desc="list of TRs of the input files (in msec)") + te_list = traits.List(traits.Float, desc="list of TEs of the input files (in msec)") + flip_list = traits.List(traits.Int, desc="list of flip angles of the input files") xfm_list = traits.List( - File(exists=True), - desc="list of transform files to apply to each FLASH image") + File(exists=True), desc="list of transform files to apply to each FLASH image" + ) out_dir = Directory( - argstr="%s", - position=-1, - genfile=True, - desc="directory to store output in") + argstr="%s", position=-1, genfile=True, desc="directory to store output in" + ) class FitMSParamsOutputSpec(TraitedSpec): - t1_image = File( - exists=True, desc="image of estimated T1 relaxation values") - pd_image = File( - exists=True, desc="image of estimated proton density values") + t1_image = File(exists=True, desc="image of estimated T1 relaxation values") + pd_image = File(exists=True, desc="image of estimated proton density values") t2star_image = File(exists=True, desc="image of estimated T2* values") @@ -2009,6 +2304,7 @@ class FitMSParams(FSCommand): 'mri_ms_fitparms flash_05.mgz flash_30.mgz flash_parameters' """ + _cmd = "mri_ms_fitparms" input_spec = FitMSParamsInputSpec output_spec = FitMSParamsOutputSpec @@ -2022,8 +2318,7 @@ def _format_arg(self, name, spec, value): if isdefined(self.inputs.te_list): cmd = " 
".join((cmd, "-te %.3f" % self.inputs.te_list[i])) if isdefined(self.inputs.flip_list): - cmd = " ".join((cmd, - "-fa %.1f" % self.inputs.flip_list[i])) + cmd = " ".join((cmd, "-fa %.1f" % self.inputs.flip_list[i])) if isdefined(self.inputs.xfm_list): cmd = " ".join((cmd, "-at %s" % self.inputs.xfm_list[i])) cmd = " ".join((cmd, file)) @@ -2052,31 +2347,27 @@ class SynthesizeFLASHInputSpec(FSTraitedSpec): fixed_weighting = traits.Bool( position=1, argstr="-w", - desc="use a fixed weighting to generate optimal gray/white contrast") + desc="use a fixed weighting to generate optimal gray/white contrast", + ) tr = traits.Float( - mandatory=True, - position=2, - argstr="%.2f", - desc="repetition time (in msec)") + mandatory=True, position=2, argstr="%.2f", desc="repetition time (in msec)" + ) flip_angle = traits.Float( - mandatory=True, - position=3, - argstr="%.2f", - desc="flip angle (in degrees)") + mandatory=True, position=3, argstr="%.2f", desc="flip angle (in degrees)" + ) te = traits.Float( - mandatory=True, position=4, argstr="%.3f", desc="echo time (in msec)") + mandatory=True, position=4, argstr="%.3f", desc="echo time (in msec)" + ) t1_image = File( - exists=True, - mandatory=True, - position=5, - argstr="%s", - desc="image of T1 values") + exists=True, mandatory=True, position=5, argstr="%s", desc="image of T1 values" + ) pd_image = File( exists=True, mandatory=True, position=6, argstr="%s", - desc="image of proton density values") + desc="image of proton density values", + ) out_file = File(genfile=True, argstr="%s", desc="image to write") @@ -2099,6 +2390,7 @@ class SynthesizeFLASH(FSCommand): 'mri_synthesize 20.00 30.00 3.000 T1.mgz PD.mgz flash_30syn.mgz' """ + _cmd = "mri_synthesize" input_spec = SynthesizeFLASHInputSpec output_spec = SynthesizeFLASHOutputSpec @@ -2109,7 +2401,8 @@ def _list_outputs(self): outputs["out_file"] = self.inputs.out_file else: outputs["out_file"] = self._gen_fname( - "synth-flash_%02d.mgz" % self.inputs.flip_angle, suffix="") + "synth-flash_%02d.mgz" % self.inputs.flip_angle, suffix="" + ) return outputs def _gen_filename(self, name): @@ -2124,55 +2417,53 @@ class MNIBiasCorrectionInputSpec(FSTraitedSpec): exists=True, mandatory=True, argstr="--i %s", - desc="input volume. Input can be any format accepted by mri_convert.") + desc="input volume. Input can be any format accepted by mri_convert.", + ) # optional out_file = File( argstr="--o %s", - name_source=['in_file'], - name_template='%s_output', + name_source=["in_file"], + name_template="%s_output", hash_files=False, keep_extension=True, desc="output volume. Output can be any format accepted by mri_convert. " - + "If the output format is COR, then the directory must exist.") + + "If the output format is COR, then the directory must exist.", + ) iterations = traits.Int( - 4, usedefault=True, + 4, + usedefault=True, argstr="--n %d", - desc= - "Number of iterations to run nu_correct. Default is 4. This is the number of times " - + - "that nu_correct is repeated (ie, using the output from the previous run as the input for " - + - "the next). This is different than the -iterations option to nu_correct." + desc="Number of iterations to run nu_correct. Default is 4. This is the number of times " + + "that nu_correct is repeated (ie, using the output from the previous run as the input for " + + "the next). This is different than the -iterations option to nu_correct.", ) protocol_iterations = traits.Int( argstr="--proto-iters %d", - desc= - "Passes Np as argument of the -iterations flag of nu_correct. 
This is different " - + - "than the --n flag above. Default is not to pass nu_correct the -iterations flag." + desc="Passes Np as argument of the -iterations flag of nu_correct. This is different " + + "than the --n flag above. Default is not to pass nu_correct the -iterations flag.", ) distance = traits.Int(argstr="--distance %d", desc="N3 -distance option") no_rescale = traits.Bool( argstr="--no-rescale", - desc="do not rescale so that global mean of output == input global mean" + desc="do not rescale so that global mean of output == input global mean", ) mask = File( exists=True, argstr="--mask %s", - desc= - "brainmask volume. Input can be any format accepted by mri_convert.") + desc="brainmask volume. Input can be any format accepted by mri_convert.", + ) transform = File( exists=True, argstr="--uchar %s", - desc="tal.xfm. Use mri_make_uchar instead of conforming") + desc="tal.xfm. Use mri_make_uchar instead of conforming", + ) stop = traits.Float( argstr="--stop %f", - desc= - "Convergence threshold below which iteration stops (suggest 0.01 to 0.0001)" + desc="Convergence threshold below which iteration stops (suggest 0.01 to 0.0001)", ) shrink = traits.Int( - argstr="--shrink %d", - desc="Shrink parameter for finer sampling (default is 4)") + argstr="--shrink %d", desc="Shrink parameter for finer sampling (default is 4)" + ) class MNIBiasCorrectionOutputSpec(TraitedSpec): @@ -2207,6 +2498,7 @@ class MNIBiasCorrection(FSCommand): [https://github.com/BIC-MNI/N3] """ + _cmd = "mri_nu_correct.mni" input_spec = MNIBiasCorrectionInputSpec output_spec = MNIBiasCorrectionOutputSpec @@ -2215,26 +2507,21 @@ class MNIBiasCorrection(FSCommand): class WatershedSkullStripInputSpec(FSTraitedSpec): # required in_file = File( - argstr="%s", - exists=True, - mandatory=True, - position=-2, - desc="input volume") + argstr="%s", exists=True, mandatory=True, position=-2, desc="input volume" + ) out_file = File( - 'brainmask.auto.mgz', + "brainmask.auto.mgz", argstr="%s", exists=False, mandatory=True, position=-1, usedefault=True, - desc="output volume") + desc="output volume", + ) # optional - t1 = traits.Bool( - argstr="-T1", desc="specify T1 input volume (T1 grey value = 110)") - brain_atlas = File( - argstr="-brain_atlas %s", exists=True, position=-4, desc="") - transform = File( - argstr="%s", exists=False, position=-3, desc="undocumented") + t1 = traits.Bool(argstr="-T1", desc="specify T1 input volume (T1 grey value = 110)") + brain_atlas = File(argstr="-brain_atlas %s", exists=True, position=-4, desc="") + transform = File(argstr="%s", exists=False, position=-3, desc="undocumented") class WatershedSkullStripOutputSpec(TraitedSpec): @@ -2266,46 +2553,48 @@ class WatershedSkullStrip(FSCommand): >>> skullstrip.cmdline 'mri_watershed -T1 transforms/talairach_with_skull.lta T1.mgz brainmask.auto.mgz' """ - _cmd = 'mri_watershed' + + _cmd = "mri_watershed" input_spec = WatershedSkullStripInputSpec output_spec = WatershedSkullStripOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class NormalizeInputSpec(FSTraitedSpec): # required in_file = File( - argstr='%s', + argstr="%s", exists=True, mandatory=True, position=-2, - desc="The input file for Normalize") + desc="The input file for Normalize", + ) out_file = File( - argstr='%s', + argstr="%s", position=-1, - name_source=['in_file'], - name_template='%s_norm', + name_source=["in_file"], + 
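# [editor's sketch] A hedged example for WatershedSkullStrip, mirroring the
# doctest above: out_file defaults to 'brainmask.auto.mgz' (usedefault=True),
# and the undocumented transform is passed positionally before in_file/out_file.
from nipype.interfaces.freesurfer import WatershedSkullStrip

strip = WatershedSkullStrip(in_file="T1.mgz", t1=True)
strip.inputs.transform = "transforms/talairach_with_skull.lta"
assert strip.cmdline == (
    "mri_watershed -T1 transforms/talairach_with_skull.lta T1.mgz brainmask.auto.mgz"
)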
name_template="%s_norm", hash_files=False, keep_extension=True, - desc="The output file for Normalize") + desc="The output file for Normalize", + ) # optional gradient = traits.Int( - argstr="-g %d", - desc="use max intensity/mm gradient g (default=1)") + argstr="-g %d", desc="use max intensity/mm gradient g (default=1)" + ) mask = File( - argstr="-mask %s", - exists=True, - desc="The input mask file for Normalize") + argstr="-mask %s", exists=True, desc="The input mask file for Normalize" + ) segmentation = File( - argstr="-aseg %s", - exists=True, - desc="The input segmentation for Normalize") + argstr="-aseg %s", exists=True, desc="The input segmentation for Normalize" + ) transform = File( - exists=True, desc="Tranform file from the header of the input file") + exists=True, desc="Tranform file from the header of the input file" + ) class NormalizeOutputSpec(TraitedSpec): @@ -2327,57 +2616,61 @@ class Normalize(FSCommand): >>> normalize.cmdline 'mri_normalize -g 1 T1.mgz T1_norm.mgz' """ + _cmd = "mri_normalize" input_spec = NormalizeInputSpec output_spec = NormalizeOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class CANormalizeInputSpec(FSTraitedSpec): in_file = File( - argstr='%s', + argstr="%s", exists=True, mandatory=True, position=-4, - desc="The input file for CANormalize") + desc="The input file for CANormalize", + ) out_file = File( - argstr='%s', + argstr="%s", position=-1, - name_source=['in_file'], - name_template='%s_norm', + name_source=["in_file"], + name_template="%s_norm", hash_files=False, keep_extension=True, - desc="The output file for CANormalize") + desc="The output file for CANormalize", + ) atlas = File( - argstr='%s', + argstr="%s", exists=True, mandatory=True, position=-3, - desc="The atlas file in gca format") + desc="The atlas file in gca format", + ) transform = File( - argstr='%s', + argstr="%s", exists=True, mandatory=True, position=-2, - desc="The tranform file in lta format") + desc="The tranform file in lta format", + ) # optional - mask = File( - argstr='-mask %s', exists=True, desc="Specifies volume to use as mask") + mask = File(argstr="-mask %s", exists=True, desc="Specifies volume to use as mask") control_points = File( - argstr='-c %s', desc="File name for the output control points") + argstr="-c %s", desc="File name for the output control points" + ) long_file = File( - argstr='-long %s', - desc='undocumented flag used in longitudinal processing') + argstr="-long %s", desc="undocumented flag used in longitudinal processing" + ) class CANormalizeOutputSpec(TraitedSpec): out_file = File(exists=False, desc="The output file for Normalize") - control_points = File( - exists=False, desc="The output control points for Normalize") + control_points = File(exists=False, desc="The output control points for Normalize") class CANormalize(FSCommand): @@ -2397,62 +2690,59 @@ class CANormalize(FSCommand): >>> ca_normalize.cmdline 'mri_ca_normalize T1.mgz atlas.nii.gz trans.mat T1_norm.mgz' """ + _cmd = "mri_ca_normalize" input_spec = CANormalizeInputSpec output_spec = CANormalizeOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) - outputs['control_points'] = os.path.abspath(self.inputs.control_points) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) + outputs["control_points"] = 
os.path.abspath(self.inputs.control_points) return outputs class CARegisterInputSpec(FSTraitedSpecOpenMP): # required in_file = File( - argstr='%s', + argstr="%s", exists=True, mandatory=True, position=-3, - desc="The input volume for CARegister") + desc="The input volume for CARegister", + ) out_file = File( - argstr='%s', - position=-1, - genfile=True, - desc="The output volume for CARegister") + argstr="%s", position=-1, genfile=True, desc="The output volume for CARegister" + ) template = File( - argstr='%s', - exists=True, - position=-2, - desc="The template file in gca format") + argstr="%s", exists=True, position=-2, desc="The template file in gca format" + ) # optional - mask = File( - argstr='-mask %s', exists=True, desc="Specifies volume to use as mask") + mask = File(argstr="-mask %s", exists=True, desc="Specifies volume to use as mask") invert_and_save = traits.Bool( - argstr='-invert-and-save', + argstr="-invert-and-save", position=-4, - desc= - "Invert and save the .m3z multi-dimensional talaraich transform to x, y, and z .mgz files" + desc="Invert and save the .m3z multi-dimensional talaraich transform to x, y, and z .mgz files", ) - no_big_ventricles = traits.Bool( - argstr='-nobigventricles', desc="No big ventricles") + no_big_ventricles = traits.Bool(argstr="-nobigventricles", desc="No big ventricles") transform = File( - argstr='-T %s', exists=True, desc="Specifies transform in lta format") + argstr="-T %s", exists=True, desc="Specifies transform in lta format" + ) align = traits.String( - argstr='-align-%s', desc="Specifies when to perform alignment") + argstr="-align-%s", desc="Specifies when to perform alignment" + ) levels = traits.Int( - argstr='-levels %d', - desc= - "defines how many surrounding voxels will be used in interpolations, default is 6" + argstr="-levels %d", + desc="defines how many surrounding voxels will be used in interpolations, default is 6", ) A = traits.Int( - argstr='-A %d', - desc='undocumented flag used in longitudinal processing') + argstr="-A %d", desc="undocumented flag used in longitudinal processing" + ) l_files = InputMultiPath( File(exists=False), - argstr='-l %s', - desc='undocumented flag used in longitudinal processing') + argstr="-l %s", + desc="undocumented flag used in longitudinal processing", + ) class CARegisterOutputSpec(TraitedSpec): @@ -2473,23 +2763,24 @@ class CARegister(FSCommandOpenMP): >>> ca_register.cmdline 'mri_ca_register norm.mgz talairach.m3z' """ + _cmd = "mri_ca_register" input_spec = CARegisterInputSpec output_spec = CARegisterOutputSpec def _format_arg(self, name, spec, value): if name == "l_files" and len(value) == 1: - value.append('identity.nofile') + value.append("identity.nofile") return super(CARegister, self)._format_arg(name, spec, value) def _gen_fname(self, name): - if name == 'out_file': - return os.path.abspath('talairach.m3z') + if name == "out_file": + return os.path.abspath("talairach.m3z") return None def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs @@ -2500,53 +2791,58 @@ class CALabelInputSpec(FSTraitedSpecOpenMP): position=-4, mandatory=True, exists=True, - desc="Input volume for CALabel") + desc="Input volume for CALabel", + ) out_file = File( argstr="%s", position=-1, mandatory=True, exists=False, - desc="Output file for CALabel") + desc="Output file for CALabel", + ) transform = File( argstr="%s", position=-3, mandatory=True, exists=True, - 
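# [editor's sketch] A hedged example for CARegister, matching the doctest
# above; when out_file is left unset, _gen_fname falls back to an absolute
# 'talairach.m3z'.
from nipype.interfaces.freesurfer import CARegister

careg = CARegister(in_file="norm.mgz", out_file="talairach.m3z")
assert careg.cmdline == "mri_ca_register norm.mgz talairach.m3z"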
desc="Input transform for CALabel") + desc="Input transform for CALabel", + ) template = File( argstr="%s", position=-2, mandatory=True, exists=True, - desc="Input template for CALabel") + desc="Input template for CALabel", + ) # optional in_vol = File(argstr="-r %s", exists=True, desc="set input volume") intensities = File( argstr="-r %s", exists=True, - desc="input label intensities file(used in longitudinal processing)") - no_big_ventricles = traits.Bool( - argstr="-nobigventricles", desc="No big ventricles") + desc="input label intensities file(used in longitudinal processing)", + ) + no_big_ventricles = traits.Bool(argstr="-nobigventricles", desc="No big ventricles") align = traits.Bool(argstr="-align", desc="Align CALabel") prior = traits.Float(argstr="-prior %.1f", desc="Prior for CALabel") relabel_unlikely = traits.Tuple( traits.Int, traits.Float, argstr="-relabel_unlikely %d %.1f", - desc=("Reclassify voxels at least some std" - " devs from the mean using some size" - " Gaussian window")) + desc=( + "Reclassify voxels at least some std" + " devs from the mean using some size" + " Gaussian window" + ), + ) label = File( argstr="-l %s", exists=True, - desc= - "Undocumented flag. Autorecon3 uses ../label/{hemisphere}.cortex.label as input file" + desc="Undocumented flag. Autorecon3 uses ../label/{hemisphere}.cortex.label as input file", ) aseg = File( argstr="-aseg %s", exists=True, - desc= - "Undocumented flag. Autorecon3 uses ../mri/aseg.presurf.mgz as input file" + desc="Undocumented flag. Autorecon3 uses ../mri/aseg.presurf.mgz as input file", ) @@ -2570,78 +2866,81 @@ class CALabel(FSCommandOpenMP): >>> ca_label.cmdline 'mri_ca_label norm.mgz trans.mat Template_6.nii out.mgz' """ + _cmd = "mri_ca_label" input_spec = CALabelInputSpec output_spec = CALabelOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class MRIsCALabelInputSpec(FSTraitedSpecOpenMP): # required subject_id = traits.String( - 'subject_id', + "subject_id", argstr="%s", position=-5, usedefault=True, mandatory=True, - desc="Subject name or ID") + desc="Subject name or ID", + ) hemisphere = traits.Enum( - 'lh', - 'rh', + "lh", + "rh", argstr="%s", position=-4, mandatory=True, - desc="Hemisphere ('lh' or 'rh')") + desc="Hemisphere ('lh' or 'rh')", + ) canonsurf = File( argstr="%s", position=-3, mandatory=True, exists=True, - desc="Input canonical surface file") + desc="Input canonical surface file", + ) classifier = File( argstr="%s", position=-2, mandatory=True, exists=True, - desc="Classifier array input file") + desc="Classifier array input file", + ) smoothwm = File( - mandatory=True, - exists=True, - desc="implicit input {hemisphere}.smoothwm") - curv = File( - mandatory=True, exists=True, desc="implicit input {hemisphere}.curv") - sulc = File( - mandatory=True, exists=True, desc="implicit input {hemisphere}.sulc") + mandatory=True, exists=True, desc="implicit input {hemisphere}.smoothwm" + ) + curv = File(mandatory=True, exists=True, desc="implicit input {hemisphere}.curv") + sulc = File(mandatory=True, exists=True, desc="implicit input {hemisphere}.sulc") out_file = File( argstr="%s", position=-1, exists=False, - name_source=['hemisphere'], + name_source=["hemisphere"], keep_extension=True, hash_files=False, name_template="%s.aparc.annot", - desc="Annotated surface output file") + desc="Annotated surface output file", + ) # optional label = File( 
argstr="-l %s", exists=True, - desc= - "Undocumented flag. Autorecon3 uses ../label/{hemisphere}.cortex.label as input file" + desc="Undocumented flag. Autorecon3 uses ../label/{hemisphere}.cortex.label as input file", ) aseg = File( argstr="-aseg %s", exists=True, - desc= - "Undocumented flag. Autorecon3 uses ../mri/aseg.presurf.mgz as input file" + desc="Undocumented flag. Autorecon3 uses ../mri/aseg.presurf.mgz as input file", ) seed = traits.Int(argstr="-seed %d", desc="") - copy_inputs = traits.Bool(desc="Copies implicit inputs to node directory " - + "and creates a temp subjects_directory. " + - "Use this when running as a node") + copy_inputs = traits.Bool( + desc="Copies implicit inputs to node directory " + + "and creates a temp subjects_directory. " + + "Use this when running as a node" + ) class MRIsCALabelOutputSpec(TraitedSpec): @@ -2674,6 +2973,7 @@ class MRIsCALabel(FSCommandOpenMP): >>> ca_label.cmdline 'mris_ca_label test lh lh.pial im1.nii lh.aparc.annot' """ + _cmd = "mris_ca_label" input_spec = MRIsCALabelInputSpec output_spec = MRIsCALabelOutputSpec @@ -2681,28 +2981,32 @@ class MRIsCALabel(FSCommandOpenMP): def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir - copy2subjdir(self, self.inputs.canonsurf, folder='surf') + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir + copy2subjdir(self, self.inputs.canonsurf, folder="surf") copy2subjdir( self, self.inputs.smoothwm, - folder='surf', - basename='{0}.smoothwm'.format(self.inputs.hemisphere)) + folder="surf", + basename="{0}.smoothwm".format(self.inputs.hemisphere), + ) copy2subjdir( self, self.inputs.curv, - folder='surf', - basename='{0}.curv'.format(self.inputs.hemisphere)) + folder="surf", + basename="{0}.curv".format(self.inputs.hemisphere), + ) copy2subjdir( self, self.inputs.sulc, - folder='surf', - basename='{0}.sulc'.format(self.inputs.hemisphere)) + folder="surf", + basename="{0}.sulc".format(self.inputs.hemisphere), + ) # The label directory must exist in order for an output to be written - label_dir = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'label') + label_dir = os.path.join( + self.inputs.subjects_dir, self.inputs.subject_id, "label" + ) if not os.path.isdir(label_dir): os.makedirs(label_dir) @@ -2711,9 +3015,9 @@ def run(self, **inputs): def _list_outputs(self): outputs = self.output_spec().get() out_basename = os.path.basename(self.inputs.out_file) - outputs['out_file'] = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'label', - out_basename) + outputs["out_file"] = os.path.join( + self.inputs.subjects_dir, self.inputs.subject_id, "label", out_basename + ) return outputs @@ -2722,39 +3026,45 @@ class SegmentCCInputSpec(FSTraitedSpec): argstr="-aseg %s", mandatory=True, exists=True, - desc="Input aseg file to read from subjects directory") + desc="Input aseg file to read from subjects directory", + ) in_norm = File( mandatory=True, exists=True, - desc="Required undocumented input {subject}/mri/norm.mgz") + desc="Required undocumented input {subject}/mri/norm.mgz", + ) out_file = File( argstr="-o %s", exists=False, - name_source=['in_file'], - name_template='%s.auto.mgz', + name_source=["in_file"], + name_template="%s.auto.mgz", hash_files=False, keep_extension=False, - desc="Filename to write aseg including CC") + desc="Filename to write aseg including CC", + ) out_rotation = File( argstr="-lta %s", 
mandatory=True, exists=False, - desc="Global filepath for writing rotation lta") + desc="Global filepath for writing rotation lta", + ) subject_id = traits.String( - 'subject_id', + "subject_id", argstr="%s", mandatory=True, position=-1, usedefault=True, - desc="Subject name") + desc="Subject name", + ) copy_inputs = traits.Bool( - desc="If running as a node, set this to True." + - "This will copy the input files to the node " + "directory.") + desc="If running as a node, set this to True." + + "This will copy the input files to the node " + + "directory." + ) class SegmentCCOutputSpec(TraitedSpec): - out_file = File( - exists=False, desc="Output segmentation uncluding corpus collosum") + out_file = File(exists=False, desc="Output segmentation uncluding corpus collosum") out_rotation = File(exists=False, desc="Output lta rotation file") @@ -2798,38 +3108,37 @@ def _format_arg(self, name, spec, value): def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) - outputs['out_rotation'] = os.path.abspath(self.inputs.out_rotation) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) + outputs["out_rotation"] = os.path.abspath(self.inputs.out_rotation) return outputs def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir for originalfile in [self.inputs.in_file, self.inputs.in_norm]: - copy2subjdir(self, originalfile, folder='mri') + copy2subjdir(self, originalfile, folder="mri") return super(SegmentCC, self).run(**inputs) def aggregate_outputs(self, runtime=None, needed_outputs=None): # it is necessary to find the output files and move # them to the correct loacation predicted_outputs = self._list_outputs() - for name in ['out_file', 'out_rotation']: + for name in ["out_file", "out_rotation"]: out_file = predicted_outputs[name] if not os.path.isfile(out_file): out_base = os.path.basename(out_file) if isdefined(self.inputs.subjects_dir): - subj_dir = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id) + subj_dir = os.path.join( + self.inputs.subjects_dir, self.inputs.subject_id + ) else: - subj_dir = os.path.join(os.getcwd(), - self.inputs.subject_id) - if name == 'out_file': - out_tmp = os.path.join(subj_dir, 'mri', out_base) - elif name == 'out_rotation': - out_tmp = os.path.join(subj_dir, 'mri', 'transforms', - out_base) + subj_dir = os.path.join(os.getcwd(), self.inputs.subject_id) + if name == "out_file": + out_tmp = os.path.join(subj_dir, "mri", out_base) + elif name == "out_rotation": + out_tmp = os.path.join(subj_dir, "mri", "transforms", out_base) else: out_tmp = None # move the file to correct location @@ -2837,8 +3146,7 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): if not os.path.isdir(os.path.dirname(out_tmp)): os.makedirs(os.path.dirname(out_tmp)) shutil.move(out_tmp, out_file) - return super(SegmentCC, self).aggregate_outputs( - runtime, needed_outputs) + return super(SegmentCC, self).aggregate_outputs(runtime, needed_outputs) class SegmentWMInputSpec(FSTraitedSpec): @@ -2847,13 +3155,15 @@ class SegmentWMInputSpec(FSTraitedSpec): exists=True, mandatory=True, position=-2, - desc="Input file for SegmentWM") + desc="Input file for SegmentWM", + ) out_file = File( argstr="%s", exists=False, mandatory=True, position=-1, - desc="File to be written as output for SegmentWM") + 
desc="File to be written as output for SegmentWM", + ) class SegmentWMOutputSpec(TraitedSpec): @@ -2883,7 +3193,7 @@ class SegmentWM(FSCommand): def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs @@ -2893,28 +3203,31 @@ class EditWMwithAsegInputSpec(FSTraitedSpec): position=-4, mandatory=True, exists=True, - desc="Input white matter segmentation file") + desc="Input white matter segmentation file", + ) brain_file = File( argstr="%s", position=-3, mandatory=True, exists=True, - desc="Input brain/T1 file") + desc="Input brain/T1 file", + ) seg_file = File( argstr="%s", position=-2, mandatory=True, exists=True, - desc="Input presurf segmentation file") + desc="Input presurf segmentation file", + ) out_file = File( argstr="%s", position=-1, mandatory=True, exists=False, - desc="File to be written as output") + desc="File to be written as output", + ) # optional - keep_in = traits.Bool( - argstr="-keep-in", desc="Keep edits as found in input volume") + keep_in = traits.Bool(argstr="-keep-in", desc="Keep edits as found in input volume") class EditWMwithAsegOutputSpec(TraitedSpec): @@ -2937,13 +3250,14 @@ class EditWMwithAseg(FSCommand): >>> editwm.cmdline 'mri_edit_wm_with_aseg -keep-in T1.mgz norm.mgz aseg.mgz wm.asegedit.mgz' """ - _cmd = 'mri_edit_wm_with_aseg' + + _cmd = "mri_edit_wm_with_aseg" input_spec = EditWMwithAsegInputSpec output_spec = EditWMwithAsegOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs @@ -2952,57 +3266,61 @@ class ConcatenateLTAInputSpec(FSTraitedSpec): in_lta1 = File( exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-3, - desc='maps some src1 to dst1') + desc="maps some src1 to dst1", + ) in_lta2 = traits.Either( File(exists=True), - 'identity.nofile', - argstr='%s', + "identity.nofile", + argstr="%s", position=-2, mandatory=True, - desc='maps dst1(src2) to dst2') + desc="maps dst1(src2) to dst2", + ) out_file = File( position=-1, - argstr='%s', + argstr="%s", hash_files=False, - name_source=['in_lta1'], - name_template='%s_concat', + name_source=["in_lta1"], + name_template="%s_concat", keep_extension=True, - desc='the combined LTA maps: src1 to dst2 = LTA2*LTA1') + desc="the combined LTA maps: src1 to dst2 = LTA2*LTA1", + ) # Inversion and transform type - invert_1 = traits.Bool( - argstr='-invert1', desc='invert in_lta1 before applying it') - invert_2 = traits.Bool( - argstr='-invert2', desc='invert in_lta2 before applying it') - invert_out = traits.Bool(argstr='-invertout', desc='invert output LTA') + invert_1 = traits.Bool(argstr="-invert1", desc="invert in_lta1 before applying it") + invert_2 = traits.Bool(argstr="-invert2", desc="invert in_lta2 before applying it") + invert_out = traits.Bool(argstr="-invertout", desc="invert output LTA") out_type = traits.Enum( - 'VOX2VOX', 'RAS2RAS', argstr='-out_type %d', desc='set final LTA type') + "VOX2VOX", "RAS2RAS", argstr="-out_type %d", desc="set final LTA type" + ) # Talairach options tal_source_file = File( exists=True, - argstr='-tal %s', + argstr="-tal %s", position=-5, - requires=['tal_template_file'], - desc='if in_lta2 is talairach.xfm, specify source for talairach') + requires=["tal_template_file"], + desc="if in_lta2 is talairach.xfm, specify source for talairach", + ) tal_template_file = 
File( exists=True, - argstr='%s', + argstr="%s", position=-4, - requires=['tal_source_file'], - desc='if in_lta2 is talairach.xfm, specify template for talairach') + requires=["tal_source_file"], + desc="if in_lta2 is talairach.xfm, specify template for talairach", + ) - subject = traits.Str( - argstr='-subject %s', desc='set subject in output LTA') + subject = traits.Str(argstr="-subject %s", desc="set subject in output LTA") # Note rmsdiff would be xor out_file, and would be most easily dealt with # in a new interface. -CJM 2017.10.05 class ConcatenateLTAOutputSpec(TraitedSpec): out_file = File( - exists=False, desc='the combined LTA maps: src1 to dst2 = LTA2*LTA1') + exists=False, desc="the combined LTA maps: src1 to dst2 = LTA2*LTA1" + ) class ConcatenateLTA(FSCommand): @@ -3035,11 +3353,11 @@ class ConcatenateLTA(FSCommand): 'mri_concatenate_lta -invert1 -out_type 1 lta1.lta identity.nofile inv1.lta' """ - _cmd = 'mri_concatenate_lta' + _cmd = "mri_concatenate_lta" input_spec = ConcatenateLTAInputSpec output_spec = ConcatenateLTAOutputSpec def _format_arg(self, name, spec, value): - if name == 'out_type': - value = {'VOX2VOX': 0, 'RAS2RAS': 1}[value] + if name == "out_type": + value = {"VOX2VOX": 0, "RAS2RAS": 1}[value] return super(ConcatenateLTA, self)._format_arg(name, spec, value) diff --git a/nipype/interfaces/freesurfer/registration.py b/nipype/interfaces/freesurfer/registration.py index 1b919485e2..c93f813088 100644 --- a/nipype/interfaces/freesurfer/registration.py +++ b/nipype/interfaces/freesurfer/registration.py @@ -10,32 +10,37 @@ from ... import logging from ...utils.filemanip import split_filename, copyfile -from .base import (FSCommand, FSTraitedSpec, FSScriptCommand, - FSScriptOutputSpec, FSCommandOpenMP, FSTraitedSpecOpenMP) -from ..base import (isdefined, TraitedSpec, File, traits, Directory) +from .base import ( + FSCommand, + FSTraitedSpec, + FSScriptCommand, + FSScriptOutputSpec, + FSCommandOpenMP, + FSTraitedSpecOpenMP, +) +from ..base import isdefined, TraitedSpec, File, traits, Directory -__docformat__ = 'restructuredtext' -iflogger = logging.getLogger('nipype.interface') +__docformat__ = "restructuredtext" +iflogger = logging.getLogger("nipype.interface") class MPRtoMNI305InputSpec(FSTraitedSpec): # environment variables, required # usedefault=True is hack for on_trait_change in __init__ reference_dir = Directory( - "", exists=True, mandatory=True, usedefault=True, desc="TODO") - target = traits.String( - "", mandatory=True, usedefault=True, desc="input atlas file") + "", exists=True, mandatory=True, usedefault=True, desc="TODO" + ) + target = traits.String("", mandatory=True, usedefault=True, desc="input atlas file") # required in_file = File( - argstr='%s', - usedefault=True, - desc="the input file prefix for MPRtoMNI305") + argstr="%s", usedefault=True, desc="the input file prefix for MPRtoMNI305" + ) class MPRtoMNI305OutputSpec(FSScriptOutputSpec): out_file = File( - exists=False, - desc="The output file '_to__t4_vox2vox.txt'") + exists=False, desc="The output file '_to__t4_vox2vox.txt'" + ) class MPRtoMNI305(FSScriptCommand): @@ -61,26 +66,25 @@ class MPRtoMNI305(FSScriptCommand): >>> mprtomni305.run() # doctest: +SKIP """ + _cmd = "mpr2mni305" input_spec = MPRtoMNI305InputSpec output_spec = MPRtoMNI305OutputSpec def __init__(self, **inputs): super(MPRtoMNI305, self).__init__(**inputs) - self.inputs.on_trait_change(self._environ_update, 'target') - self.inputs.on_trait_change(self._environ_update, 'reference_dir') + 
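# [editor's sketch] A hedged example for ConcatenateLTA, matching the doctest
# above; _format_arg maps the out_type enum to mri_concatenate_lta's numeric
# codes ('VOX2VOX' -> 0, 'RAS2RAS' -> 1), and 'identity.nofile' is a literal
# sentinel accepted in place of a second transform file.
from nipype.interfaces.freesurfer import ConcatenateLTA

conc = ConcatenateLTA(in_lta1="lta1.lta", in_lta2="identity.nofile")
conc.inputs.invert_1 = True
conc.inputs.out_type = "RAS2RAS"
conc.inputs.out_file = "inv1.lta"
assert conc.cmdline == "mri_concatenate_lta -invert1 -out_type 1 lta1.lta identity.nofile inv1.lta"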
self.inputs.on_trait_change(self._environ_update, "target") + self.inputs.on_trait_change(self._environ_update, "reference_dir") def _format_arg(self, opt, spec, val): - if opt in ['target', 'reference_dir']: + if opt in ["target", "reference_dir"]: return "" - elif opt == 'in_file': + elif opt == "in_file": _, retval, ext = split_filename(val) # Need to copy file to working cache directory! copyfile( - val, - os.path.abspath(retval + ext), - copy=True, - hashmethod='content') + val, os.path.abspath(retval + ext), copy=True, hashmethod="content" + ) return retval return super(MPRtoMNI305, self)._format_arg(opt, spec, val) @@ -88,7 +92,7 @@ def _environ_update(self): # refdir = os.path.join(Info.home(), val) refdir = self.inputs.reference_dir target = self.inputs.target - self.inputs.environ['MPR2MNI305_TARGET'] = target + self.inputs.environ["MPR2MNI305_TARGET"] = target self.inputs.environ["REFDIR"] = refdir def _get_fname(self, fname): @@ -96,44 +100,40 @@ def _get_fname(self, fname): def _list_outputs(self): outputs = super(MPRtoMNI305, self)._list_outputs() - fullname = "_".join([ - self._get_fname(self.inputs.in_file), "to", self.inputs.target, - "t4", "vox2vox.txt" - ]) - outputs['out_file'] = os.path.abspath(fullname) + fullname = "_".join( + [ + self._get_fname(self.inputs.in_file), + "to", + self.inputs.target, + "t4", + "vox2vox.txt", + ] + ) + outputs["out_file"] = os.path.abspath(fullname) return outputs class RegisterAVItoTalairachInputSpec(FSTraitedSpec): in_file = File( - argstr='%s', - exists=True, - mandatory=True, - position=0, - desc="The input file") + argstr="%s", exists=True, mandatory=True, position=0, desc="The input file" + ) target = File( - argstr='%s', - exists=True, - mandatory=True, - position=1, - desc="The target file") + argstr="%s", exists=True, mandatory=True, position=1, desc="The target file" + ) vox2vox = File( - argstr='%s', - exists=True, - mandatory=True, - position=2, - desc="The vox2vox file") + argstr="%s", exists=True, mandatory=True, position=2, desc="The vox2vox file" + ) out_file = File( - 'talairach.auto.xfm', + "talairach.auto.xfm", usedefault=True, - argstr='%s', + argstr="%s", position=3, - desc="The transform output") + desc="The transform output", + ) class RegisterAVItoTalairachOutputSpec(FSScriptOutputSpec): - out_file = File( - exists=False, desc="The output file for RegisterAVItoTalairach") + out_file = File(exists=False, desc="The output file for RegisterAVItoTalairach") class RegisterAVItoTalairach(FSScriptCommand): @@ -168,50 +168,43 @@ class RegisterAVItoTalairach(FSScriptCommand): >>> register.run() # doctest: +SKIP """ + _cmd = "avi2talxfm" input_spec = RegisterAVItoTalairachInputSpec output_spec = RegisterAVItoTalairachOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class EMRegisterInputSpec(FSTraitedSpecOpenMP): # required in_file = File( - argstr="%s", - exists=True, - mandatory=True, - position=-3, - desc="in brain volume") + argstr="%s", exists=True, mandatory=True, position=-3, desc="in brain volume" + ) template = File( - argstr="%s", - exists=True, - mandatory=True, - position=-2, - desc="template gca") + argstr="%s", exists=True, mandatory=True, position=-2, desc="template gca" + ) out_file = File( argstr="%s", exists=False, - name_source=['in_file'], + name_source=["in_file"], name_template="%s_transform.lta", hash_files=False, keep_extension=False, 
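# [editor's sketch] A hedged example for EMRegister, matching the doctest
# below; out_file is name-sourced from in_file with the '%s_transform.lta'
# template (keep_extension=False strips the '.mgz' first).
from nipype.interfaces.freesurfer import EMRegister

em = EMRegister(in_file="norm.mgz", template="aseg.mgz")
em.inputs.skull = True
em.inputs.nbrspacing = 9
assert em.cmdline == "mri_em_register -uns 9 -skull norm.mgz aseg.mgz norm_transform.lta"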
position=-1, - desc="output transform") + desc="output transform", + ) # optional - skull = traits.Bool( - argstr="-skull", desc="align to atlas containing skull (uns=5)") + skull = traits.Bool(argstr="-skull", desc="align to atlas containing skull (uns=5)") mask = File(argstr="-mask %s", exists=True, desc="use volume as a mask") nbrspacing = traits.Int( argstr="-uns %d", - desc= - "align to atlas containing skull setting unknown_nbr_spacing = nbrspacing" + desc="align to atlas containing skull setting unknown_nbr_spacing = nbrspacing", ) - transform = File( - argstr="-t %s", exists=True, desc="Previously computed transform") + transform = File(argstr="-t %s", exists=True, desc="Previously computed transform") class EMRegisterOutputSpec(TraitedSpec): @@ -233,13 +226,14 @@ class EMRegister(FSCommandOpenMP): >>> register.cmdline 'mri_em_register -uns 9 -skull norm.mgz aseg.mgz norm_transform.lta' """ - _cmd = 'mri_em_register' + + _cmd = "mri_em_register" input_spec = EMRegisterInputSpec output_spec = EMRegisterOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs @@ -251,42 +245,44 @@ class RegisterInputSpec(FSTraitedSpec): mandatory=True, position=-3, copyfile=True, - desc="Surface to register, often {hemi}.sphere") + desc="Surface to register, often {hemi}.sphere", + ) target = File( argstr="%s", exists=True, mandatory=True, position=-2, - desc="The data to register to. In normal recon-all usage, " + - "this is a template file for average surface.") + desc="The data to register to. In normal recon-all usage, " + + "this is a template file for average surface.", + ) in_sulc = File( exists=True, mandatory=True, copyfile=True, - desc= - "Undocumented mandatory input file ${SUBJECTS_DIR}/surf/{hemisphere}.sulc " + desc="Undocumented mandatory input file ${SUBJECTS_DIR}/surf/{hemisphere}.sulc ", ) out_file = File( argstr="%s", exists=False, position=-1, genfile=True, - desc="Output surface file to capture registration") + desc="Output surface file to capture registration", + ) # optional curv = traits.Bool( argstr="-curv", - requires=['in_smoothwm'], - desc="Use smoothwm curvature for final alignment") + requires=["in_smoothwm"], + desc="Use smoothwm curvature for final alignment", + ) in_smoothwm = File( exists=True, copyfile=True, - desc= - "Undocumented input file ${SUBJECTS_DIR}/surf/{hemisphere}.smoothwm ") + desc="Undocumented input file ${SUBJECTS_DIR}/surf/{hemisphere}.smoothwm ", + ) class RegisterOutputSpec(TraitedSpec): - out_file = File( - exists=False, desc="Output surface file to capture registration") + out_file = File(exists=False, desc="Output surface file to capture registration") class Register(FSCommand): @@ -306,26 +302,26 @@ class Register(FSCommand): 'mris_register -curv lh.pial aseg.mgz lh.pial.reg' """ - _cmd = 'mris_register' + _cmd = "mris_register" input_spec = RegisterInputSpec output_spec = RegisterOutputSpec def _format_arg(self, opt, spec, val): - if opt == 'curv': + if opt == "curv": return spec.argstr return super(Register, self)._format_arg(opt, spec, val) def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._list_outputs()[name] return None def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_file): - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) else: - 
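# [editor's sketch] A hedged example for Register (mris_register), matching
# the doctest below; 'curv' is a bare flag (its _format_arg returns only the
# argstr) and requires in_smoothwm, while in_sulc is a mandatory implicit
# input. If out_file is unset, _list_outputs appends '.reg' to in_surf.
from nipype.interfaces.freesurfer import Register

sreg = Register(in_surf="lh.pial", target="aseg.mgz")
sreg.inputs.in_sulc = "lh.pial"      # any existing file satisfies exists=True here
sreg.inputs.in_smoothwm = "lh.pial"
sreg.inputs.curv = True
sreg.inputs.out_file = "lh.pial.reg"
assert sreg.cmdline == "mris_register -curv lh.pial aseg.mgz lh.pial.reg"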
outputs['out_file'] = os.path.abspath(self.inputs.in_surf) + '.reg' + outputs["out_file"] = os.path.abspath(self.inputs.in_surf) + ".reg" return outputs @@ -336,14 +332,12 @@ class PaintInputSpec(FSTraitedSpec): exists=True, mandatory=True, position=-2, - desc="Surface file with grid (vertices) onto which the " + - "template data is to be sampled or 'painted'") + desc="Surface file with grid (vertices) onto which the " + + "template data is to be sampled or 'painted'", + ) template = File( - argstr="%s", - exists=True, - mandatory=True, - position=-3, - desc="Template file") + argstr="%s", exists=True, mandatory=True, position=-3, desc="Template file" + ) # optional template_param = traits.Int(desc="Frame number of the input template") averages = traits.Int(argstr="-a %d", desc="Average curvature patterns") @@ -353,17 +347,17 @@ class PaintInputSpec(FSTraitedSpec): position=-1, name_template="%s.avg_curv", hash_files=False, - name_source=['in_surf'], + name_source=["in_surf"], keep_extension=False, - desc="File containing a surface-worth of per-vertex values, " + - "saved in 'curvature' format.") + desc="File containing a surface-worth of per-vertex values, " + + "saved in 'curvature' format.", + ) class PaintOutputSpec(TraitedSpec): out_file = File( exists=False, - desc= - "File containing a surface-worth of per-vertex values, saved in 'curvature' format." + desc="File containing a surface-worth of per-vertex values, saved in 'curvature' format.", ) @@ -387,150 +381,166 @@ class Paint(FSCommand): 'mrisp_paint -a 5 aseg.mgz lh.pial lh.avg_curv' """ - _cmd = 'mrisp_paint' + _cmd = "mrisp_paint" input_spec = PaintInputSpec output_spec = PaintOutputSpec def _format_arg(self, opt, spec, val): - if opt == 'template': + if opt == "template": if isdefined(self.inputs.template_param): - return spec.argstr % ( - val + '#' + str(self.inputs.template_param)) + return spec.argstr % (val + "#" + str(self.inputs.template_param)) return super(Paint, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class MRICoregInputSpec(FSTraitedSpec): source_file = File( - argstr='--mov %s', - desc='source file to be registered', + argstr="--mov %s", + desc="source file to be registered", mandatory=True, - copyfile=False) + copyfile=False, + ) reference_file = File( - argstr='--ref %s', - desc='reference (target) file', + argstr="--ref %s", + desc="reference (target) file", mandatory=True, copyfile=False, - xor=['subject_id']) + xor=["subject_id"], + ) out_lta_file = traits.Either( True, File, - argstr='--lta %s', + argstr="--lta %s", default=True, usedefault=True, - desc='output registration file (LTA format)') + desc="output registration file (LTA format)", + ) out_reg_file = traits.Either( - True, - File, - argstr='--regdat %s', - desc='output registration file (REG format)') + True, File, argstr="--regdat %s", desc="output registration file (REG format)" + ) out_params_file = traits.Either( - True, File, argstr='--params %s', desc='output parameters file') + True, File, argstr="--params %s", desc="output parameters file" + ) subjects_dir = Directory( - exists=True, argstr='--sd %s', desc='FreeSurfer SUBJECTS_DIR') + exists=True, argstr="--sd %s", desc="FreeSurfer SUBJECTS_DIR" + ) subject_id = traits.Str( - argstr='--s %s', + argstr="--s %s", position=1, mandatory=True, - xor=['reference_file'], - requires=['subjects_dir'], - 
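# [editor's sketch] A hedged example for Paint, matching the doctest below;
# when template_param is set, _format_arg appends '#<frame>' to the template
# path, and out_file is name-sourced from in_surf as '%s.avg_curv'.
from nipype.interfaces.freesurfer import Paint

paint = Paint(in_surf="lh.pial", template="aseg.mgz", averages=5)
assert paint.cmdline == "mrisp_paint -a 5 aseg.mgz lh.pial lh.avg_curv"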
desc='freesurfer subject ID (implies ``reference_mask == ' - 'aparc+aseg.mgz`` unless otherwise specified)') + xor=["reference_file"], + requires=["subjects_dir"], + desc="freesurfer subject ID (implies ``reference_mask == " + "aparc+aseg.mgz`` unless otherwise specified)", + ) dof = traits.Enum( - 6, - 9, - 12, - argstr='--dof %d', - desc='number of transform degrees of freedom') + 6, 9, 12, argstr="--dof %d", desc="number of transform degrees of freedom" + ) reference_mask = traits.Either( False, traits.Str, - argstr='--ref-mask %s', + argstr="--ref-mask %s", position=2, - desc='mask reference volume with given mask, or None if ``False``') + desc="mask reference volume with given mask, or None if ``False``", + ) source_mask = traits.Str( - argstr='--mov-mask', desc='mask source file with given mask') - num_threads = traits.Int( - argstr='--threads %d', desc='number of OpenMP threads') + argstr="--mov-mask", desc="mask source file with given mask" + ) + num_threads = traits.Int(argstr="--threads %d", desc="number of OpenMP threads") no_coord_dithering = traits.Bool( - argstr='--no-coord-dither', desc='turn off coordinate dithering') + argstr="--no-coord-dither", desc="turn off coordinate dithering" + ) no_intensity_dithering = traits.Bool( - argstr='--no-intensity-dither', desc='turn off intensity dithering') + argstr="--no-intensity-dither", desc="turn off intensity dithering" + ) sep = traits.List( - argstr='--sep %s...', + argstr="--sep %s...", minlen=1, maxlen=2, - desc='set spatial scales, in voxels (default [2, 4])') + desc="set spatial scales, in voxels (default [2, 4])", + ) initial_translation = traits.Tuple( traits.Float, traits.Float, traits.Float, - argstr='--trans %g %g %g', - desc='initial translation in mm (implies no_cras0)') + argstr="--trans %g %g %g", + desc="initial translation in mm (implies no_cras0)", + ) initial_rotation = traits.Tuple( traits.Float, traits.Float, traits.Float, - argstr='--rot %g %g %g', - desc='initial rotation in degrees') + argstr="--rot %g %g %g", + desc="initial rotation in degrees", + ) initial_scale = traits.Tuple( traits.Float, traits.Float, traits.Float, - argstr='--scale %g %g %g', - desc='initial scale') + argstr="--scale %g %g %g", + desc="initial scale", + ) initial_shear = traits.Tuple( traits.Float, traits.Float, traits.Float, - argstr='--shear %g %g %g', - desc='initial shear (Hxy, Hxz, Hyz)') + argstr="--shear %g %g %g", + desc="initial shear (Hxy, Hxz, Hyz)", + ) no_cras0 = traits.Bool( - argstr='--no-cras0', - desc='do not set translation parameters to align ' - 'centers of source and reference files') + argstr="--no-cras0", + desc="do not set translation parameters to align " + "centers of source and reference files", + ) max_iters = traits.Range( - low=1, argstr='--nitersmax %d', desc='maximum iterations (default: 4)') + low=1, argstr="--nitersmax %d", desc="maximum iterations (default: 4)" + ) ftol = traits.Float( - argstr='--ftol %e', desc='floating-point tolerance (default=1e-7)') - linmintol = traits.Float(argstr='--linmintol %e') + argstr="--ftol %e", desc="floating-point tolerance (default=1e-7)" + ) + linmintol = traits.Float(argstr="--linmintol %e") saturation_threshold = traits.Range( low=0.0, high=100.0, - argstr='--sat %g', - desc='saturation threshold (default=9.999)') + argstr="--sat %g", + desc="saturation threshold (default=9.999)", + ) conform_reference = traits.Bool( - argstr='--conf-ref', desc='conform reference without rescaling') - no_brute_force = traits.Bool( - argstr='--no-bf', desc='do not brute force 
search') + argstr="--conf-ref", desc="conform reference without rescaling" + ) + no_brute_force = traits.Bool(argstr="--no-bf", desc="do not brute force search") brute_force_limit = traits.Float( - argstr='--bf-lim %g', - xor=['no_brute_force'], - desc='constrain brute force search to +/- lim') + argstr="--bf-lim %g", + xor=["no_brute_force"], + desc="constrain brute force search to +/- lim", + ) brute_force_samples = traits.Int( - argstr='--bf-nsamp %d', - xor=['no_brute_force'], - desc='number of samples in brute force search') + argstr="--bf-nsamp %d", + xor=["no_brute_force"], + desc="number of samples in brute force search", + ) no_smooth = traits.Bool( - argstr='--no-smooth', - desc='do not apply smoothing to either reference or source file') + argstr="--no-smooth", + desc="do not apply smoothing to either reference or source file", + ) ref_fwhm = traits.Float( - argstr='--ref-fwhm', desc='apply smoothing to reference file') + argstr="--ref-fwhm", desc="apply smoothing to reference file" + ) source_oob = traits.Bool( - argstr='--mov-oob', - desc='count source voxels that are out-of-bounds as 0') + argstr="--mov-oob", desc="count source voxels that are out-of-bounds as 0" + ) # Skipping mat2par class MRICoregOutputSpec(TraitedSpec): - out_reg_file = File(exists=True, desc='output registration file') - out_lta_file = File(exists=True, desc='output LTA-style registration file') - out_params_file = File(exists=True, desc='output parameters file') + out_reg_file = File(exists=True, desc="output registration file") + out_lta_file = File(exists=True, desc="output LTA-style registration file") + out_params_file = File(exists=True, desc="output parameters file") class MRICoreg(FSCommand): @@ -569,16 +579,15 @@ class MRICoreg(FSCommand): 'mri_coreg --s fsaverage --no-ref-mask --lta .../registration.lta --sep 4 --sep 5 --mov moving1.nii --sd .' 
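# [editor's sketch] A hedged example for MRICoreg; reference_mask=False is
# rendered as '--no-ref-mask' (see _format_arg below), and the default
# out_lta_file=True resolves to an absolute 'registration.lta' through
# _list_outputs. The exact cmdline is not asserted because it embeds the run
# directory's absolute path (elided as '...' in the doctest above).
from nipype.interfaces.freesurfer import MRICoreg

coreg = MRICoreg(source_file="moving1.nii", subjects_dir=".")
coreg.inputs.subject_id = "fsaverage"   # xor with reference_file
coreg.inputs.reference_mask = False
coreg.inputs.sep = [4, 5]
print(coreg.cmdline)  # mri_coreg --s fsaverage --no-ref-mask --lta <cwd>/registration.lta ...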
""" - _cmd = 'mri_coreg' + _cmd = "mri_coreg" input_spec = MRICoregInputSpec output_spec = MRICoregOutputSpec def _format_arg(self, opt, spec, val): - if opt in ('out_reg_file', 'out_lta_file', - 'out_params_file') and val is True: + if opt in ("out_reg_file", "out_lta_file", "out_params_file") and val is True: val = self._list_outputs()[opt] - elif opt == 'reference_mask' and val is False: - return '--no-ref-mask' + elif opt == "reference_mask" and val is False: + return "--no-ref-mask" return super(MRICoreg, self)._format_arg(opt, spec, val) def _list_outputs(self): @@ -587,19 +596,19 @@ def _list_outputs(self): out_lta_file = self.inputs.out_lta_file if isdefined(out_lta_file): if out_lta_file is True: - out_lta_file = 'registration.lta' - outputs['out_lta_file'] = os.path.abspath(out_lta_file) + out_lta_file = "registration.lta" + outputs["out_lta_file"] = os.path.abspath(out_lta_file) out_reg_file = self.inputs.out_reg_file if isdefined(out_reg_file): if out_reg_file is True: - out_reg_file = 'registration.dat' - outputs['out_reg_file'] = os.path.abspath(out_reg_file) + out_reg_file = "registration.dat" + outputs["out_reg_file"] = os.path.abspath(out_reg_file) out_params_file = self.inputs.out_params_file if isdefined(out_params_file): if out_params_file is True: - out_params_file = 'registration.par' - outputs['out_params_file'] = os.path.abspath(out_params_file) + out_params_file = "registration.par" + outputs["out_params_file"] = os.path.abspath(out_params_file) return outputs diff --git a/nipype/interfaces/freesurfer/tests/test_BBRegister.py b/nipype/interfaces/freesurfer/tests/test_BBRegister.py index 81a67742e2..b9ed6a8bcd 100644 --- a/nipype/interfaces/freesurfer/tests/test_BBRegister.py +++ b/nipype/interfaces/freesurfer/tests/test_BBRegister.py @@ -3,111 +3,47 @@ def test_BBRegister_inputs(): input_map_5_3 = dict( - args=dict(argstr='%s', ), - contrast_type=dict( - argstr='--%s', - mandatory=True, - ), - dof=dict(argstr='--%d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - epi_mask=dict(argstr='--epi-mask', ), - fsldof=dict(argstr='--fsl-dof %d', ), - init=dict( - argstr='--init-%s', - mandatory=True, - xor=['init_reg_file'], - ), - init_cost_file=dict(argstr='--initcost %s', ), - init_reg_file=dict( - argstr='--init-reg %s', - mandatory=True, - xor=['init'], - ), - intermediate_file=dict(argstr='--int %s', ), - out_fsl_file=dict(argstr='--fslmat %s', ), - out_lta_file=dict( - argstr='--lta %s', - min_ver='5.2.0', - ), - out_reg_file=dict( - argstr='--reg %s', - genfile=True, - ), - reg_frame=dict( - argstr='--frame %d', - xor=['reg_middle_frame'], - ), - reg_middle_frame=dict( - argstr='--mid-frame', - xor=['reg_frame'], - ), - registered_file=dict(argstr='--o %s', ), - source_file=dict( - argstr='--mov %s', - copyfile=False, - mandatory=True, - ), - spm_nifti=dict(argstr='--spm-nii', ), - subject_id=dict( - argstr='--s %s', - mandatory=True, - ), + args=dict(argstr="%s",), + contrast_type=dict(argstr="--%s", mandatory=True,), + dof=dict(argstr="--%d",), + environ=dict(nohash=True, usedefault=True,), + epi_mask=dict(argstr="--epi-mask",), + fsldof=dict(argstr="--fsl-dof %d",), + init=dict(argstr="--init-%s", mandatory=True, xor=["init_reg_file"],), + init_cost_file=dict(argstr="--initcost %s",), + init_reg_file=dict(argstr="--init-reg %s", mandatory=True, xor=["init"],), + intermediate_file=dict(argstr="--int %s",), + out_fsl_file=dict(argstr="--fslmat %s",), + out_lta_file=dict(argstr="--lta %s", min_ver="5.2.0",), + out_reg_file=dict(argstr="--reg %s", 
genfile=True,), + reg_frame=dict(argstr="--frame %d", xor=["reg_middle_frame"],), + reg_middle_frame=dict(argstr="--mid-frame", xor=["reg_frame"],), + registered_file=dict(argstr="--o %s",), + source_file=dict(argstr="--mov %s", copyfile=False, mandatory=True,), + spm_nifti=dict(argstr="--spm-nii",), + subject_id=dict(argstr="--s %s", mandatory=True,), subjects_dir=dict(), ) input_map_6_0 = dict( - args=dict(argstr='%s', ), - contrast_type=dict( - argstr='--%s', - mandatory=True, - ), - dof=dict(argstr='--%d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - epi_mask=dict(argstr='--epi-mask', ), - fsldof=dict(argstr='--fsl-dof %d', ), - init=dict( - argstr='--init-%s', - xor=['init_reg_file'], - ), - init_reg_file=dict( - argstr='--init-reg %s', - xor=['init'], - ), - init_cost_file=dict(argstr='--initcost %s', ), - intermediate_file=dict(argstr='--int %s', ), - out_fsl_file=dict(argstr='--fslmat %s', ), - out_lta_file=dict( - argstr='--lta %s', - min_ver='5.2.0', - ), - out_reg_file=dict( - argstr='--reg %s', - genfile=True, - ), - reg_frame=dict( - argstr='--frame %d', - xor=['reg_middle_frame'], - ), - reg_middle_frame=dict( - argstr='--mid-frame', - xor=['reg_frame'], - ), - registered_file=dict(argstr='--o %s', ), - source_file=dict( - argstr='--mov %s', - copyfile=False, - mandatory=True, - ), - spm_nifti=dict(argstr='--spm-nii', ), - subject_id=dict( - argstr='--s %s', - mandatory=True, - ), + args=dict(argstr="%s",), + contrast_type=dict(argstr="--%s", mandatory=True,), + dof=dict(argstr="--%d",), + environ=dict(nohash=True, usedefault=True,), + epi_mask=dict(argstr="--epi-mask",), + fsldof=dict(argstr="--fsl-dof %d",), + init=dict(argstr="--init-%s", xor=["init_reg_file"],), + init_reg_file=dict(argstr="--init-reg %s", xor=["init"],), + init_cost_file=dict(argstr="--initcost %s",), + intermediate_file=dict(argstr="--int %s",), + out_fsl_file=dict(argstr="--fslmat %s",), + out_lta_file=dict(argstr="--lta %s", min_ver="5.2.0",), + out_reg_file=dict(argstr="--reg %s", genfile=True,), + reg_frame=dict(argstr="--frame %d", xor=["reg_middle_frame"],), + reg_middle_frame=dict(argstr="--mid-frame", xor=["reg_frame"],), + registered_file=dict(argstr="--o %s",), + source_file=dict(argstr="--mov %s", copyfile=False, mandatory=True,), + spm_nifti=dict(argstr="--spm-nii",), + subject_id=dict(argstr="--s %s", mandatory=True,), subjects_dir=dict(), ) diff --git a/nipype/interfaces/freesurfer/tests/test_FSSurfaceCommand.py b/nipype/interfaces/freesurfer/tests/test_FSSurfaceCommand.py index bc34a8d7aa..6c377c9579 100644 --- a/nipype/interfaces/freesurfer/tests/test_FSSurfaceCommand.py +++ b/nipype/interfaces/freesurfer/tests/test_FSSurfaceCommand.py @@ -8,7 +8,7 @@ def test_FSSurfaceCommand_inputs(): input_map = dict( - args=dict(argstr='%s'), + args=dict(argstr="%s"), environ=dict(nohash=True, usedefault=True), subjects_dir=dict(), ) @@ -22,21 +22,25 @@ def test_FSSurfaceCommand_inputs(): @pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed") def test_associated_file(tmpdir): fssrc = FreeSurferSource( - subjects_dir=fs.Info.subjectsdir(), subject_id='fsaverage', hemi='lh') + subjects_dir=fs.Info.subjectsdir(), subject_id="fsaverage", hemi="lh" + ) fssrc.base_dir = tmpdir.strpath fssrc.resource_monitor = False fsavginfo = fssrc.run().outputs.get() # Pairs of white/pial files in the same directories - for white, pial in [('lh.white', 'lh.pial'), ('./lh.white', './lh.pial'), - (fsavginfo['white'], fsavginfo['pial'])]: + for white, pial in [ + ("lh.white", 
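# [editor's note] The test_auto_* modules in this patch all follow one
# generated idiom: a literal input_map of trait-name -> metadata dicts,
# checked attribute-by-attribute against the interface's live input_spec. A
# minimal hedged sketch of that idiom, using BBRegister and one entry copied
# from the map above:
from nipype.interfaces.freesurfer import BBRegister

def check_inputs(interface_cls, input_map):
    inputs = interface_cls.input_spec()
    for key, metadata in list(input_map.items()):
        for metakey, value in list(metadata.items()):
            # every declared piece of metadata must match the live trait
            assert getattr(inputs.traits()[key], metakey) == value

check_inputs(BBRegister, {"subject_id": dict(argstr="--s %s", mandatory=True)})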
"lh.pial"), + ("./lh.white", "./lh.pial"), + (fsavginfo["white"], fsavginfo["pial"]), + ]: # Unspecified paths, possibly with missing hemisphere information, # are equivalent to using the same directory and hemisphere - for name in ('pial', 'lh.pial', pial): + for name in ("pial", "lh.pial", pial): assert FSSurfaceCommand._associated_file(white, name) == pial # With path information, no changes are made - for name in ('./pial', './lh.pial', fsavginfo['pial']): + for name in ("./pial", "./lh.pial", fsavginfo["pial"]): assert FSSurfaceCommand._associated_file(white, name) == name diff --git a/nipype/interfaces/freesurfer/tests/test_auto_AddXFormToHeader.py b/nipype/interfaces/freesurfer/tests/test_auto_AddXFormToHeader.py index f3841d3fc2..499d85a437 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_AddXFormToHeader.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_AddXFormToHeader.py @@ -4,40 +4,24 @@ def test_AddXFormToHeader_inputs(): input_map = dict( - args=dict(argstr='%s', ), - copy_name=dict(argstr='-c', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - out_file=dict( - argstr='%s', - extensions=None, - position=-1, - usedefault=True, - ), + args=dict(argstr="%s",), + copy_name=dict(argstr="-c",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + out_file=dict(argstr="%s", extensions=None, position=-1, usedefault=True,), subjects_dir=dict(), - transform=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), - verbose=dict(argstr='-v', ), + transform=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), + verbose=dict(argstr="-v",), ) inputs = AddXFormToHeader.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AddXFormToHeader_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = AddXFormToHeader.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Aparc2Aseg.py b/nipype/interfaces/freesurfer/tests/test_auto_Aparc2Aseg.py index 9135472d60..2c0d0ad5ef 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Aparc2Aseg.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Aparc2Aseg.py @@ -4,84 +4,39 @@ def test_Aparc2Aseg_inputs(): input_map = dict( - a2009s=dict(argstr='--a2009s', ), - args=dict(argstr='%s', ), - aseg=dict( - argstr='--aseg %s', - extensions=None, - ), + a2009s=dict(argstr="--a2009s",), + args=dict(argstr="%s",), + aseg=dict(argstr="--aseg %s", extensions=None,), copy_inputs=dict(), - ctxseg=dict( - argstr='--ctxseg %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - filled=dict(extensions=None, ), - hypo_wm=dict(argstr='--hypo-as-wm', ), - label_wm=dict(argstr='--labelwm', ), - lh_annotation=dict( - extensions=None, - mandatory=True, - ), - lh_pial=dict( - extensions=None, - mandatory=True, - ), - lh_ribbon=dict( - extensions=None, - mandatory=True, - ), - lh_white=dict( - extensions=None, - mandatory=True, - ), - out_file=dict( - argstr='--o %s', - extensions=None, - mandatory=True, - ), - rh_annotation=dict( - extensions=None, - mandatory=True, - ), - rh_pial=dict( - extensions=None, - mandatory=True, - ), - rh_ribbon=dict( - extensions=None, - 
mandatory=True, - ), - rh_white=dict( - extensions=None, - mandatory=True, - ), - ribbon=dict( - extensions=None, - mandatory=True, - ), - rip_unknown=dict(argstr='--rip-unknown', ), - subject_id=dict( - argstr='--s %s', - mandatory=True, - usedefault=True, - ), + ctxseg=dict(argstr="--ctxseg %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), + filled=dict(extensions=None,), + hypo_wm=dict(argstr="--hypo-as-wm",), + label_wm=dict(argstr="--labelwm",), + lh_annotation=dict(extensions=None, mandatory=True,), + lh_pial=dict(extensions=None, mandatory=True,), + lh_ribbon=dict(extensions=None, mandatory=True,), + lh_white=dict(extensions=None, mandatory=True,), + out_file=dict(argstr="--o %s", extensions=None, mandatory=True,), + rh_annotation=dict(extensions=None, mandatory=True,), + rh_pial=dict(extensions=None, mandatory=True,), + rh_ribbon=dict(extensions=None, mandatory=True,), + rh_white=dict(extensions=None, mandatory=True,), + ribbon=dict(extensions=None, mandatory=True,), + rip_unknown=dict(argstr="--rip-unknown",), + subject_id=dict(argstr="--s %s", mandatory=True, usedefault=True,), subjects_dir=dict(), - volmask=dict(argstr='--volmask', ), + volmask=dict(argstr="--volmask",), ) inputs = Aparc2Aseg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Aparc2Aseg_outputs(): - output_map = dict(out_file=dict( - argstr='%s', - extensions=None, - ), ) + output_map = dict(out_file=dict(argstr="%s", extensions=None,),) outputs = Aparc2Aseg.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Apas2Aseg.py b/nipype/interfaces/freesurfer/tests/test_auto_Apas2Aseg.py index 1fde211396..295a376884 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Apas2Aseg.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Apas2Aseg.py @@ -4,21 +4,10 @@ def test_Apas2Aseg_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='--i %s', - extensions=None, - mandatory=True, - ), - out_file=dict( - argstr='--o %s', - extensions=None, - mandatory=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="--i %s", extensions=None, mandatory=True,), + out_file=dict(argstr="--o %s", extensions=None, mandatory=True,), subjects_dir=dict(), ) inputs = Apas2Aseg.input_spec() @@ -26,11 +15,10 @@ def test_Apas2Aseg_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Apas2Aseg_outputs(): - output_map = dict(out_file=dict( - argstr='%s', - extensions=None, - ), ) + output_map = dict(out_file=dict(argstr="%s", extensions=None,),) outputs = Apas2Aseg.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_ApplyMask.py b/nipype/interfaces/freesurfer/tests/test_auto_ApplyMask.py index fb3a8d956f..44bd9eba25 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_ApplyMask.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_ApplyMask.py @@ -4,58 +4,38 @@ def test_ApplyMask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), - invert_xfm=dict(argstr='-invert', ), - 
keep_mask_deletion_edits=dict(argstr='-keep_mask_deletion_edits', ), - mask_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - mask_thresh=dict(argstr='-T %.4f', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), + invert_xfm=dict(argstr="-invert",), + keep_mask_deletion_edits=dict(argstr="-keep_mask_deletion_edits",), + mask_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + mask_thresh=dict(argstr="-T %.4f",), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, hash_files=True, keep_extension=True, - name_source=['in_file'], - name_template='%s_masked', + name_source=["in_file"], + name_template="%s_masked", position=-1, ), subjects_dir=dict(), - transfer=dict(argstr='-transfer %d', ), - use_abs=dict(argstr='-abs', ), - xfm_file=dict( - argstr='-xform %s', - extensions=None, - ), - xfm_source=dict( - argstr='-lta_src %s', - extensions=None, - ), - xfm_target=dict( - argstr='-lta_dst %s', - extensions=None, - ), + transfer=dict(argstr="-transfer %d",), + use_abs=dict(argstr="-abs",), + xfm_file=dict(argstr="-xform %s", extensions=None,), + xfm_source=dict(argstr="-lta_src %s", extensions=None,), + xfm_target=dict(argstr="-lta_dst %s", extensions=None,), ) inputs = ApplyMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyMask_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ApplyMask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_ApplyVolTransform.py b/nipype/interfaces/freesurfer/tests/test_auto_ApplyVolTransform.py index 40b59358f5..1eedade07a 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_ApplyVolTransform.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_ApplyVolTransform.py @@ -4,108 +4,151 @@ def test_ApplyVolTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), fs_target=dict( - argstr='--fstarg', + argstr="--fstarg", mandatory=True, - requires=['reg_file'], - xor=('target_file', 'tal', 'fs_target'), + requires=["reg_file"], + xor=("target_file", "tal", "fs_target"), ), fsl_reg_file=dict( - argstr='--fsl %s', + argstr="--fsl %s", extensions=None, mandatory=True, - xor=('reg_file', 'lta_file', 'lta_inv_file', 'fsl_reg_file', - 'xfm_reg_file', 'reg_header', 'mni_152_reg', 'subject'), - ), - interp=dict(argstr='--interp %s', ), - inverse=dict(argstr='--inv', ), - invert_morph=dict( - argstr='--inv-morph', - requires=['m3z_file'], - ), + xor=( + "reg_file", + "lta_file", + "lta_inv_file", + "fsl_reg_file", + "xfm_reg_file", + "reg_header", + "mni_152_reg", + "subject", + ), + ), + interp=dict(argstr="--interp %s",), + inverse=dict(argstr="--inv",), + invert_morph=dict(argstr="--inv-morph", requires=["m3z_file"],), lta_file=dict( - argstr='--lta %s', + argstr="--lta %s", extensions=None, mandatory=True, - xor=('reg_file', 'lta_file', 'lta_inv_file', 'fsl_reg_file', - 'xfm_reg_file', 'reg_header', 'mni_152_reg', 'subject'), + xor=( + "reg_file", + "lta_file", + "lta_inv_file", + "fsl_reg_file", + "xfm_reg_file", + "reg_header", + "mni_152_reg", + "subject", + ), ), lta_inv_file=dict( - 
argstr='--lta-inv %s', + argstr="--lta-inv %s", extensions=None, mandatory=True, - xor=('reg_file', 'lta_file', 'lta_inv_file', 'fsl_reg_file', - 'xfm_reg_file', 'reg_header', 'mni_152_reg', 'subject'), - ), - m3z_file=dict( - argstr='--m3z %s', - extensions=None, - ), + xor=( + "reg_file", + "lta_file", + "lta_inv_file", + "fsl_reg_file", + "xfm_reg_file", + "reg_header", + "mni_152_reg", + "subject", + ), + ), + m3z_file=dict(argstr="--m3z %s", extensions=None,), mni_152_reg=dict( - argstr='--regheader', + argstr="--regheader", mandatory=True, - xor=('reg_file', 'lta_file', 'lta_inv_file', 'fsl_reg_file', - 'xfm_reg_file', 'reg_header', 'mni_152_reg', 'subject'), - ), - no_ded_m3z_path=dict( - argstr='--noDefM3zPath', - requires=['m3z_file'], - ), - no_resample=dict(argstr='--no-resample', ), + xor=( + "reg_file", + "lta_file", + "lta_inv_file", + "fsl_reg_file", + "xfm_reg_file", + "reg_header", + "mni_152_reg", + "subject", + ), + ), + no_ded_m3z_path=dict(argstr="--noDefM3zPath", requires=["m3z_file"],), + no_resample=dict(argstr="--no-resample",), reg_file=dict( - argstr='--reg %s', + argstr="--reg %s", extensions=None, mandatory=True, - xor=('reg_file', 'lta_file', 'lta_inv_file', 'fsl_reg_file', - 'xfm_reg_file', 'reg_header', 'mni_152_reg', 'subject'), + xor=( + "reg_file", + "lta_file", + "lta_inv_file", + "fsl_reg_file", + "xfm_reg_file", + "reg_header", + "mni_152_reg", + "subject", + ), ), reg_header=dict( - argstr='--regheader', + argstr="--regheader", mandatory=True, - xor=('reg_file', 'lta_file', 'lta_inv_file', 'fsl_reg_file', - 'xfm_reg_file', 'reg_header', 'mni_152_reg', 'subject'), + xor=( + "reg_file", + "lta_file", + "lta_inv_file", + "fsl_reg_file", + "xfm_reg_file", + "reg_header", + "mni_152_reg", + "subject", + ), ), source_file=dict( - argstr='--mov %s', - copyfile=False, - extensions=None, - mandatory=True, + argstr="--mov %s", copyfile=False, extensions=None, mandatory=True, ), subject=dict( - argstr='--s %s', + argstr="--s %s", mandatory=True, - xor=('reg_file', 'lta_file', 'lta_inv_file', 'fsl_reg_file', - 'xfm_reg_file', 'reg_header', 'mni_152_reg', 'subject'), + xor=( + "reg_file", + "lta_file", + "lta_inv_file", + "fsl_reg_file", + "xfm_reg_file", + "reg_header", + "mni_152_reg", + "subject", + ), ), subjects_dir=dict(), tal=dict( - argstr='--tal', - mandatory=True, - xor=('target_file', 'tal', 'fs_target'), + argstr="--tal", mandatory=True, xor=("target_file", "tal", "fs_target"), ), - tal_resolution=dict(argstr='--talres %.10f', ), + tal_resolution=dict(argstr="--talres %.10f",), target_file=dict( - argstr='--targ %s', + argstr="--targ %s", extensions=None, mandatory=True, - xor=('target_file', 'tal', 'fs_target'), - ), - transformed_file=dict( - argstr='--o %s', - extensions=None, - genfile=True, + xor=("target_file", "tal", "fs_target"), ), + transformed_file=dict(argstr="--o %s", extensions=None, genfile=True,), xfm_reg_file=dict( - argstr='--xfm %s', + argstr="--xfm %s", extensions=None, mandatory=True, - xor=('reg_file', 'lta_file', 'lta_inv_file', 'fsl_reg_file', - 'xfm_reg_file', 'reg_header', 'mni_152_reg', 'subject'), + xor=( + "reg_file", + "lta_file", + "lta_inv_file", + "fsl_reg_file", + "xfm_reg_file", + "reg_header", + "mni_152_reg", + "subject", + ), ), ) inputs = ApplyVolTransform.input_spec() @@ -113,8 +156,10 @@ def test_ApplyVolTransform_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyVolTransform_outputs(): - 
output_map = dict(transformed_file=dict(extensions=None, ), ) + output_map = dict(transformed_file=dict(extensions=None,),) outputs = ApplyVolTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Binarize.py b/nipype/interfaces/freesurfer/tests/test_auto_Binarize.py index 1e81688dcb..28211d63db 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Binarize.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Binarize.py @@ -4,72 +4,46 @@ def test_Binarize_inputs(): input_map = dict( - abs=dict(argstr='--abs', ), - args=dict(argstr='%s', ), - bin_col_num=dict(argstr='--bincol', ), - bin_val=dict(argstr='--binval %d', ), - bin_val_not=dict(argstr='--binvalnot %d', ), - binary_file=dict( - argstr='--o %s', - extensions=None, - genfile=True, - ), - count_file=dict(argstr='--count %s', ), - dilate=dict(argstr='--dilate %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - erode=dict(argstr='--erode %d', ), - erode2d=dict(argstr='--erode2d %d', ), - frame_no=dict(argstr='--frame %s', ), - in_file=dict( - argstr='--i %s', - copyfile=False, - extensions=None, - mandatory=True, - ), - invert=dict(argstr='--inv', ), - mask_file=dict( - argstr='--mask maskvol', - extensions=None, - ), - mask_thresh=dict(argstr='--mask-thresh %f', ), - match=dict(argstr='--match %d...', ), - max=dict( - argstr='--max %f', - xor=['wm_ven_csf'], - ), - merge_file=dict( - argstr='--merge %s', - extensions=None, - ), - min=dict( - argstr='--min %f', - xor=['wm_ven_csf'], - ), - out_type=dict(argstr='', ), - rmax=dict(argstr='--rmax %f', ), - rmin=dict(argstr='--rmin %f', ), + abs=dict(argstr="--abs",), + args=dict(argstr="%s",), + bin_col_num=dict(argstr="--bincol",), + bin_val=dict(argstr="--binval %d",), + bin_val_not=dict(argstr="--binvalnot %d",), + binary_file=dict(argstr="--o %s", extensions=None, genfile=True,), + count_file=dict(argstr="--count %s",), + dilate=dict(argstr="--dilate %d",), + environ=dict(nohash=True, usedefault=True,), + erode=dict(argstr="--erode %d",), + erode2d=dict(argstr="--erode2d %d",), + frame_no=dict(argstr="--frame %s",), + in_file=dict(argstr="--i %s", copyfile=False, extensions=None, mandatory=True,), + invert=dict(argstr="--inv",), + mask_file=dict(argstr="--mask maskvol", extensions=None,), + mask_thresh=dict(argstr="--mask-thresh %f",), + match=dict(argstr="--match %d...",), + max=dict(argstr="--max %f", xor=["wm_ven_csf"],), + merge_file=dict(argstr="--merge %s", extensions=None,), + min=dict(argstr="--min %f", xor=["wm_ven_csf"],), + out_type=dict(argstr="",), + rmax=dict(argstr="--rmax %f",), + rmin=dict(argstr="--rmin %f",), subjects_dir=dict(), - ventricles=dict(argstr='--ventricles', ), - wm=dict(argstr='--wm', ), - wm_ven_csf=dict( - argstr='--wm+vcsf', - xor=['min', 'max'], - ), - zero_edges=dict(argstr='--zero-edges', ), - zero_slice_edge=dict(argstr='--zero-slice-edges', ), + ventricles=dict(argstr="--ventricles",), + wm=dict(argstr="--wm",), + wm_ven_csf=dict(argstr="--wm+vcsf", xor=["min", "max"],), + zero_edges=dict(argstr="--zero-edges",), + zero_slice_edge=dict(argstr="--zero-slice-edges",), ) inputs = Binarize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Binarize_outputs(): output_map = dict( - binary_file=dict(extensions=None, ), - count_file=dict(extensions=None, ), + binary_file=dict(extensions=None,), count_file=dict(extensions=None,), ) outputs = 
Binarize.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_CALabel.py b/nipype/interfaces/freesurfer/tests/test_auto_CALabel.py index 92af7daff1..dfc1f86d97 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_CALabel.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_CALabel.py @@ -4,65 +4,32 @@ def test_CALabel_inputs(): input_map = dict( - align=dict(argstr='-align', ), - args=dict(argstr='%s', ), - aseg=dict( - argstr='-aseg %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-4, - ), - in_vol=dict( - argstr='-r %s', - extensions=None, - ), - intensities=dict( - argstr='-r %s', - extensions=None, - ), - label=dict( - argstr='-l %s', - extensions=None, - ), - no_big_ventricles=dict(argstr='-nobigventricles', ), + align=dict(argstr="-align",), + args=dict(argstr="%s",), + aseg=dict(argstr="-aseg %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-4,), + in_vol=dict(argstr="-r %s", extensions=None,), + intensities=dict(argstr="-r %s", extensions=None,), + label=dict(argstr="-l %s", extensions=None,), + no_big_ventricles=dict(argstr="-nobigventricles",), num_threads=dict(), - out_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - ), - prior=dict(argstr='-prior %.1f', ), - relabel_unlikely=dict(argstr='-relabel_unlikely %d %.1f', ), + out_file=dict(argstr="%s", extensions=None, mandatory=True, position=-1,), + prior=dict(argstr="-prior %.1f",), + relabel_unlikely=dict(argstr="-relabel_unlikely %d %.1f",), subjects_dir=dict(), - template=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - transform=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), + template=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + transform=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), ) inputs = CALabel.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CALabel_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = CALabel.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_CANormalize.py b/nipype/interfaces/freesurfer/tests/test_auto_CANormalize.py index 086fc8326d..84018f9a01 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_CANormalize.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_CANormalize.py @@ -4,61 +4,35 @@ def test_CANormalize_inputs(): input_map = dict( - args=dict(argstr='%s', ), - atlas=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), - control_points=dict( - argstr='-c %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-4, - ), - long_file=dict( - argstr='-long %s', - extensions=None, - ), - mask=dict( - argstr='-mask %s', - extensions=None, - ), + args=dict(argstr="%s",), + atlas=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), + control_points=dict(argstr="-c %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-4,), + 
long_file=dict(argstr="-long %s", extensions=None,), + mask=dict(argstr="-mask %s", extensions=None,), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, hash_files=False, keep_extension=True, - name_source=['in_file'], - name_template='%s_norm', + name_source=["in_file"], + name_template="%s_norm", position=-1, ), subjects_dir=dict(), - transform=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), + transform=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), ) inputs = CANormalize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CANormalize_outputs(): output_map = dict( - control_points=dict(extensions=None, ), - out_file=dict(extensions=None, ), + control_points=dict(extensions=None,), out_file=dict(extensions=None,), ) outputs = CANormalize.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_CARegister.py b/nipype/interfaces/freesurfer/tests/test_auto_CARegister.py index e26b0951e2..5ce22ecfa0 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_CARegister.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_CARegister.py @@ -4,55 +4,31 @@ def test_CARegister_inputs(): input_map = dict( - A=dict(argstr='-A %d', ), - align=dict(argstr='-align-%s', ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), - invert_and_save=dict( - argstr='-invert-and-save', - position=-4, - ), - l_files=dict(argstr='-l %s', ), - levels=dict(argstr='-levels %d', ), - mask=dict( - argstr='-mask %s', - extensions=None, - ), - no_big_ventricles=dict(argstr='-nobigventricles', ), + A=dict(argstr="-A %d",), + align=dict(argstr="-align-%s",), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), + invert_and_save=dict(argstr="-invert-and-save", position=-4,), + l_files=dict(argstr="-l %s",), + levels=dict(argstr="-levels %d",), + mask=dict(argstr="-mask %s", extensions=None,), + no_big_ventricles=dict(argstr="-nobigventricles",), num_threads=dict(), - out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - position=-1, - ), + out_file=dict(argstr="%s", extensions=None, genfile=True, position=-1,), subjects_dir=dict(), - template=dict( - argstr='%s', - extensions=None, - position=-2, - ), - transform=dict( - argstr='-T %s', - extensions=None, - ), + template=dict(argstr="%s", extensions=None, position=-2,), + transform=dict(argstr="-T %s", extensions=None,), ) inputs = CARegister.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CARegister_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = CARegister.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_CheckTalairachAlignment.py b/nipype/interfaces/freesurfer/tests/test_auto_CheckTalairachAlignment.py index ef75f59c2a..8dcede267e 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_CheckTalairachAlignment.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_CheckTalairachAlignment.py @@ -4,37 +4,28 @@ def test_CheckTalairachAlignment_inputs(): input_map = dict( - 
args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='-xfm %s', + argstr="-xfm %s", extensions=None, mandatory=True, position=-1, - xor=['subject'], - ), - subject=dict( - argstr='-subj %s', - mandatory=True, - position=-1, - xor=['in_file'], + xor=["subject"], ), + subject=dict(argstr="-subj %s", mandatory=True, position=-1, xor=["in_file"],), subjects_dir=dict(), - threshold=dict( - argstr='-T %.3f', - usedefault=True, - ), + threshold=dict(argstr="-T %.3f", usedefault=True,), ) inputs = CheckTalairachAlignment.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CheckTalairachAlignment_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = CheckTalairachAlignment.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Concatenate.py b/nipype/interfaces/freesurfer/tests/test_auto_Concatenate.py index baee02187b..c841be04ad 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Concatenate.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Concatenate.py @@ -4,50 +4,36 @@ def test_Concatenate_inputs(): input_map = dict( - add_val=dict(argstr='--add %f', ), - args=dict(argstr='%s', ), - combine=dict(argstr='--combine', ), - concatenated_file=dict( - argstr='--o %s', - extensions=None, - genfile=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - gmean=dict(argstr='--gmean %d', ), - in_files=dict( - argstr='--i %s...', - mandatory=True, - ), - keep_dtype=dict(argstr='--keep-datatype', ), - mask_file=dict( - argstr='--mask %s', - extensions=None, - ), - max_bonfcor=dict(argstr='--max-bonfcor', ), - max_index=dict(argstr='--max-index', ), - mean_div_n=dict(argstr='--mean-div-n', ), - multiply_by=dict(argstr='--mul %f', ), - multiply_matrix_file=dict( - argstr='--mtx %s', - extensions=None, - ), - paired_stats=dict(argstr='--paired-%s', ), - sign=dict(argstr='--%s', ), - sort=dict(argstr='--sort', ), - stats=dict(argstr='--%s', ), + add_val=dict(argstr="--add %f",), + args=dict(argstr="%s",), + combine=dict(argstr="--combine",), + concatenated_file=dict(argstr="--o %s", extensions=None, genfile=True,), + environ=dict(nohash=True, usedefault=True,), + gmean=dict(argstr="--gmean %d",), + in_files=dict(argstr="--i %s...", mandatory=True,), + keep_dtype=dict(argstr="--keep-datatype",), + mask_file=dict(argstr="--mask %s", extensions=None,), + max_bonfcor=dict(argstr="--max-bonfcor",), + max_index=dict(argstr="--max-index",), + mean_div_n=dict(argstr="--mean-div-n",), + multiply_by=dict(argstr="--mul %f",), + multiply_matrix_file=dict(argstr="--mtx %s", extensions=None,), + paired_stats=dict(argstr="--paired-%s",), + sign=dict(argstr="--%s",), + sort=dict(argstr="--sort",), + stats=dict(argstr="--%s",), subjects_dir=dict(), - vote=dict(argstr='--vote', ), + vote=dict(argstr="--vote",), ) inputs = Concatenate.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Concatenate_outputs(): - output_map = dict(concatenated_file=dict(extensions=None, ), ) + output_map = dict(concatenated_file=dict(extensions=None,),) outputs = Concatenate.output_spec() for key, metadata in 
list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_ConcatenateLTA.py b/nipype/interfaces/freesurfer/tests/test_auto_ConcatenateLTA.py index 07ea2e0d2f..b9750aecbf 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_ConcatenateLTA.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_ConcatenateLTA.py @@ -4,48 +4,33 @@ def test_ConcatenateLTA_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_lta1=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), - in_lta2=dict( - argstr='%s', - mandatory=True, - position=-2, - ), - invert_1=dict(argstr='-invert1', ), - invert_2=dict(argstr='-invert2', ), - invert_out=dict(argstr='-invertout', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_lta1=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), + in_lta2=dict(argstr="%s", mandatory=True, position=-2,), + invert_1=dict(argstr="-invert1",), + invert_2=dict(argstr="-invert2",), + invert_out=dict(argstr="-invertout",), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, hash_files=False, keep_extension=True, - name_source=['in_lta1'], - name_template='%s_concat', + name_source=["in_lta1"], + name_template="%s_concat", position=-1, ), - out_type=dict(argstr='-out_type %d', ), - subject=dict(argstr='-subject %s', ), + out_type=dict(argstr="-out_type %d",), + subject=dict(argstr="-subject %s",), subjects_dir=dict(), tal_source_file=dict( - argstr='-tal %s', + argstr="-tal %s", extensions=None, position=-5, - requires=['tal_template_file'], + requires=["tal_template_file"], ), tal_template_file=dict( - argstr='%s', - extensions=None, - position=-4, - requires=['tal_source_file'], + argstr="%s", extensions=None, position=-4, requires=["tal_source_file"], ), ) inputs = ConcatenateLTA.input_spec() @@ -53,8 +38,10 @@ def test_ConcatenateLTA_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ConcatenateLTA_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ConcatenateLTA.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Contrast.py b/nipype/interfaces/freesurfer/tests/test_auto_Contrast.py index d074b8b803..68e450315d 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Contrast.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Contrast.py @@ -4,57 +4,31 @@ def test_Contrast_inputs(): input_map = dict( - annotation=dict( - extensions=None, - mandatory=True, - ), - args=dict(argstr='%s', ), + annotation=dict(extensions=None, mandatory=True,), + args=dict(argstr="%s",), copy_inputs=dict(), - cortex=dict( - extensions=None, - mandatory=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - hemisphere=dict( - argstr='--%s-only', - mandatory=True, - ), - orig=dict( - extensions=None, - mandatory=True, - ), - rawavg=dict( - extensions=None, - mandatory=True, - ), - subject_id=dict( - argstr='--s %s', - mandatory=True, - usedefault=True, - ), + cortex=dict(extensions=None, mandatory=True,), + environ=dict(nohash=True, usedefault=True,), + hemisphere=dict(argstr="--%s-only", mandatory=True,), + orig=dict(extensions=None, mandatory=True,), + rawavg=dict(extensions=None, mandatory=True,), + subject_id=dict(argstr="--s %s", mandatory=True, usedefault=True,), 
subjects_dir=dict(), - thickness=dict( - extensions=None, - mandatory=True, - ), - white=dict( - extensions=None, - mandatory=True, - ), + thickness=dict(extensions=None, mandatory=True,), + white=dict(extensions=None, mandatory=True,), ) inputs = Contrast.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Contrast_outputs(): output_map = dict( - out_contrast=dict(extensions=None, ), - out_log=dict(extensions=None, ), - out_stats=dict(extensions=None, ), + out_contrast=dict(extensions=None,), + out_log=dict(extensions=None,), + out_stats=dict(extensions=None,), ) outputs = Contrast.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Curvature.py b/nipype/interfaces/freesurfer/tests/test_auto_Curvature.py index 60351cb36e..906d961740 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Curvature.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Curvature.py @@ -4,35 +4,29 @@ def test_Curvature_inputs(): input_map = dict( - args=dict(argstr='%s', ), - averages=dict(argstr='-a %d', ), + args=dict(argstr="%s",), + averages=dict(argstr="-a %d",), copy_input=dict(), - distances=dict(argstr='-distances %d %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), + distances=dict(argstr="-distances %d %d",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='%s', - copyfile=True, - extensions=None, - mandatory=True, - position=-2, + argstr="%s", copyfile=True, extensions=None, mandatory=True, position=-2, ), - n=dict(argstr='-n', ), - save=dict(argstr='-w', ), + n=dict(argstr="-n",), + save=dict(argstr="-w",), subjects_dir=dict(), - threshold=dict(argstr='-thresh %.3f', ), + threshold=dict(argstr="-thresh %.3f",), ) inputs = Curvature.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Curvature_outputs(): output_map = dict( - out_gauss=dict(extensions=None, ), - out_mean=dict(extensions=None, ), + out_gauss=dict(extensions=None,), out_mean=dict(extensions=None,), ) outputs = Curvature.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_CurvatureStats.py b/nipype/interfaces/freesurfer/tests/test_auto_CurvatureStats.py index 5750487216..3b69b41def 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_CurvatureStats.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_CurvatureStats.py @@ -4,58 +4,35 @@ def test_CurvatureStats_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), copy_inputs=dict(), - curvfile1=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - curvfile2=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - hemisphere=dict( - argstr='%s', - mandatory=True, - position=-3, - ), - min_max=dict(argstr='-m', ), + curvfile1=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + curvfile2=dict(argstr="%s", extensions=None, mandatory=True, position=-1,), + environ=dict(nohash=True, usedefault=True,), + hemisphere=dict(argstr="%s", mandatory=True, position=-3,), + min_max=dict(argstr="-m",), out_file=dict( - argstr='-o %s', + argstr="-o %s", extensions=None, hash_files=False, - name_source=['hemisphere'], - name_template='%s.curv.stats', - ), - subject_id=dict( - argstr='%s', - mandatory=True, - position=-4, - usedefault=True, 
+ name_source=["hemisphere"], + name_template="%s.curv.stats", ), + subject_id=dict(argstr="%s", mandatory=True, position=-4, usedefault=True,), subjects_dir=dict(), - surface=dict( - argstr='-F %s', - extensions=None, - ), - values=dict(argstr='-G', ), - write=dict(argstr='--writeCurvatureFiles', ), + surface=dict(argstr="-F %s", extensions=None,), + values=dict(argstr="-G",), + write=dict(argstr="--writeCurvatureFiles",), ) inputs = CurvatureStats.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CurvatureStats_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = CurvatureStats.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_DICOMConvert.py b/nipype/interfaces/freesurfer/tests/test_auto_DICOMConvert.py index 8a25f9b33a..3eae700dc6 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_DICOMConvert.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_DICOMConvert.py @@ -4,19 +4,16 @@ def test_DICOMConvert_inputs(): input_map = dict( - args=dict(argstr='%s', ), - base_output_dir=dict(mandatory=True, ), - dicom_dir=dict(mandatory=True, ), - dicom_info=dict(extensions=None, ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + base_output_dir=dict(mandatory=True,), + dicom_dir=dict(mandatory=True,), + dicom_info=dict(extensions=None,), + environ=dict(nohash=True, usedefault=True,), file_mapping=dict(), - ignore_single_slice=dict(requires=['dicom_info'], ), - out_type=dict(usedefault=True, ), - seq_list=dict(requires=['dicom_info'], ), - subject_dir_template=dict(usedefault=True, ), + ignore_single_slice=dict(requires=["dicom_info"],), + out_type=dict(usedefault=True,), + seq_list=dict(requires=["dicom_info"],), + subject_dir_template=dict(usedefault=True,), subject_id=dict(), subjects_dir=dict(), ) diff --git a/nipype/interfaces/freesurfer/tests/test_auto_EMRegister.py b/nipype/interfaces/freesurfer/tests/test_auto_EMRegister.py index ee33e3fcf2..d87052cebc 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_EMRegister.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_EMRegister.py @@ -4,52 +4,35 @@ def test_EMRegister_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), - mask=dict( - argstr='-mask %s', - extensions=None, - ), - nbrspacing=dict(argstr='-uns %d', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), + mask=dict(argstr="-mask %s", extensions=None,), + nbrspacing=dict(argstr="-uns %d",), num_threads=dict(), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, hash_files=False, keep_extension=False, - name_source=['in_file'], - name_template='%s_transform.lta', + name_source=["in_file"], + name_template="%s_transform.lta", position=-1, ), - skull=dict(argstr='-skull', ), + skull=dict(argstr="-skull",), subjects_dir=dict(), - template=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - transform=dict( - argstr='-t %s', - extensions=None, - ), + template=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + transform=dict(argstr="-t %s", extensions=None,), ) inputs = 
EMRegister.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EMRegister_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = EMRegister.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_EditWMwithAseg.py b/nipype/interfaces/freesurfer/tests/test_auto_EditWMwithAseg.py index 2794f3960a..725980b7ab 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_EditWMwithAseg.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_EditWMwithAseg.py @@ -4,36 +4,13 @@ def test_EditWMwithAseg_inputs(): input_map = dict( - args=dict(argstr='%s', ), - brain_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-4, - ), - keep_in=dict(argstr='-keep-in', ), - out_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - ), - seg_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), + args=dict(argstr="%s",), + brain_file=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-4,), + keep_in=dict(argstr="-keep-in",), + out_file=dict(argstr="%s", extensions=None, mandatory=True, position=-1,), + seg_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), subjects_dir=dict(), ) inputs = EditWMwithAseg.input_spec() @@ -41,8 +18,10 @@ def test_EditWMwithAseg_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EditWMwithAseg_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = EditWMwithAseg.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_EulerNumber.py b/nipype/interfaces/freesurfer/tests/test_auto_EulerNumber.py index 47084fc1a3..d31c9278bc 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_EulerNumber.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_EulerNumber.py @@ -4,17 +4,9 @@ def test_EulerNumber_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-1,), subjects_dir=dict(), ) inputs = EulerNumber.input_spec() @@ -22,8 +14,10 @@ def test_EulerNumber_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EulerNumber_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = EulerNumber.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_ExtractMainComponent.py b/nipype/interfaces/freesurfer/tests/test_auto_ExtractMainComponent.py index e664c9feb9..424d6bdb23 100644 --- 
a/nipype/interfaces/freesurfer/tests/test_auto_ExtractMainComponent.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_ExtractMainComponent.py @@ -4,22 +4,14 @@ def test_ExtractMainComponent_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=1,), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, - name_source='in_file', - name_template='%s.maincmp', + name_source="in_file", + name_template="%s.maincmp", position=2, ), ) @@ -28,8 +20,10 @@ def test_ExtractMainComponent_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ExtractMainComponent_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ExtractMainComponent.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_FSCommand.py b/nipype/interfaces/freesurfer/tests/test_auto_FSCommand.py index dc7678e7e6..87f836e34b 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_FSCommand.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_FSCommand.py @@ -4,11 +4,8 @@ def test_FSCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), subjects_dir=dict(), ) inputs = FSCommand.input_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_FSCommandOpenMP.py b/nipype/interfaces/freesurfer/tests/test_auto_FSCommandOpenMP.py index b4c56e8aeb..165191e96c 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_FSCommandOpenMP.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_FSCommandOpenMP.py @@ -4,11 +4,8 @@ def test_FSCommandOpenMP_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), num_threads=dict(), subjects_dir=dict(), ) diff --git a/nipype/interfaces/freesurfer/tests/test_auto_FSScriptCommand.py b/nipype/interfaces/freesurfer/tests/test_auto_FSScriptCommand.py index 39b16b845e..162962f578 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_FSScriptCommand.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_FSScriptCommand.py @@ -4,11 +4,8 @@ def test_FSScriptCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), subjects_dir=dict(), ) inputs = FSScriptCommand.input_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_FitMSParams.py b/nipype/interfaces/freesurfer/tests/test_auto_FitMSParams.py index 098f20f8b1..3133f52445 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_FitMSParams.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_FitMSParams.py @@ -4,22 +4,11 @@ def test_FitMSParams_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), flip_list=dict(), - in_files=dict( - argstr='%s', - mandatory=True, - 
position=-2, - ), - out_dir=dict( - argstr='%s', - genfile=True, - position=-1, - ), + in_files=dict(argstr="%s", mandatory=True, position=-2,), + out_dir=dict(argstr="%s", genfile=True, position=-1,), subjects_dir=dict(), te_list=dict(), tr_list=dict(), @@ -30,11 +19,13 @@ def test_FitMSParams_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FitMSParams_outputs(): output_map = dict( - pd_image=dict(extensions=None, ), - t1_image=dict(extensions=None, ), - t2star_image=dict(extensions=None, ), + pd_image=dict(extensions=None,), + t1_image=dict(extensions=None,), + t2star_image=dict(extensions=None,), ) outputs = FitMSParams.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_FixTopology.py b/nipype/interfaces/freesurfer/tests/test_auto_FixTopology.py index a95e5bbc5a..bfdb140216 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_FixTopology.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_FixTopology.py @@ -4,46 +4,19 @@ def test_FixTopology_inputs(): input_map = dict( - args=dict(argstr='%s', ), - copy_inputs=dict(mandatory=True, ), - environ=dict( - nohash=True, - usedefault=True, - ), - ga=dict(argstr='-ga', ), - hemisphere=dict( - argstr='%s', - mandatory=True, - position=-1, - ), - in_brain=dict( - extensions=None, - mandatory=True, - ), - in_inflated=dict( - extensions=None, - mandatory=True, - ), - in_orig=dict( - extensions=None, - mandatory=True, - ), - in_wm=dict( - extensions=None, - mandatory=True, - ), - mgz=dict(argstr='-mgz', ), - seed=dict(argstr='-seed %d', ), - sphere=dict( - argstr='-sphere %s', - extensions=None, - ), - subject_id=dict( - argstr='%s', - mandatory=True, - position=-2, - usedefault=True, - ), + args=dict(argstr="%s",), + copy_inputs=dict(mandatory=True,), + environ=dict(nohash=True, usedefault=True,), + ga=dict(argstr="-ga",), + hemisphere=dict(argstr="%s", mandatory=True, position=-1,), + in_brain=dict(extensions=None, mandatory=True,), + in_inflated=dict(extensions=None, mandatory=True,), + in_orig=dict(extensions=None, mandatory=True,), + in_wm=dict(extensions=None, mandatory=True,), + mgz=dict(argstr="-mgz",), + seed=dict(argstr="-seed %d",), + sphere=dict(argstr="-sphere %s", extensions=None,), + subject_id=dict(argstr="%s", mandatory=True, position=-2, usedefault=True,), subjects_dir=dict(), ) inputs = FixTopology.input_spec() @@ -51,8 +24,10 @@ def test_FixTopology_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FixTopology_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = FixTopology.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_FuseSegmentations.py b/nipype/interfaces/freesurfer/tests/test_auto_FuseSegmentations.py index 7b5c940935..2ffc84eada 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_FuseSegmentations.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_FuseSegmentations.py @@ -4,46 +4,25 @@ def test_FuseSegmentations_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_norms=dict( - argstr='-n %s', - mandatory=True, - ), - in_segmentations=dict( - argstr='-a %s', - mandatory=True, - ), - in_segmentations_noCC=dict( - argstr='-c %s', - 
mandatory=True, - ), - out_file=dict( - extensions=None, - mandatory=True, - position=-1, - ), - subject_id=dict( - argstr='%s', - position=-3, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_norms=dict(argstr="-n %s", mandatory=True,), + in_segmentations=dict(argstr="-a %s", mandatory=True,), + in_segmentations_noCC=dict(argstr="-c %s", mandatory=True,), + out_file=dict(extensions=None, mandatory=True, position=-1,), + subject_id=dict(argstr="%s", position=-3,), subjects_dir=dict(), - timepoints=dict( - argstr='%s', - mandatory=True, - position=-2, - ), + timepoints=dict(argstr="%s", mandatory=True, position=-2,), ) inputs = FuseSegmentations.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FuseSegmentations_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = FuseSegmentations.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_GLMFit.py b/nipype/interfaces/freesurfer/tests/test_auto_GLMFit.py index 1a4e966239..a883f39732 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_GLMFit.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_GLMFit.py @@ -4,128 +4,72 @@ def test_GLMFit_inputs(): input_map = dict( - allow_ill_cond=dict(argstr='--illcond', ), - allow_repeated_subjects=dict(argstr='--allowsubjrep', ), - args=dict(argstr='%s', ), - calc_AR1=dict(argstr='--tar1', ), - check_opts=dict(argstr='--checkopts', ), - compute_log_y=dict(argstr='--logy', ), - contrast=dict(argstr='--C %s...', ), - cortex=dict( - argstr='--cortex', - xor=['label_file'], - ), - debug=dict(argstr='--debug', ), + allow_ill_cond=dict(argstr="--illcond",), + allow_repeated_subjects=dict(argstr="--allowsubjrep",), + args=dict(argstr="%s",), + calc_AR1=dict(argstr="--tar1",), + check_opts=dict(argstr="--checkopts",), + compute_log_y=dict(argstr="--logy",), + contrast=dict(argstr="--C %s...",), + cortex=dict(argstr="--cortex", xor=["label_file"],), + debug=dict(argstr="--debug",), design=dict( - argstr='--X %s', - extensions=None, - xor=('fsgd', 'design', 'one_sample'), - ), - diag=dict(argstr='--diag %d', ), - diag_cluster=dict(argstr='--diag-cluster', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixed_fx_dof=dict( - argstr='--ffxdof %d', - xor=['fixed_fx_dof_file'], + argstr="--X %s", extensions=None, xor=("fsgd", "design", "one_sample"), ), + diag=dict(argstr="--diag %d",), + diag_cluster=dict(argstr="--diag-cluster",), + environ=dict(nohash=True, usedefault=True,), + fixed_fx_dof=dict(argstr="--ffxdof %d", xor=["fixed_fx_dof_file"],), fixed_fx_dof_file=dict( - argstr='--ffxdofdat %d', - extensions=None, - xor=['fixed_fx_dof'], - ), - fixed_fx_var=dict( - argstr='--yffxvar %s', - extensions=None, - ), - force_perm=dict(argstr='--perm-force', ), - fsgd=dict( - argstr='--fsgd %s %s', - xor=('fsgd', 'design', 'one_sample'), - ), - fwhm=dict(argstr='--fwhm %f', ), - glm_dir=dict( - argstr='--glmdir %s', - genfile=True, + argstr="--ffxdofdat %d", extensions=None, xor=["fixed_fx_dof"], ), + fixed_fx_var=dict(argstr="--yffxvar %s", extensions=None,), + force_perm=dict(argstr="--perm-force",), + fsgd=dict(argstr="--fsgd %s %s", xor=("fsgd", "design", "one_sample"),), + fwhm=dict(argstr="--fwhm %f",), + glm_dir=dict(argstr="--glmdir %s", genfile=True,), hemi=dict(), - in_file=dict( - argstr='--y %s', - 
copyfile=False, - extensions=None, - mandatory=True, - ), - invert_mask=dict(argstr='--mask-inv', ), - label_file=dict( - argstr='--label %s', - extensions=None, - xor=['cortex'], - ), - mask_file=dict( - argstr='--mask %s', - extensions=None, - ), - no_contrast_ok=dict(argstr='--no-contrasts-ok', ), - no_est_fwhm=dict(argstr='--no-est-fwhm', ), - no_mask_smooth=dict(argstr='--no-mask-smooth', ), - no_prune=dict( - argstr='--no-prune', - xor=['prunethresh'], - ), + in_file=dict(argstr="--y %s", copyfile=False, extensions=None, mandatory=True,), + invert_mask=dict(argstr="--mask-inv",), + label_file=dict(argstr="--label %s", extensions=None, xor=["cortex"],), + mask_file=dict(argstr="--mask %s", extensions=None,), + no_contrast_ok=dict(argstr="--no-contrasts-ok",), + no_est_fwhm=dict(argstr="--no-est-fwhm",), + no_mask_smooth=dict(argstr="--no-mask-smooth",), + no_prune=dict(argstr="--no-prune", xor=["prunethresh"],), one_sample=dict( - argstr='--osgm', - xor=('one_sample', 'fsgd', 'design', 'contrast'), - ), - pca=dict(argstr='--pca', ), - per_voxel_reg=dict(argstr='--pvr %s...', ), - profile=dict(argstr='--profile %d', ), - prune=dict(argstr='--prune', ), - prune_thresh=dict( - argstr='--prune_thr %f', - xor=['noprune'], - ), - resynth_test=dict(argstr='--resynthtest %d', ), - save_cond=dict(argstr='--save-cond', ), - save_estimate=dict(argstr='--yhat-save', ), - save_res_corr_mtx=dict(argstr='--eres-scm', ), - save_residual=dict(argstr='--eres-save', ), - seed=dict(argstr='--seed %d', ), - self_reg=dict(argstr='--selfreg %d %d %d', ), - sim_done_file=dict( - argstr='--sim-done %s', - extensions=None, - ), - sim_sign=dict(argstr='--sim-sign %s', ), - simulation=dict(argstr='--sim %s %d %f %s', ), + argstr="--osgm", xor=("one_sample", "fsgd", "design", "contrast"), + ), + pca=dict(argstr="--pca",), + per_voxel_reg=dict(argstr="--pvr %s...",), + profile=dict(argstr="--profile %d",), + prune=dict(argstr="--prune",), + prune_thresh=dict(argstr="--prune_thr %f", xor=["noprune"],), + resynth_test=dict(argstr="--resynthtest %d",), + save_cond=dict(argstr="--save-cond",), + save_estimate=dict(argstr="--yhat-save",), + save_res_corr_mtx=dict(argstr="--eres-scm",), + save_residual=dict(argstr="--eres-save",), + seed=dict(argstr="--seed %d",), + self_reg=dict(argstr="--selfreg %d %d %d",), + sim_done_file=dict(argstr="--sim-done %s", extensions=None,), + sim_sign=dict(argstr="--sim-sign %s",), + simulation=dict(argstr="--sim %s %d %f %s",), subject_id=dict(), subjects_dir=dict(), - surf=dict( - argstr='--surf %s %s %s', - requires=['subject_id', 'hemi'], - ), - surf_geo=dict(usedefault=True, ), - synth=dict(argstr='--synth', ), - uniform=dict(argstr='--uniform %f %f', ), - var_fwhm=dict(argstr='--var-fwhm %f', ), - vox_dump=dict(argstr='--voxdump %d %d %d', ), - weight_file=dict( - extensions=None, - xor=['weighted_ls'], - ), - weight_inv=dict( - argstr='--w-inv', - xor=['weighted_ls'], - ), - weight_sqrt=dict( - argstr='--w-sqrt', - xor=['weighted_ls'], - ), + surf=dict(argstr="--surf %s %s %s", requires=["subject_id", "hemi"],), + surf_geo=dict(usedefault=True,), + synth=dict(argstr="--synth",), + uniform=dict(argstr="--uniform %f %f",), + var_fwhm=dict(argstr="--var-fwhm %f",), + vox_dump=dict(argstr="--voxdump %d %d %d",), + weight_file=dict(extensions=None, xor=["weighted_ls"],), + weight_inv=dict(argstr="--w-inv", xor=["weighted_ls"],), + weight_sqrt=dict(argstr="--w-sqrt", xor=["weighted_ls"],), weighted_ls=dict( - argstr='--wls %s', + argstr="--wls %s", extensions=None, - xor=('weight_file', 
'weight_inv', 'weight_sqrt'), + xor=("weight_file", "weight_inv", "weight_sqrt"), ), ) inputs = GLMFit.input_spec() @@ -133,25 +77,27 @@ def test_GLMFit_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GLMFit_outputs(): output_map = dict( - beta_file=dict(extensions=None, ), - dof_file=dict(extensions=None, ), - error_file=dict(extensions=None, ), - error_stddev_file=dict(extensions=None, ), - error_var_file=dict(extensions=None, ), - estimate_file=dict(extensions=None, ), - frame_eigenvectors=dict(extensions=None, ), + beta_file=dict(extensions=None,), + dof_file=dict(extensions=None,), + error_file=dict(extensions=None,), + error_stddev_file=dict(extensions=None,), + error_var_file=dict(extensions=None,), + estimate_file=dict(extensions=None,), + frame_eigenvectors=dict(extensions=None,), ftest_file=dict(), - fwhm_file=dict(extensions=None, ), + fwhm_file=dict(extensions=None,), gamma_file=dict(), gamma_var_file=dict(), glm_dir=dict(), - mask_file=dict(extensions=None, ), + mask_file=dict(extensions=None,), sig_file=dict(), - singular_values=dict(extensions=None, ), - spatial_eigenvectors=dict(extensions=None, ), - svd_stats_file=dict(extensions=None, ), + singular_values=dict(extensions=None,), + spatial_eigenvectors=dict(extensions=None,), + svd_stats_file=dict(extensions=None,), ) outputs = GLMFit.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_ImageInfo.py b/nipype/interfaces/freesurfer/tests/test_auto_ImageInfo.py index 9dc4b292dd..2a80c0743b 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_ImageInfo.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_ImageInfo.py @@ -4,16 +4,9 @@ def test_ImageInfo_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - position=1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, position=1,), subjects_dir=dict(), ) inputs = ImageInfo.input_spec() @@ -21,6 +14,8 @@ def test_ImageInfo_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ImageInfo_outputs(): output_map = dict( TE=dict(), @@ -31,7 +26,7 @@ def test_ImageInfo_outputs(): file_format=dict(), info=dict(), orientation=dict(), - out_file=dict(extensions=None, ), + out_file=dict(extensions=None,), ph_enc_dir=dict(), vox_sizes=dict(), ) diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Jacobian.py b/nipype/interfaces/freesurfer/tests/test_auto_Jacobian.py index 079e50968d..14cd9fa9f0 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Jacobian.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Jacobian.py @@ -4,30 +4,17 @@ def test_Jacobian_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_mappedsurf=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - in_origsurf=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_mappedsurf=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + in_origsurf=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, 
hash_files=False, keep_extension=False, - name_source=['in_origsurf'], - name_template='%s.jacobian', + name_source=["in_origsurf"], + name_template="%s.jacobian", position=-1, ), subjects_dir=dict(), @@ -37,8 +24,10 @@ def test_Jacobian_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Jacobian_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Jacobian.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_LTAConvert.py b/nipype/interfaces/freesurfer/tests/test_auto_LTAConvert.py index 82dd02fe0c..ab59b01867 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_LTAConvert.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_LTAConvert.py @@ -4,84 +4,68 @@ def test_LTAConvert_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), in_fsl=dict( - argstr='--infsl %s', + argstr="--infsl %s", extensions=None, mandatory=True, - xor=('in_lta', 'in_fsl', 'in_mni', 'in_reg', 'in_niftyreg', - 'in_itk'), + xor=("in_lta", "in_fsl", "in_mni", "in_reg", "in_niftyreg", "in_itk"), ), in_itk=dict( - argstr='--initk %s', + argstr="--initk %s", extensions=None, mandatory=True, - xor=('in_lta', 'in_fsl', 'in_mni', 'in_reg', 'in_niftyreg', - 'in_itk'), + xor=("in_lta", "in_fsl", "in_mni", "in_reg", "in_niftyreg", "in_itk"), ), in_lta=dict( - argstr='--inlta %s', + argstr="--inlta %s", mandatory=True, - xor=('in_lta', 'in_fsl', 'in_mni', 'in_reg', 'in_niftyreg', - 'in_itk'), + xor=("in_lta", "in_fsl", "in_mni", "in_reg", "in_niftyreg", "in_itk"), ), in_mni=dict( - argstr='--inmni %s', + argstr="--inmni %s", extensions=None, mandatory=True, - xor=('in_lta', 'in_fsl', 'in_mni', 'in_reg', 'in_niftyreg', - 'in_itk'), + xor=("in_lta", "in_fsl", "in_mni", "in_reg", "in_niftyreg", "in_itk"), ), in_niftyreg=dict( - argstr='--inniftyreg %s', + argstr="--inniftyreg %s", extensions=None, mandatory=True, - xor=('in_lta', 'in_fsl', 'in_mni', 'in_reg', 'in_niftyreg', - 'in_itk'), + xor=("in_lta", "in_fsl", "in_mni", "in_reg", "in_niftyreg", "in_itk"), ), in_reg=dict( - argstr='--inreg %s', + argstr="--inreg %s", extensions=None, mandatory=True, - xor=('in_lta', 'in_fsl', 'in_mni', 'in_reg', 'in_niftyreg', - 'in_itk'), - ), - invert=dict(argstr='--invert', ), - ltavox2vox=dict( - argstr='--ltavox2vox', - requires=['out_lta'], - ), - out_fsl=dict(argstr='--outfsl %s', ), - out_itk=dict(argstr='--outitk %s', ), - out_lta=dict(argstr='--outlta %s', ), - out_mni=dict(argstr='--outmni %s', ), - out_reg=dict(argstr='--outreg %s', ), - source_file=dict( - argstr='--src %s', - extensions=None, - ), - target_conform=dict(argstr='--trgconform', ), - target_file=dict( - argstr='--trg %s', - extensions=None, + xor=("in_lta", "in_fsl", "in_mni", "in_reg", "in_niftyreg", "in_itk"), ), + invert=dict(argstr="--invert",), + ltavox2vox=dict(argstr="--ltavox2vox", requires=["out_lta"],), + out_fsl=dict(argstr="--outfsl %s",), + out_itk=dict(argstr="--outitk %s",), + out_lta=dict(argstr="--outlta %s",), + out_mni=dict(argstr="--outmni %s",), + out_reg=dict(argstr="--outreg %s",), + source_file=dict(argstr="--src %s", extensions=None,), + target_conform=dict(argstr="--trgconform",), + target_file=dict(argstr="--trg %s", extensions=None,), ) inputs = 
LTAConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LTAConvert_outputs(): output_map = dict( - out_fsl=dict(extensions=None, ), - out_itk=dict(extensions=None, ), - out_lta=dict(extensions=None, ), - out_mni=dict(extensions=None, ), - out_reg=dict(extensions=None, ), + out_fsl=dict(extensions=None,), + out_itk=dict(extensions=None,), + out_lta=dict(extensions=None,), + out_mni=dict(extensions=None,), + out_reg=dict(extensions=None,), ) outputs = LTAConvert.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Label2Annot.py b/nipype/interfaces/freesurfer/tests/test_auto_Label2Annot.py index 78f7076f92..f800c560f7 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Label2Annot.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Label2Annot.py @@ -4,48 +4,28 @@ def test_Label2Annot_inputs(): input_map = dict( - args=dict(argstr='%s', ), - color_table=dict( - argstr='--ctab %s', - extensions=None, - ), + args=dict(argstr="%s",), + color_table=dict(argstr="--ctab %s", extensions=None,), copy_inputs=dict(), - environ=dict( - nohash=True, - usedefault=True, - ), - hemisphere=dict( - argstr='--hemi %s', - mandatory=True, - ), - in_labels=dict( - argstr='--l %s...', - mandatory=True, - ), - keep_max=dict(argstr='--maxstatwinner', ), - orig=dict( - extensions=None, - mandatory=True, - ), - out_annot=dict( - argstr='--a %s', - mandatory=True, - ), - subject_id=dict( - argstr='--s %s', - mandatory=True, - usedefault=True, - ), + environ=dict(nohash=True, usedefault=True,), + hemisphere=dict(argstr="--hemi %s", mandatory=True,), + in_labels=dict(argstr="--l %s...", mandatory=True,), + keep_max=dict(argstr="--maxstatwinner",), + orig=dict(extensions=None, mandatory=True,), + out_annot=dict(argstr="--a %s", mandatory=True,), + subject_id=dict(argstr="--s %s", mandatory=True, usedefault=True,), subjects_dir=dict(), - verbose_off=dict(argstr='--noverbose', ), + verbose_off=dict(argstr="--noverbose",), ) inputs = Label2Annot.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Label2Annot_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Label2Annot.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Label2Label.py b/nipype/interfaces/freesurfer/tests/test_auto_Label2Label.py index 934770875f..e5e227c5a9 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Label2Label.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Label2Label.py @@ -4,67 +4,37 @@ def test_Label2Label_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), copy_inputs=dict(), - environ=dict( - nohash=True, - usedefault=True, - ), - hemisphere=dict( - argstr='--hemi %s', - mandatory=True, - ), + environ=dict(nohash=True, usedefault=True,), + hemisphere=dict(argstr="--hemi %s", mandatory=True,), out_file=dict( - argstr='--trglabel %s', + argstr="--trglabel %s", extensions=None, hash_files=False, keep_extension=True, - name_source=['source_label'], - name_template='%s_converted', - ), - registration_method=dict( - argstr='--regmethod %s', - usedefault=True, - ), - source_label=dict( - argstr='--srclabel %s', - extensions=None, - mandatory=True, - ), - source_sphere_reg=dict( - 
-            extensions=None,
-            mandatory=True,
-        ),
-        source_subject=dict(
-            argstr='--srcsubject %s',
-            mandatory=True,
-        ),
-        source_white=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        sphere_reg=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        subject_id=dict(
-            argstr='--trgsubject %s',
-            mandatory=True,
-            usedefault=True,
-        ),
+            name_source=["source_label"],
+            name_template="%s_converted",
+        ),
+        registration_method=dict(argstr="--regmethod %s", usedefault=True,),
+        source_label=dict(argstr="--srclabel %s", extensions=None, mandatory=True,),
+        source_sphere_reg=dict(extensions=None, mandatory=True,),
+        source_subject=dict(argstr="--srcsubject %s", mandatory=True,),
+        source_white=dict(extensions=None, mandatory=True,),
+        sphere_reg=dict(extensions=None, mandatory=True,),
+        subject_id=dict(argstr="--trgsubject %s", mandatory=True, usedefault=True,),
         subjects_dir=dict(),
-        white=dict(
-            extensions=None,
-            mandatory=True,
-        ),
+        white=dict(extensions=None, mandatory=True,),
     )
     inputs = Label2Label.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Label2Label_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = Label2Label.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Label2Vol.py b/nipype/interfaces/freesurfer/tests/test_auto_Label2Vol.py
index 11a244e94b..dd890531c9 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_Label2Vol.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_Label2Vol.py
@@ -5,88 +5,67 @@
 def test_Label2Vol_inputs():
     input_map = dict(
         annot_file=dict(
-            argstr='--annot %s',
+            argstr="--annot %s",
             copyfile=False,
             extensions=None,
             mandatory=True,
-            requires=('subject_id', 'hemi'),
-            xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'),
+            requires=("subject_id", "hemi"),
+            xor=("label_file", "annot_file", "seg_file", "aparc_aseg"),
         ),
         aparc_aseg=dict(
-            argstr='--aparc+aseg',
+            argstr="--aparc+aseg",
             mandatory=True,
-            xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'),
+            xor=("label_file", "annot_file", "seg_file", "aparc_aseg"),
         ),
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        fill_thresh=dict(argstr='--fillthresh %g', ),
-        hemi=dict(argstr='--hemi %s', ),
-        identity=dict(
-            argstr='--identity',
-            xor=('reg_file', 'reg_header', 'identity'),
-        ),
-        invert_mtx=dict(argstr='--invertmtx', ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        fill_thresh=dict(argstr="--fillthresh %g",),
+        hemi=dict(argstr="--hemi %s",),
+        identity=dict(argstr="--identity", xor=("reg_file", "reg_header", "identity"),),
+        invert_mtx=dict(argstr="--invertmtx",),
         label_file=dict(
-            argstr='--label %s...',
+            argstr="--label %s...",
             copyfile=False,
             mandatory=True,
-            xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'),
-        ),
-        label_hit_file=dict(
-            argstr='--hits %s',
-            extensions=None,
-        ),
-        label_voxel_volume=dict(argstr='--labvoxvol %f', ),
-        map_label_stat=dict(
-            argstr='--label-stat %s',
-            extensions=None,
-        ),
-        native_vox2ras=dict(argstr='--native-vox2ras', ),
-        proj=dict(
-            argstr='--proj %s %f %f %f',
-            requires=('subject_id', 'hemi'),
+            xor=("label_file", "annot_file", "seg_file", "aparc_aseg"),
         ),
+        label_hit_file=dict(argstr="--hits %s", extensions=None,),
+        label_voxel_volume=dict(argstr="--labvoxvol %f",),
+        map_label_stat=dict(argstr="--label-stat %s", extensions=None,),
+        native_vox2ras=dict(argstr="--native-vox2ras",),
+        proj=dict(argstr="--proj %s %f %f %f", requires=("subject_id", "hemi"),),
         reg_file=dict(
-            argstr='--reg %s',
+            argstr="--reg %s",
             extensions=None,
-            xor=('reg_file', 'reg_header', 'identity'),
+            xor=("reg_file", "reg_header", "identity"),
         ),
         reg_header=dict(
-            argstr='--regheader %s',
+            argstr="--regheader %s",
             extensions=None,
-            xor=('reg_file', 'reg_header', 'identity'),
+            xor=("reg_file", "reg_header", "identity"),
         ),
         seg_file=dict(
-            argstr='--seg %s',
+            argstr="--seg %s",
             copyfile=False,
             extensions=None,
             mandatory=True,
-            xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'),
+            xor=("label_file", "annot_file", "seg_file", "aparc_aseg"),
         ),
-        subject_id=dict(argstr='--subject %s', ),
+        subject_id=dict(argstr="--subject %s",),
         subjects_dir=dict(),
-        surface=dict(argstr='--surf %s', ),
-        template_file=dict(
-            argstr='--temp %s',
-            extensions=None,
-            mandatory=True,
-        ),
-        vol_label_file=dict(
-            argstr='--o %s',
-            extensions=None,
-            genfile=True,
-        ),
+        surface=dict(argstr="--surf %s",),
+        template_file=dict(argstr="--temp %s", extensions=None, mandatory=True,),
+        vol_label_file=dict(argstr="--o %s", extensions=None, genfile=True,),
     )
     inputs = Label2Vol.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Label2Vol_outputs():
-    output_map = dict(vol_label_file=dict(extensions=None, ), )
+    output_map = dict(vol_label_file=dict(extensions=None,),)
     outputs = Label2Vol.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MNIBiasCorrection.py b/nipype/interfaces/freesurfer/tests/test_auto_MNIBiasCorrection.py
index 6ba4926cd3..16ed15d093 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MNIBiasCorrection.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MNIBiasCorrection.py
@@ -4,50 +4,36 @@
 def test_MNIBiasCorrection_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        distance=dict(argstr='--distance %d', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_file=dict(
-            argstr='--i %s',
-            extensions=None,
-            mandatory=True,
-        ),
-        iterations=dict(
-            argstr='--n %d',
-            usedefault=True,
-        ),
-        mask=dict(
-            argstr='--mask %s',
-            extensions=None,
-        ),
-        no_rescale=dict(argstr='--no-rescale', ),
+        args=dict(argstr="%s",),
+        distance=dict(argstr="--distance %d",),
+        environ=dict(nohash=True, usedefault=True,),
+        in_file=dict(argstr="--i %s", extensions=None, mandatory=True,),
+        iterations=dict(argstr="--n %d", usedefault=True,),
+        mask=dict(argstr="--mask %s", extensions=None,),
+        no_rescale=dict(argstr="--no-rescale",),
         out_file=dict(
-            argstr='--o %s',
+            argstr="--o %s",
             extensions=None,
             hash_files=False,
             keep_extension=True,
-            name_source=['in_file'],
-            name_template='%s_output',
+            name_source=["in_file"],
+            name_template="%s_output",
         ),
-        protocol_iterations=dict(argstr='--proto-iters %d', ),
-        shrink=dict(argstr='--shrink %d', ),
-        stop=dict(argstr='--stop %f', ),
+        protocol_iterations=dict(argstr="--proto-iters %d",),
+        shrink=dict(argstr="--shrink %d",),
+        stop=dict(argstr="--stop %f",),
         subjects_dir=dict(),
-        transform=dict(
-            argstr='--uchar %s',
-            extensions=None,
-        ),
+        transform=dict(argstr="--uchar %s", extensions=None,),
     )
     inputs = MNIBiasCorrection.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MNIBiasCorrection_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = MNIBiasCorrection.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MPRtoMNI305.py b/nipype/interfaces/freesurfer/tests/test_auto_MPRtoMNI305.py
index 3e916cf7dc..ae81998809 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MPRtoMNI305.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MPRtoMNI305.py
@@ -4,38 +4,24 @@
 def test_MPRtoMNI305_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_file=dict(
-            argstr='%s',
-            extensions=None,
-            usedefault=True,
-        ),
-        reference_dir=dict(
-            mandatory=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        in_file=dict(argstr="%s", extensions=None, usedefault=True,),
+        reference_dir=dict(mandatory=True, usedefault=True,),
         subjects_dir=dict(),
-        target=dict(
-            mandatory=True,
-            usedefault=True,
-        ),
+        target=dict(mandatory=True, usedefault=True,),
     )
     inputs = MPRtoMNI305.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MPRtoMNI305_outputs():
     output_map = dict(
-        log_file=dict(
-            extensions=None,
-            usedefault=True,
-        ),
-        out_file=dict(extensions=None, ),
+        log_file=dict(extensions=None, usedefault=True,),
+        out_file=dict(extensions=None,),
     )
     outputs = MPRtoMNI305.output_spec()
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIConvert.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIConvert.py
index f5ab4ff630..b75f338f31 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MRIConvert.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIConvert.py
@@ -5,136 +5,107 @@
 def test_MRIConvert_inputs():
     input_map = dict(
         apply_inv_transform=dict(
-            argstr='--apply_inverse_transform %s',
-            extensions=None,
+            argstr="--apply_inverse_transform %s", extensions=None,
         ),
-        apply_transform=dict(
-            argstr='--apply_transform %s',
-            extensions=None,
-        ),
-        args=dict(argstr='%s', ),
-        ascii=dict(argstr='--ascii', ),
-        autoalign_matrix=dict(
-            argstr='--autoalign %s',
-            extensions=None,
-        ),
-        color_file=dict(
-            argstr='--color_file %s',
-            extensions=None,
-        ),
-        conform=dict(argstr='--conform', ),
-        conform_min=dict(argstr='--conform_min', ),
-        conform_size=dict(argstr='--conform_size %s', ),
-        crop_center=dict(argstr='--crop %d %d %d', ),
-        crop_gdf=dict(argstr='--crop_gdf', ),
-        crop_size=dict(argstr='--cropsize %d %d %d', ),
-        cut_ends=dict(argstr='--cutends %d', ),
-        cw256=dict(argstr='--cw256', ),
-        devolve_transform=dict(argstr='--devolvexfm %s', ),
-        drop_n=dict(argstr='--ndrop %d', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        fill_parcellation=dict(argstr='--fill_parcellation', ),
-        force_ras=dict(argstr='--force_ras_good', ),
-        frame=dict(argstr='--frame %d', ),
-        frame_subsample=dict(argstr='--fsubsample %d %d %d', ),
-        fwhm=dict(argstr='--fwhm %f', ),
-        in_center=dict(argstr='--in_center %s', ),
+        apply_transform=dict(argstr="--apply_transform %s", extensions=None,),
+        args=dict(argstr="%s",),
+        ascii=dict(argstr="--ascii",),
+        autoalign_matrix=dict(argstr="--autoalign %s", extensions=None,),
+        color_file=dict(argstr="--color_file %s", extensions=None,),
+        conform=dict(argstr="--conform",),
+        conform_min=dict(argstr="--conform_min",),
+        conform_size=dict(argstr="--conform_size %s",),
+        crop_center=dict(argstr="--crop %d %d %d",),
+        crop_gdf=dict(argstr="--crop_gdf",),
+        crop_size=dict(argstr="--cropsize %d %d %d",),
+        cut_ends=dict(argstr="--cutends %d",),
+        cw256=dict(argstr="--cw256",),
+        devolve_transform=dict(argstr="--devolvexfm %s",),
+        drop_n=dict(argstr="--ndrop %d",),
+        environ=dict(nohash=True, usedefault=True,),
+        fill_parcellation=dict(argstr="--fill_parcellation",),
+        force_ras=dict(argstr="--force_ras_good",),
+        frame=dict(argstr="--frame %d",),
+        frame_subsample=dict(argstr="--fsubsample %d %d %d",),
+        fwhm=dict(argstr="--fwhm %f",),
+        in_center=dict(argstr="--in_center %s",),
         in_file=dict(
-            argstr='--input_volume %s',
-            extensions=None,
-            mandatory=True,
-            position=-2,
-        ),
-        in_i_dir=dict(argstr='--in_i_direction %f %f %f', ),
-        in_i_size=dict(argstr='--in_i_size %d', ),
-        in_info=dict(argstr='--in_info', ),
-        in_j_dir=dict(argstr='--in_j_direction %f %f %f', ),
-        in_j_size=dict(argstr='--in_j_size %d', ),
-        in_k_dir=dict(argstr='--in_k_direction %f %f %f', ),
-        in_k_size=dict(argstr='--in_k_size %d', ),
-        in_like=dict(
-            argstr='--in_like %s',
-            extensions=None,
+            argstr="--input_volume %s", extensions=None, mandatory=True, position=-2,
         ),
-        in_matrix=dict(argstr='--in_matrix', ),
-        in_orientation=dict(argstr='--in_orientation %s', ),
-        in_scale=dict(argstr='--scale %f', ),
-        in_stats=dict(argstr='--in_stats', ),
-        in_type=dict(argstr='--in_type %s', ),
-        invert_contrast=dict(argstr='--invert_contrast %f', ),
-        midframe=dict(argstr='--mid-frame', ),
-        no_change=dict(argstr='--nochange', ),
-        no_scale=dict(argstr='--no_scale 1', ),
-        no_translate=dict(argstr='--no_translate', ),
-        no_write=dict(argstr='--no_write', ),
-        out_center=dict(argstr='--out_center %f %f %f', ),
-        out_datatype=dict(argstr='--out_data_type %s', ),
+        in_i_dir=dict(argstr="--in_i_direction %f %f %f",),
+        in_i_size=dict(argstr="--in_i_size %d",),
+        in_info=dict(argstr="--in_info",),
+        in_j_dir=dict(argstr="--in_j_direction %f %f %f",),
+        in_j_size=dict(argstr="--in_j_size %d",),
+        in_k_dir=dict(argstr="--in_k_direction %f %f %f",),
+        in_k_size=dict(argstr="--in_k_size %d",),
+        in_like=dict(argstr="--in_like %s", extensions=None,),
+        in_matrix=dict(argstr="--in_matrix",),
+        in_orientation=dict(argstr="--in_orientation %s",),
+        in_scale=dict(argstr="--scale %f",),
+        in_stats=dict(argstr="--in_stats",),
+        in_type=dict(argstr="--in_type %s",),
+        invert_contrast=dict(argstr="--invert_contrast %f",),
+        midframe=dict(argstr="--mid-frame",),
+        no_change=dict(argstr="--nochange",),
+        no_scale=dict(argstr="--no_scale 1",),
+        no_translate=dict(argstr="--no_translate",),
+        no_write=dict(argstr="--no_write",),
+        out_center=dict(argstr="--out_center %f %f %f",),
+        out_datatype=dict(argstr="--out_data_type %s",),
         out_file=dict(
-            argstr='--output_volume %s',
-            extensions=None,
-            genfile=True,
-            position=-1,
+            argstr="--output_volume %s", extensions=None, genfile=True, position=-1,
         ),
-        out_i_count=dict(argstr='--out_i_count %d', ),
-        out_i_dir=dict(argstr='--out_i_direction %f %f %f', ),
-        out_i_size=dict(argstr='--out_i_size %d', ),
-        out_info=dict(argstr='--out_info', ),
-        out_j_count=dict(argstr='--out_j_count %d', ),
-        out_j_dir=dict(argstr='--out_j_direction %f %f %f', ),
-        out_j_size=dict(argstr='--out_j_size %d', ),
-        out_k_count=dict(argstr='--out_k_count %d', ),
-        out_k_dir=dict(argstr='--out_k_direction %f %f %f', ),
-        out_k_size=dict(argstr='--out_k_size %d', ),
-        out_matrix=dict(argstr='--out_matrix', ),
-        out_orientation=dict(argstr='--out_orientation %s', ),
-        out_scale=dict(argstr='--out-scale %d', ),
-        out_stats=dict(argstr='--out_stats', ),
-        out_type=dict(argstr='--out_type %s', ),
-        parse_only=dict(argstr='--parse_only', ),
-        read_only=dict(argstr='--read_only', ),
-        reorder=dict(argstr='--reorder %d %d %d', ),
-        resample_type=dict(argstr='--resample_type %s', ),
-        reslice_like=dict(
-            argstr='--reslice_like %s',
-            extensions=None,
-        ),
-        sdcm_list=dict(
-            argstr='--sdcmlist %s',
-            extensions=None,
-        ),
-        skip_n=dict(argstr='--nskip %d', ),
-        slice_bias=dict(argstr='--slice-bias %f', ),
-        slice_crop=dict(argstr='--slice-crop %d %d', ),
-        slice_reverse=dict(argstr='--slice-reverse', ),
-        smooth_parcellation=dict(argstr='--smooth_parcellation', ),
-        sphinx=dict(argstr='--sphinx', ),
-        split=dict(argstr='--split', ),
-        status_file=dict(
-            argstr='--status %s',
-            extensions=None,
-        ),
-        subject_name=dict(argstr='--subject_name %s', ),
+        out_i_count=dict(argstr="--out_i_count %d",),
+        out_i_dir=dict(argstr="--out_i_direction %f %f %f",),
+        out_i_size=dict(argstr="--out_i_size %d",),
+        out_info=dict(argstr="--out_info",),
+        out_j_count=dict(argstr="--out_j_count %d",),
+        out_j_dir=dict(argstr="--out_j_direction %f %f %f",),
+        out_j_size=dict(argstr="--out_j_size %d",),
+        out_k_count=dict(argstr="--out_k_count %d",),
+        out_k_dir=dict(argstr="--out_k_direction %f %f %f",),
+        out_k_size=dict(argstr="--out_k_size %d",),
+        out_matrix=dict(argstr="--out_matrix",),
+        out_orientation=dict(argstr="--out_orientation %s",),
+        out_scale=dict(argstr="--out-scale %d",),
+        out_stats=dict(argstr="--out_stats",),
+        out_type=dict(argstr="--out_type %s",),
+        parse_only=dict(argstr="--parse_only",),
+        read_only=dict(argstr="--read_only",),
+        reorder=dict(argstr="--reorder %d %d %d",),
+        resample_type=dict(argstr="--resample_type %s",),
+        reslice_like=dict(argstr="--reslice_like %s", extensions=None,),
+        sdcm_list=dict(argstr="--sdcmlist %s", extensions=None,),
+        skip_n=dict(argstr="--nskip %d",),
+        slice_bias=dict(argstr="--slice-bias %f",),
+        slice_crop=dict(argstr="--slice-crop %d %d",),
+        slice_reverse=dict(argstr="--slice-reverse",),
+        smooth_parcellation=dict(argstr="--smooth_parcellation",),
+        sphinx=dict(argstr="--sphinx",),
+        split=dict(argstr="--split",),
+        status_file=dict(argstr="--status %s", extensions=None,),
+        subject_name=dict(argstr="--subject_name %s",),
         subjects_dir=dict(),
-        te=dict(argstr='-te %d', ),
-        template_info=dict(argstr='--template_info', ),
-        template_type=dict(argstr='--template_type %s', ),
-        ti=dict(argstr='-ti %d', ),
-        tr=dict(argstr='-tr %d', ),
-        unwarp_gradient=dict(argstr='--unwarp_gradient_nonlinearity', ),
-        vox_size=dict(argstr='-voxsize %f %f %f', ),
-        zero_ge_z_offset=dict(argstr='--zero_ge_z_offset', ),
-        zero_outlines=dict(argstr='--zero_outlines', ),
+        te=dict(argstr="-te %d",),
+        template_info=dict(argstr="--template_info",),
+        template_type=dict(argstr="--template_type %s",),
+        ti=dict(argstr="-ti %d",),
+        tr=dict(argstr="-tr %d",),
+        unwarp_gradient=dict(argstr="--unwarp_gradient_nonlinearity",),
+        vox_size=dict(argstr="-voxsize %f %f %f",),
+        zero_ge_z_offset=dict(argstr="--zero_ge_z_offset",),
+        zero_outlines=dict(argstr="--zero_outlines",),
     )
     inputs = MRIConvert.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MRIConvert_outputs():
-    output_map = dict(out_file=dict(), )
+    output_map = dict(out_file=dict(),)
     outputs = MRIConvert.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRICoreg.py b/nipype/interfaces/freesurfer/tests/test_auto_MRICoreg.py
index 39076d96a9..1cef259c82 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MRICoreg.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MRICoreg.py
@@ -4,81 +4,65 @@
 def test_MRICoreg_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        brute_force_limit=dict(
-            argstr='--bf-lim %g',
-            xor=['no_brute_force'],
-        ),
-        brute_force_samples=dict(
-            argstr='--bf-nsamp %d',
-            xor=['no_brute_force'],
-        ),
-        conform_reference=dict(argstr='--conf-ref', ),
-        dof=dict(argstr='--dof %d', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        ftol=dict(argstr='--ftol %e', ),
-        initial_rotation=dict(argstr='--rot %g %g %g', ),
-        initial_scale=dict(argstr='--scale %g %g %g', ),
-        initial_shear=dict(argstr='--shear %g %g %g', ),
-        initial_translation=dict(argstr='--trans %g %g %g', ),
-        linmintol=dict(argstr='--linmintol %e', ),
-        max_iters=dict(argstr='--nitersmax %d', ),
-        no_brute_force=dict(argstr='--no-bf', ),
-        no_coord_dithering=dict(argstr='--no-coord-dither', ),
-        no_cras0=dict(argstr='--no-cras0', ),
-        no_intensity_dithering=dict(argstr='--no-intensity-dither', ),
-        no_smooth=dict(argstr='--no-smooth', ),
-        num_threads=dict(argstr='--threads %d', ),
-        out_lta_file=dict(
-            argstr='--lta %s',
-            usedefault=True,
-        ),
-        out_params_file=dict(argstr='--params %s', ),
-        out_reg_file=dict(argstr='--regdat %s', ),
-        ref_fwhm=dict(argstr='--ref-fwhm', ),
+        args=dict(argstr="%s",),
+        brute_force_limit=dict(argstr="--bf-lim %g", xor=["no_brute_force"],),
+        brute_force_samples=dict(argstr="--bf-nsamp %d", xor=["no_brute_force"],),
+        conform_reference=dict(argstr="--conf-ref",),
+        dof=dict(argstr="--dof %d",),
+        environ=dict(nohash=True, usedefault=True,),
+        ftol=dict(argstr="--ftol %e",),
+        initial_rotation=dict(argstr="--rot %g %g %g",),
+        initial_scale=dict(argstr="--scale %g %g %g",),
+        initial_shear=dict(argstr="--shear %g %g %g",),
+        initial_translation=dict(argstr="--trans %g %g %g",),
+        linmintol=dict(argstr="--linmintol %e",),
+        max_iters=dict(argstr="--nitersmax %d",),
+        no_brute_force=dict(argstr="--no-bf",),
+        no_coord_dithering=dict(argstr="--no-coord-dither",),
+        no_cras0=dict(argstr="--no-cras0",),
+        no_intensity_dithering=dict(argstr="--no-intensity-dither",),
+        no_smooth=dict(argstr="--no-smooth",),
+        num_threads=dict(argstr="--threads %d",),
+        out_lta_file=dict(argstr="--lta %s", usedefault=True,),
+        out_params_file=dict(argstr="--params %s",),
+        out_reg_file=dict(argstr="--regdat %s",),
+        ref_fwhm=dict(argstr="--ref-fwhm",),
         reference_file=dict(
-            argstr='--ref %s',
+            argstr="--ref %s",
             copyfile=False,
             extensions=None,
             mandatory=True,
-            xor=['subject_id'],
-        ),
-        reference_mask=dict(
-            argstr='--ref-mask %s',
-            position=2,
+            xor=["subject_id"],
         ),
-        saturation_threshold=dict(argstr='--sat %g', ),
-        sep=dict(argstr='--sep %s...', ),
+        reference_mask=dict(argstr="--ref-mask %s", position=2,),
+        saturation_threshold=dict(argstr="--sat %g",),
+        sep=dict(argstr="--sep %s...",),
         source_file=dict(
-            argstr='--mov %s',
-            copyfile=False,
-            extensions=None,
-            mandatory=True,
+            argstr="--mov %s", copyfile=False, extensions=None, mandatory=True,
         ),
-        source_mask=dict(argstr='--mov-mask', ),
-        source_oob=dict(argstr='--mov-oob', ),
+        source_mask=dict(argstr="--mov-mask",),
+        source_oob=dict(argstr="--mov-oob",),
         subject_id=dict(
-            argstr='--s %s',
+            argstr="--s %s",
             mandatory=True,
             position=1,
-            requires=['subjects_dir'],
-            xor=['reference_file'],
+            requires=["subjects_dir"],
+            xor=["reference_file"],
         ),
-        subjects_dir=dict(argstr='--sd %s', ),
+        subjects_dir=dict(argstr="--sd %s",),
     )
     inputs = MRICoreg.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MRICoreg_outputs():
     output_map = dict(
-        out_lta_file=dict(extensions=None, ),
-        out_params_file=dict(extensions=None, ),
-        out_reg_file=dict(extensions=None, ),
+        out_lta_file=dict(extensions=None,),
+        out_params_file=dict(extensions=None,),
+        out_reg_file=dict(extensions=None,),
     )
     outputs = MRICoreg.output_spec()
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIFill.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIFill.py
index d70da67a50..c8a2f7090c 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MRIFill.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIFill.py
@@ -4,47 +4,24 @@
 def test_MRIFill_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_file=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-2,
-        ),
-        log_file=dict(
-            argstr='-a %s',
-            extensions=None,
-        ),
-        out_file=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-1,
-        ),
-        segmentation=dict(
-            argstr='-segmentation %s',
-            extensions=None,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,),
+        log_file=dict(argstr="-a %s", extensions=None,),
+        out_file=dict(argstr="%s", extensions=None, mandatory=True, position=-1,),
+        segmentation=dict(argstr="-segmentation %s", extensions=None,),
         subjects_dir=dict(),
-        transform=dict(
-            argstr='-xform %s',
-            extensions=None,
-        ),
+        transform=dict(argstr="-xform %s", extensions=None,),
     )
     inputs = MRIFill.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MRIFill_outputs():
-    output_map = dict(
-        log_file=dict(extensions=None, ),
-        out_file=dict(extensions=None, ),
-    )
+    output_map = dict(log_file=dict(extensions=None,), out_file=dict(extensions=None,),)
     outputs = MRIFill.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIMarchingCubes.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIMarchingCubes.py
index 2152aec1e5..25137a53a8 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MRIMarchingCubes.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIMarchingCubes.py
@@ -4,33 +4,12 @@
 def test_MRIMarchingCubes_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        connectivity_value=dict(
-            argstr='%d',
-            position=-1,
-            usedefault=True,
-        ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_file=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=1,
-        ),
-        label_value=dict(
-            argstr='%d',
-            mandatory=True,
-            position=2,
-        ),
-        out_file=dict(
-            argstr='./%s',
-            extensions=None,
-            genfile=True,
-            position=-2,
-        ),
+        args=dict(argstr="%s",),
+        connectivity_value=dict(argstr="%d", position=-1, usedefault=True,),
+        environ=dict(nohash=True, usedefault=True,),
+        in_file=dict(argstr="%s", extensions=None, mandatory=True, position=1,),
+        label_value=dict(argstr="%d", mandatory=True, position=2,),
+        out_file=dict(argstr="./%s", extensions=None, genfile=True, position=-2,),
         subjects_dir=dict(),
     )
     inputs = MRIMarchingCubes.input_spec()
@@ -38,8 +17,10 @@ def test_MRIMarchingCubes_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MRIMarchingCubes_outputs():
-    output_map = dict(surface=dict(extensions=None, ), )
+    output_map = dict(surface=dict(extensions=None,),)
     outputs = MRIMarchingCubes.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIPretess.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIPretess.py
index 578d09d9fb..195472d4ad 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MRIPretess.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIPretess.py
@@ -4,49 +4,33 @@
 def test_MRIPretess_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_filled=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-4,
-        ),
-        in_norm=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-2,
-        ),
-        keep=dict(argstr='-keep', ),
-        label=dict(
-            argstr='%s',
-            mandatory=True,
-            position=-3,
-            usedefault=True,
-        ),
-        nocorners=dict(argstr='-nocorners', ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        in_filled=dict(argstr="%s", extensions=None, mandatory=True, position=-4,),
+        in_norm=dict(argstr="%s", extensions=None, mandatory=True, position=-2,),
+        keep=dict(argstr="-keep",),
+        label=dict(argstr="%s", mandatory=True, position=-3, usedefault=True,),
+        nocorners=dict(argstr="-nocorners",),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
             extensions=None,
             keep_extension=True,
-            name_source=['in_filled'],
-            name_template='%s_pretesswm',
+            name_source=["in_filled"],
+            name_template="%s_pretesswm",
             position=-1,
         ),
         subjects_dir=dict(),
-        test=dict(argstr='-test', ),
+        test=dict(argstr="-test",),
    )
     inputs = MRIPretess.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MRIPretess_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = MRIPretess.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRISPreproc.py b/nipype/interfaces/freesurfer/tests/test_auto_MRISPreproc.py
index fab549d02f..03d9ccd2e4 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MRISPreproc.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MRISPreproc.py
@@ -4,80 +4,53 @@
 def test_MRISPreproc_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
         fsgd_file=dict(
-            argstr='--fsgd %s',
-            extensions=None,
-            xor=('subjects', 'fsgd_file', 'subject_file'),
-        ),
-        fwhm=dict(
-            argstr='--fwhm %f',
-            xor=['num_iters'],
-        ),
-        fwhm_source=dict(
-            argstr='--fwhm-src %f',
-            xor=['num_iters_source'],
-        ),
-        hemi=dict(
-            argstr='--hemi %s',
-            mandatory=True,
-        ),
-        num_iters=dict(
-            argstr='--niters %d',
-            xor=['fwhm'],
-        ),
-        num_iters_source=dict(
-            argstr='--niterssrc %d',
-            xor=['fwhm_source'],
-        ),
-        out_file=dict(
-            argstr='--out %s',
+            argstr="--fsgd %s",
             extensions=None,
-            genfile=True,
-        ),
-        proj_frac=dict(argstr='--projfrac %s', ),
-        smooth_cortex_only=dict(argstr='--smooth-cortex-only', ),
-        source_format=dict(argstr='--srcfmt %s', ),
+            xor=("subjects", "fsgd_file", "subject_file"),
+        ),
+        fwhm=dict(argstr="--fwhm %f", xor=["num_iters"],),
+        fwhm_source=dict(argstr="--fwhm-src %f", xor=["num_iters_source"],),
+        hemi=dict(argstr="--hemi %s", mandatory=True,),
+        num_iters=dict(argstr="--niters %d", xor=["fwhm"],),
+        num_iters_source=dict(argstr="--niterssrc %d", xor=["fwhm_source"],),
+        out_file=dict(argstr="--out %s", extensions=None, genfile=True,),
+        proj_frac=dict(argstr="--projfrac %s",),
+        smooth_cortex_only=dict(argstr="--smooth-cortex-only",),
+        source_format=dict(argstr="--srcfmt %s",),
         subject_file=dict(
-            argstr='--f %s',
+            argstr="--f %s",
             extensions=None,
-            xor=('subjects', 'fsgd_file', 'subject_file'),
+            xor=("subjects", "fsgd_file", "subject_file"),
         ),
         subjects=dict(
-            argstr='--s %s...',
-            xor=('subjects', 'fsgd_file', 'subject_file'),
+            argstr="--s %s...", xor=("subjects", "fsgd_file", "subject_file"),
         ),
         subjects_dir=dict(),
         surf_area=dict(
-            argstr='--area %s',
-            xor=('surf_measure', 'surf_measure_file', 'surf_area'),
+            argstr="--area %s", xor=("surf_measure", "surf_measure_file", "surf_area"),
         ),
-        surf_dir=dict(argstr='--surfdir %s', ),
+        surf_dir=dict(argstr="--surfdir %s",),
         surf_measure=dict(
-            argstr='--meas %s',
-            xor=('surf_measure', 'surf_measure_file', 'surf_area'),
+            argstr="--meas %s", xor=("surf_measure", "surf_measure_file", "surf_area"),
         ),
         surf_measure_file=dict(
-            argstr='--is %s...',
-            xor=('surf_measure', 'surf_measure_file', 'surf_area'),
+            argstr="--is %s...", xor=("surf_measure", "surf_measure_file", "surf_area"),
         ),
-        target=dict(
-            argstr='--target %s',
-            mandatory=True,
-        ),
-        vol_measure_file=dict(argstr='--iv %s %s...', ),
+        target=dict(argstr="--target %s", mandatory=True,),
+        vol_measure_file=dict(argstr="--iv %s %s...",),
     )
     inputs = MRISPreproc.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MRISPreproc_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = MRISPreproc.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRISPreprocReconAll.py b/nipype/interfaces/freesurfer/tests/test_auto_MRISPreprocReconAll.py
index d7a138015a..5a7a711263 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MRISPreprocReconAll.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MRISPreprocReconAll.py
@@ -4,99 +4,66 @@
 def test_MRISPreprocReconAll_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(argstr="%s",),
         copy_inputs=dict(),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        environ=dict(nohash=True, usedefault=True,),
         fsgd_file=dict(
-            argstr='--fsgd %s',
+            argstr="--fsgd %s",
             extensions=None,
-            xor=('subjects', 'fsgd_file', 'subject_file'),
-        ),
-        fwhm=dict(
-            argstr='--fwhm %f',
-            xor=['num_iters'],
-        ),
-        fwhm_source=dict(
-            argstr='--fwhm-src %f',
-            xor=['num_iters_source'],
-        ),
-        hemi=dict(
-            argstr='--hemi %s',
-            mandatory=True,
-        ),
-        lh_surfreg_target=dict(
-            extensions=None,
-            requires=['surfreg_files'],
-        ),
-        num_iters=dict(
-            argstr='--niters %d',
-            xor=['fwhm'],
-        ),
-        num_iters_source=dict(
-            argstr='--niterssrc %d',
-            xor=['fwhm_source'],
-        ),
-        out_file=dict(
-            argstr='--out %s',
-            extensions=None,
-            genfile=True,
-        ),
-        proj_frac=dict(argstr='--projfrac %s', ),
-        rh_surfreg_target=dict(
-            extensions=None,
-            requires=['surfreg_files'],
-        ),
-        smooth_cortex_only=dict(argstr='--smooth-cortex-only', ),
-        source_format=dict(argstr='--srcfmt %s', ),
+            xor=("subjects", "fsgd_file", "subject_file"),
+        ),
+        fwhm=dict(argstr="--fwhm %f", xor=["num_iters"],),
+        fwhm_source=dict(argstr="--fwhm-src %f", xor=["num_iters_source"],),
+        hemi=dict(argstr="--hemi %s", mandatory=True,),
+        lh_surfreg_target=dict(extensions=None, requires=["surfreg_files"],),
+        num_iters=dict(argstr="--niters %d", xor=["fwhm"],),
+        num_iters_source=dict(argstr="--niterssrc %d", xor=["fwhm_source"],),
+        out_file=dict(argstr="--out %s", extensions=None, genfile=True,),
+        proj_frac=dict(argstr="--projfrac %s",),
+        rh_surfreg_target=dict(extensions=None, requires=["surfreg_files"],),
+        smooth_cortex_only=dict(argstr="--smooth-cortex-only",),
+        source_format=dict(argstr="--srcfmt %s",),
         subject_file=dict(
-            argstr='--f %s',
+            argstr="--f %s",
             extensions=None,
-            xor=('subjects', 'fsgd_file', 'subject_file'),
+            xor=("subjects", "fsgd_file", "subject_file"),
         ),
         subject_id=dict(
-            argstr='--s %s',
+            argstr="--s %s",
             usedefault=True,
-            xor=('subjects', 'fsgd_file', 'subject_file', 'subject_id'),
+            xor=("subjects", "fsgd_file", "subject_file", "subject_id"),
         ),
         subjects=dict(
-            argstr='--s %s...',
-            xor=('subjects', 'fsgd_file', 'subject_file'),
+            argstr="--s %s...", xor=("subjects", "fsgd_file", "subject_file"),
         ),
         subjects_dir=dict(),
         surf_area=dict(
-            argstr='--area %s',
-            xor=('surf_measure', 'surf_measure_file', 'surf_area'),
+            argstr="--area %s", xor=("surf_measure", "surf_measure_file", "surf_area"),
         ),
-        surf_dir=dict(argstr='--surfdir %s', ),
+        surf_dir=dict(argstr="--surfdir %s",),
         surf_measure=dict(
-            argstr='--meas %s',
-            xor=('surf_measure', 'surf_measure_file', 'surf_area'),
+            argstr="--meas %s", xor=("surf_measure", "surf_measure_file", "surf_area"),
        ),
         surf_measure_file=dict(
-            argstr='--meas %s',
+            argstr="--meas %s",
             extensions=None,
-            xor=('surf_measure', 'surf_measure_file', 'surf_area'),
+            xor=("surf_measure", "surf_measure_file", "surf_area"),
         ),
         surfreg_files=dict(
-            argstr='--surfreg %s',
-            requires=['lh_surfreg_target', 'rh_surfreg_target'],
-        ),
-        target=dict(
-            argstr='--target %s',
-            mandatory=True,
+            argstr="--surfreg %s", requires=["lh_surfreg_target", "rh_surfreg_target"],
         ),
-        vol_measure_file=dict(argstr='--iv %s %s...', ),
+        target=dict(argstr="--target %s", mandatory=True,),
+        vol_measure_file=dict(argstr="--iv %s %s...",),
     )
     inputs = MRISPreprocReconAll.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MRISPreprocReconAll_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = MRISPreprocReconAll.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRITessellate.py b/nipype/interfaces/freesurfer/tests/test_auto_MRITessellate.py
index 464eddb043..8bba694bf7 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MRITessellate.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MRITessellate.py
@@ -4,39 +4,24 @@
 def test_MRITessellate_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_file=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-3,
-        ),
-        label_value=dict(
-            argstr='%d',
-            mandatory=True,
-            position=-2,
-        ),
-        out_file=dict(
-            argstr='%s',
-            extensions=None,
-            genfile=True,
-            position=-1,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-3,),
+        label_value=dict(argstr="%d", mandatory=True, position=-2,),
+        out_file=dict(argstr="%s", extensions=None, genfile=True, position=-1,),
         subjects_dir=dict(),
-        tesselate_all_voxels=dict(argstr='-a', ),
-        use_real_RAS_coordinates=dict(argstr='-n', ),
+        tesselate_all_voxels=dict(argstr="-a",),
+        use_real_RAS_coordinates=dict(argstr="-n",),
     )
     inputs = MRITessellate.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MRITessellate_outputs():
-    output_map = dict(surface=dict(extensions=None, ), )
+    output_map = dict(surface=dict(extensions=None,),)
     outputs = MRITessellate.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIsCALabel.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIsCALabel.py
index 0bac91b343..560f7e4fce 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MRIsCALabel.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIsCALabel.py
@@ -4,75 +4,40 @@
 def test_MRIsCALabel_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        aseg=dict(
-            argstr='-aseg %s',
-            extensions=None,
-        ),
-        canonsurf=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-3,
-        ),
-        classifier=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-2,
-        ),
+        args=dict(argstr="%s",),
+        aseg=dict(argstr="-aseg %s", extensions=None,),
+        canonsurf=dict(argstr="%s", extensions=None, mandatory=True, position=-3,),
+        classifier=dict(argstr="%s", extensions=None, mandatory=True, position=-2,),
         copy_inputs=dict(),
-        curv=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        hemisphere=dict(
-            argstr='%s',
-            mandatory=True,
-            position=-4,
-        ),
-        label=dict(
-            argstr='-l %s',
-            extensions=None,
-        ),
+        curv=dict(extensions=None, mandatory=True,),
+        environ=dict(nohash=True, usedefault=True,),
+        hemisphere=dict(argstr="%s", mandatory=True, position=-4,),
+        label=dict(argstr="-l %s", extensions=None,),
         num_threads=dict(),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
             extensions=None,
             hash_files=False,
             keep_extension=True,
-            name_source=['hemisphere'],
-            name_template='%s.aparc.annot',
+            name_source=["hemisphere"],
+            name_template="%s.aparc.annot",
             position=-1,
         ),
-        seed=dict(argstr='-seed %d', ),
-        smoothwm=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        subject_id=dict(
-            argstr='%s',
-            mandatory=True,
-            position=-5,
-            usedefault=True,
-        ),
+        seed=dict(argstr="-seed %d",),
+        smoothwm=dict(extensions=None, mandatory=True,),
+        subject_id=dict(argstr="%s", mandatory=True, position=-5, usedefault=True,),
         subjects_dir=dict(),
-        sulc=dict(
-            extensions=None,
-            mandatory=True,
-        ),
+        sulc=dict(extensions=None, mandatory=True,),
     )
     inputs = MRIsCALabel.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MRIsCALabel_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = MRIsCALabel.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIsCalc.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIsCalc.py
index be72b4814f..521c1d5d6c 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MRIsCalc.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIsCalc.py
@@ -4,43 +4,16 @@
 def test_MRIsCalc_inputs():
     input_map = dict(
-        action=dict(
-            argstr='%s',
-            mandatory=True,
-            position=-2,
-        ),
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_file1=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-3,
-        ),
+        action=dict(argstr="%s", mandatory=True, position=-2,),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        in_file1=dict(argstr="%s", extensions=None, mandatory=True, position=-3,),
         in_file2=dict(
-            argstr='%s',
-            extensions=None,
-            position=-1,
-            xor=['in_float', 'in_int'],
-        ),
-        in_float=dict(
-            argstr='%f',
-            position=-1,
-            xor=['in_file2', 'in_int'],
-        ),
-        in_int=dict(
-            argstr='%d',
-            position=-1,
-            xor=['in_file2', 'in_float'],
-        ),
-        out_file=dict(
-            argstr='-o %s',
-            extensions=None,
-            mandatory=True,
+            argstr="%s", extensions=None, position=-1, xor=["in_float", "in_int"],
         ),
+        in_float=dict(argstr="%f", position=-1, xor=["in_file2", "in_int"],),
+        in_int=dict(argstr="%d", position=-1, xor=["in_file2", "in_float"],),
+        out_file=dict(argstr="-o %s", extensions=None, mandatory=True,),
         subjects_dir=dict(),
     )
     inputs = MRIsCalc.input_spec()
@@ -48,8 +21,10 @@ def test_MRIsCalc_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MRIsCalc_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = MRIsCalc.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIsCombine.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIsCombine.py
index 9dd2993621..56fd270efc 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MRIsCombine.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIsCombine.py
@@ -4,22 +4,11 @@
 def test_MRIsCombine_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_files=dict(
-            argstr='--combinesurfs %s',
-            mandatory=True,
-            position=1,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        in_files=dict(argstr="--combinesurfs %s", mandatory=True, position=1,),
         out_file=dict(
-            argstr='%s',
-            extensions=None,
-            genfile=True,
-            mandatory=True,
-            position=-1,
+            argstr="%s", extensions=None, genfile=True, mandatory=True, position=-1,
         ),
         subjects_dir=dict(),
     )
@@ -28,8 +17,10 @@ def test_MRIsCombine_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MRIsCombine_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = MRIsCombine.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIsConvert.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIsConvert.py
index 4c84120d7e..6972ae4f33 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MRIsConvert.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIsConvert.py
@@ -4,73 +4,46 @@
 def test_MRIsConvert_inputs():
     input_map = dict(
-        annot_file=dict(
-            argstr='--annot %s',
-            extensions=None,
-        ),
-        args=dict(argstr='%s', ),
-        dataarray_num=dict(argstr='--da_num %d', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        functional_file=dict(
-            argstr='-f %s',
-            extensions=None,
-        ),
-        in_file=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-2,
-        ),
-        label_file=dict(
-            argstr='--label %s',
-            extensions=None,
-        ),
-        labelstats_outfile=dict(
-            argstr='--labelstats %s',
-            extensions=None,
-        ),
-        normal=dict(argstr='-n', ),
-        origname=dict(argstr='-o %s', ),
-        out_datatype=dict(
-            mandatory=True,
-            xor=['out_file'],
-        ),
+        annot_file=dict(argstr="--annot %s", extensions=None,),
+        args=dict(argstr="%s",),
+        dataarray_num=dict(argstr="--da_num %d",),
+        environ=dict(nohash=True, usedefault=True,),
+        functional_file=dict(argstr="-f %s", extensions=None,),
+        in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,),
+        label_file=dict(argstr="--label %s", extensions=None,),
+        labelstats_outfile=dict(argstr="--labelstats %s", extensions=None,),
+        normal=dict(argstr="-n",),
+        origname=dict(argstr="-o %s",),
+        out_datatype=dict(mandatory=True, xor=["out_file"],),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
             extensions=None,
             genfile=True,
             mandatory=True,
             position=-1,
-            xor=['out_datatype'],
-        ),
-        parcstats_file=dict(
-            argstr='--parcstats %s',
-            extensions=None,
-        ),
-        patch=dict(argstr='-p', ),
-        rescale=dict(argstr='-r', ),
-        scalarcurv_file=dict(
-            argstr='-c %s',
-            extensions=None,
+            xor=["out_datatype"],
         ),
-        scale=dict(argstr='-s %.3f', ),
+        parcstats_file=dict(argstr="--parcstats %s", extensions=None,),
+        patch=dict(argstr="-p",),
+        rescale=dict(argstr="-r",),
+        scalarcurv_file=dict(argstr="-c %s", extensions=None,),
+        scale=dict(argstr="-s %.3f",),
         subjects_dir=dict(),
-        talairachxfm_subjid=dict(argstr='-t %s', ),
-        to_scanner=dict(argstr='--to-scanner', ),
-        to_tkr=dict(argstr='--to-tkr', ),
-        vertex=dict(argstr='-v', ),
-        xyz_ascii=dict(argstr='-a', ),
+        talairachxfm_subjid=dict(argstr="-t %s",),
+        to_scanner=dict(argstr="--to-scanner",),
+        to_tkr=dict(argstr="--to-tkr",),
+        vertex=dict(argstr="-v",),
+        xyz_ascii=dict(argstr="-a",),
     )
     inputs = MRIsConvert.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MRIsConvert_outputs():
-    output_map = dict(converted=dict(extensions=None, ), )
+    output_map = dict(converted=dict(extensions=None,),)
     outputs = MRIsConvert.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIsExpand.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIsExpand.py
index 0b0297bcdd..b2d97f0d48 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MRIsExpand.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIsExpand.py
@@ -4,55 +4,33 @@
 def test_MRIsExpand_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        distance=dict(
-            argstr='%g',
-            mandatory=True,
-            position=-2,
-        ),
-        dt=dict(argstr='-T %g', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        distance=dict(argstr="%g", mandatory=True, position=-2,),
+        dt=dict(argstr="-T %g",),
+        environ=dict(nohash=True, usedefault=True,),
         in_file=dict(
-            argstr='%s',
-            copyfile=False,
-            extensions=None,
-            mandatory=True,
-            position=-3,
-        ),
-        nsurfaces=dict(argstr='-N %d', ),
-        out_name=dict(
-            argstr='%s',
-            position=-1,
-            usedefault=True,
-        ),
-        pial=dict(
-            argstr='-pial %s',
-            copyfile=False,
+            argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-3,
         ),
-        smooth_averages=dict(argstr='-A %d', ),
-        sphere=dict(
-            copyfile=False,
-            usedefault=True,
-        ),
-        spring=dict(argstr='-S %g', ),
+        nsurfaces=dict(argstr="-N %d",),
+        out_name=dict(argstr="%s", position=-1, usedefault=True,),
+        pial=dict(argstr="-pial %s", copyfile=False,),
+        smooth_averages=dict(argstr="-A %d",),
+        sphere=dict(copyfile=False, usedefault=True,),
+        spring=dict(argstr="-S %g",),
         subjects_dir=dict(),
-        thickness=dict(argstr='-thickness', ),
-        thickness_name=dict(
-            argstr='-thickness_name %s',
-            copyfile=False,
-        ),
-        write_iterations=dict(argstr='-W %d', ),
+        thickness=dict(argstr="-thickness",),
+        thickness_name=dict(argstr="-thickness_name %s", copyfile=False,),
+        write_iterations=dict(argstr="-W %d",),
     )
     inputs = MRIsExpand.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MRIsExpand_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = MRIsExpand.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIsInflate.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIsInflate.py
index 993d578485..aead890eff 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MRIsInflate.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIsInflate.py
@@ -4,35 +4,22 @@
 def test_MRIsInflate_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
         in_file=dict(
-            argstr='%s',
-            copyfile=True,
-            extensions=None,
-            mandatory=True,
-            position=-2,
-        ),
-        no_save_sulc=dict(
-            argstr='-no-save-sulc',
-            xor=['out_sulc'],
+            argstr="%s", copyfile=True, extensions=None, mandatory=True, position=-2,
         ),
+        no_save_sulc=dict(argstr="-no-save-sulc", xor=["out_sulc"],),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
             extensions=None,
             hash_files=False,
             keep_extension=True,
-            name_source=['in_file'],
-            name_template='%s.inflated',
+            name_source=["in_file"],
+            name_template="%s.inflated",
             position=-1,
         ),
-        out_sulc=dict(
-            extensions=None,
-            xor=['no_save_sulc'],
-        ),
+        out_sulc=dict(extensions=None, xor=["no_save_sulc"],),
         subjects_dir=dict(),
     )
     inputs = MRIsInflate.input_spec()
@@ -40,11 +27,10 @@ def test_MRIsInflate_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MRIsInflate_outputs():
-    output_map = dict(
-        out_file=dict(extensions=None, ),
-        out_sulc=dict(extensions=None, ),
-    )
+    output_map = dict(out_file=dict(extensions=None,), out_sulc=dict(extensions=None,),)
     outputs = MRIsInflate.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MS_LDA.py b/nipype/interfaces/freesurfer/tests/test_auto_MS_LDA.py
index eb1fbe4043..47575cf851 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MS_LDA.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MS_LDA.py
@@ -4,54 +4,29 @@
 def test_MS_LDA_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        conform=dict(argstr='-conform', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        images=dict(
-            argstr='%s',
-            copyfile=False,
-            mandatory=True,
-            position=-1,
-        ),
-        label_file=dict(
-            argstr='-label %s',
-            extensions=None,
-        ),
-        lda_labels=dict(
-            argstr='-lda %s',
-            mandatory=True,
-            sep=' ',
-        ),
-        mask_file=dict(
-            argstr='-mask %s',
-            extensions=None,
-        ),
-        shift=dict(argstr='-shift %d', ),
+        args=dict(argstr="%s",),
+        conform=dict(argstr="-conform",),
+        environ=dict(nohash=True, usedefault=True,),
+        images=dict(argstr="%s", copyfile=False, mandatory=True, position=-1,),
+        label_file=dict(argstr="-label %s", extensions=None,),
+        lda_labels=dict(argstr="-lda %s", mandatory=True, sep=" ",),
+        mask_file=dict(argstr="-mask %s", extensions=None,),
+        shift=dict(argstr="-shift %d",),
         subjects_dir=dict(),
-        use_weights=dict(argstr='-W', ),
-        vol_synth_file=dict(
-            argstr='-synth %s',
-            extensions=None,
-            mandatory=True,
-        ),
-        weight_file=dict(
-            argstr='-weight %s',
-            extensions=None,
-            mandatory=True,
-        ),
+        use_weights=dict(argstr="-W",),
+        vol_synth_file=dict(argstr="-synth %s", extensions=None, mandatory=True,),
+        weight_file=dict(argstr="-weight %s", extensions=None, mandatory=True,),
     )
     inputs = MS_LDA.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MS_LDA_outputs():
     output_map = dict(
-        vol_synth_file=dict(extensions=None, ),
-        weight_file=dict(extensions=None, ),
+        vol_synth_file=dict(extensions=None,), weight_file=dict(extensions=None,),
     )
     outputs = MS_LDA.output_spec()
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MakeAverageSubject.py b/nipype/interfaces/freesurfer/tests/test_auto_MakeAverageSubject.py
index 48002e6051..3f7b6ac9ab 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MakeAverageSubject.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MakeAverageSubject.py
@@ -4,30 +4,21 @@
 def test_MakeAverageSubject_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        out_name=dict(
-            argstr='--out %s',
-            extensions=None,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        out_name=dict(argstr="--out %s", extensions=None, usedefault=True,),
         subjects_dir=dict(),
-        subjects_ids=dict(
-            argstr='--subjects %s',
-            mandatory=True,
-            sep=' ',
-        ),
+        subjects_ids=dict(argstr="--subjects %s", mandatory=True, sep=" ",),
     )
     inputs = MakeAverageSubject.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MakeAverageSubject_outputs():
-    output_map = dict(average_subject_name=dict(), )
+    output_map = dict(average_subject_name=dict(),)
     outputs = MakeAverageSubject.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MakeSurfaces.py b/nipype/interfaces/freesurfer/tests/test_auto_MakeSurfaces.py
index 9cf56f1222..219150aef9 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_MakeSurfaces.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_MakeSurfaces.py
@@ -4,84 +4,45 @@
 def test_MakeSurfaces_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(argstr="%s",),
         copy_inputs=dict(),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        fix_mtl=dict(argstr='-fix_mtl', ),
-        hemisphere=dict(
-            argstr='%s',
-            mandatory=True,
-            position=-1,
-        ),
-        in_T1=dict(
-            argstr='-T1 %s',
-            extensions=None,
-        ),
-        in_aseg=dict(
-            argstr='-aseg %s',
-            extensions=None,
-        ),
-        in_filled=dict(
extensions=None, - mandatory=True, - ), - in_label=dict( - extensions=None, - xor=['noaparc'], - ), - in_orig=dict( - argstr='-orig %s', - extensions=None, - mandatory=True, - ), - in_white=dict(extensions=None, ), - in_wm=dict( - extensions=None, - mandatory=True, - ), - longitudinal=dict(argstr='-long', ), - maximum=dict(argstr='-max %.1f', ), - mgz=dict(argstr='-mgz', ), - no_white=dict(argstr='-nowhite', ), - noaparc=dict( - argstr='-noaparc', - xor=['in_label'], - ), - orig_pial=dict( - argstr='-orig_pial %s', - extensions=None, - requires=['in_label'], - ), - orig_white=dict( - argstr='-orig_white %s', - extensions=None, - ), - subject_id=dict( - argstr='%s', - mandatory=True, - position=-2, - usedefault=True, - ), + environ=dict(nohash=True, usedefault=True,), + fix_mtl=dict(argstr="-fix_mtl",), + hemisphere=dict(argstr="%s", mandatory=True, position=-1,), + in_T1=dict(argstr="-T1 %s", extensions=None,), + in_aseg=dict(argstr="-aseg %s", extensions=None,), + in_filled=dict(extensions=None, mandatory=True,), + in_label=dict(extensions=None, xor=["noaparc"],), + in_orig=dict(argstr="-orig %s", extensions=None, mandatory=True,), + in_white=dict(extensions=None,), + in_wm=dict(extensions=None, mandatory=True,), + longitudinal=dict(argstr="-long",), + maximum=dict(argstr="-max %.1f",), + mgz=dict(argstr="-mgz",), + no_white=dict(argstr="-nowhite",), + noaparc=dict(argstr="-noaparc", xor=["in_label"],), + orig_pial=dict(argstr="-orig_pial %s", extensions=None, requires=["in_label"],), + orig_white=dict(argstr="-orig_white %s", extensions=None,), + subject_id=dict(argstr="%s", mandatory=True, position=-2, usedefault=True,), subjects_dir=dict(), - white=dict(argstr='-white %s', ), - white_only=dict(argstr='-whiteonly', ), + white=dict(argstr="-white %s",), + white_only=dict(argstr="-whiteonly",), ) inputs = MakeSurfaces.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MakeSurfaces_outputs(): output_map = dict( - out_area=dict(extensions=None, ), - out_cortex=dict(extensions=None, ), - out_curv=dict(extensions=None, ), - out_pial=dict(extensions=None, ), - out_thickness=dict(extensions=None, ), - out_white=dict(extensions=None, ), + out_area=dict(extensions=None,), + out_cortex=dict(extensions=None,), + out_curv=dict(extensions=None,), + out_pial=dict(extensions=None,), + out_thickness=dict(extensions=None,), + out_white=dict(extensions=None,), ) outputs = MakeSurfaces.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Normalize.py b/nipype/interfaces/freesurfer/tests/test_auto_Normalize.py index 905a8929cd..f639141960 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Normalize.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Normalize.py @@ -4,45 +4,33 @@ def test_Normalize_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - gradient=dict(argstr='-g %d', ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - mask=dict( - argstr='-mask %s', - extensions=None, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + gradient=dict(argstr="-g %d",), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + mask=dict(argstr="-mask %s", extensions=None,), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, hash_files=False, keep_extension=True, - name_source=['in_file'], - 
name_template='%s_norm', + name_source=["in_file"], + name_template="%s_norm", position=-1, ), - segmentation=dict( - argstr='-aseg %s', - extensions=None, - ), + segmentation=dict(argstr="-aseg %s", extensions=None,), subjects_dir=dict(), - transform=dict(extensions=None, ), + transform=dict(extensions=None,), ) inputs = Normalize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Normalize_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Normalize.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py b/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py index 712b4cfaac..da476e1cb3 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py @@ -4,128 +4,72 @@ def test_OneSampleTTest_inputs(): input_map = dict( - allow_ill_cond=dict(argstr='--illcond', ), - allow_repeated_subjects=dict(argstr='--allowsubjrep', ), - args=dict(argstr='%s', ), - calc_AR1=dict(argstr='--tar1', ), - check_opts=dict(argstr='--checkopts', ), - compute_log_y=dict(argstr='--logy', ), - contrast=dict(argstr='--C %s...', ), - cortex=dict( - argstr='--cortex', - xor=['label_file'], - ), - debug=dict(argstr='--debug', ), + allow_ill_cond=dict(argstr="--illcond",), + allow_repeated_subjects=dict(argstr="--allowsubjrep",), + args=dict(argstr="%s",), + calc_AR1=dict(argstr="--tar1",), + check_opts=dict(argstr="--checkopts",), + compute_log_y=dict(argstr="--logy",), + contrast=dict(argstr="--C %s...",), + cortex=dict(argstr="--cortex", xor=["label_file"],), + debug=dict(argstr="--debug",), design=dict( - argstr='--X %s', - extensions=None, - xor=('fsgd', 'design', 'one_sample'), - ), - diag=dict(argstr='--diag %d', ), - diag_cluster=dict(argstr='--diag-cluster', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixed_fx_dof=dict( - argstr='--ffxdof %d', - xor=['fixed_fx_dof_file'], + argstr="--X %s", extensions=None, xor=("fsgd", "design", "one_sample"), ), + diag=dict(argstr="--diag %d",), + diag_cluster=dict(argstr="--diag-cluster",), + environ=dict(nohash=True, usedefault=True,), + fixed_fx_dof=dict(argstr="--ffxdof %d", xor=["fixed_fx_dof_file"],), fixed_fx_dof_file=dict( - argstr='--ffxdofdat %d', - extensions=None, - xor=['fixed_fx_dof'], - ), - fixed_fx_var=dict( - argstr='--yffxvar %s', - extensions=None, - ), - force_perm=dict(argstr='--perm-force', ), - fsgd=dict( - argstr='--fsgd %s %s', - xor=('fsgd', 'design', 'one_sample'), - ), - fwhm=dict(argstr='--fwhm %f', ), - glm_dir=dict( - argstr='--glmdir %s', - genfile=True, + argstr="--ffxdofdat %d", extensions=None, xor=["fixed_fx_dof"], ), + fixed_fx_var=dict(argstr="--yffxvar %s", extensions=None,), + force_perm=dict(argstr="--perm-force",), + fsgd=dict(argstr="--fsgd %s %s", xor=("fsgd", "design", "one_sample"),), + fwhm=dict(argstr="--fwhm %f",), + glm_dir=dict(argstr="--glmdir %s", genfile=True,), hemi=dict(), - in_file=dict( - argstr='--y %s', - copyfile=False, - extensions=None, - mandatory=True, - ), - invert_mask=dict(argstr='--mask-inv', ), - label_file=dict( - argstr='--label %s', - extensions=None, - xor=['cortex'], - ), - mask_file=dict( - argstr='--mask %s', - extensions=None, - ), - no_contrast_ok=dict(argstr='--no-contrasts-ok', ), - 
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py b/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py
index 712b4cfaac..da476e1cb3 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py
@@ -4,128 +4,72 @@
 def test_OneSampleTTest_inputs():
     input_map = dict(
-        allow_ill_cond=dict(argstr='--illcond', ),
-        allow_repeated_subjects=dict(argstr='--allowsubjrep', ),
-        args=dict(argstr='%s', ),
-        calc_AR1=dict(argstr='--tar1', ),
-        check_opts=dict(argstr='--checkopts', ),
-        compute_log_y=dict(argstr='--logy', ),
-        contrast=dict(argstr='--C %s...', ),
-        cortex=dict(
-            argstr='--cortex',
-            xor=['label_file'],
-        ),
-        debug=dict(argstr='--debug', ),
+        allow_ill_cond=dict(argstr="--illcond",),
+        allow_repeated_subjects=dict(argstr="--allowsubjrep",),
+        args=dict(argstr="%s",),
+        calc_AR1=dict(argstr="--tar1",),
+        check_opts=dict(argstr="--checkopts",),
+        compute_log_y=dict(argstr="--logy",),
+        contrast=dict(argstr="--C %s...",),
+        cortex=dict(argstr="--cortex", xor=["label_file"],),
+        debug=dict(argstr="--debug",),
         design=dict(
-            argstr='--X %s',
-            extensions=None,
-            xor=('fsgd', 'design', 'one_sample'),
-        ),
-        diag=dict(argstr='--diag %d', ),
-        diag_cluster=dict(argstr='--diag-cluster', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        fixed_fx_dof=dict(
-            argstr='--ffxdof %d',
-            xor=['fixed_fx_dof_file'],
+            argstr="--X %s", extensions=None, xor=("fsgd", "design", "one_sample"),
         ),
+        diag=dict(argstr="--diag %d",),
+        diag_cluster=dict(argstr="--diag-cluster",),
+        environ=dict(nohash=True, usedefault=True,),
+        fixed_fx_dof=dict(argstr="--ffxdof %d", xor=["fixed_fx_dof_file"],),
         fixed_fx_dof_file=dict(
-            argstr='--ffxdofdat %d',
-            extensions=None,
-            xor=['fixed_fx_dof'],
-        ),
-        fixed_fx_var=dict(
-            argstr='--yffxvar %s',
-            extensions=None,
-        ),
-        force_perm=dict(argstr='--perm-force', ),
-        fsgd=dict(
-            argstr='--fsgd %s %s',
-            xor=('fsgd', 'design', 'one_sample'),
-        ),
-        fwhm=dict(argstr='--fwhm %f', ),
-        glm_dir=dict(
-            argstr='--glmdir %s',
-            genfile=True,
+            argstr="--ffxdofdat %d", extensions=None, xor=["fixed_fx_dof"],
         ),
+        fixed_fx_var=dict(argstr="--yffxvar %s", extensions=None,),
+        force_perm=dict(argstr="--perm-force",),
+        fsgd=dict(argstr="--fsgd %s %s", xor=("fsgd", "design", "one_sample"),),
+        fwhm=dict(argstr="--fwhm %f",),
+        glm_dir=dict(argstr="--glmdir %s", genfile=True,),
         hemi=dict(),
-        in_file=dict(
-            argstr='--y %s',
-            copyfile=False,
-            extensions=None,
-            mandatory=True,
-        ),
-        invert_mask=dict(argstr='--mask-inv', ),
-        label_file=dict(
-            argstr='--label %s',
-            extensions=None,
-            xor=['cortex'],
-        ),
-        mask_file=dict(
-            argstr='--mask %s',
-            extensions=None,
-        ),
-        no_contrast_ok=dict(argstr='--no-contrasts-ok', ),
-        no_est_fwhm=dict(argstr='--no-est-fwhm', ),
-        no_mask_smooth=dict(argstr='--no-mask-smooth', ),
-        no_prune=dict(
-            argstr='--no-prune',
-            xor=['prunethresh'],
-        ),
+        in_file=dict(argstr="--y %s", copyfile=False, extensions=None, mandatory=True,),
+        invert_mask=dict(argstr="--mask-inv",),
+        label_file=dict(argstr="--label %s", extensions=None, xor=["cortex"],),
+        mask_file=dict(argstr="--mask %s", extensions=None,),
+        no_contrast_ok=dict(argstr="--no-contrasts-ok",),
+        no_est_fwhm=dict(argstr="--no-est-fwhm",),
+        no_mask_smooth=dict(argstr="--no-mask-smooth",),
+        no_prune=dict(argstr="--no-prune", xor=["prunethresh"],),
         one_sample=dict(
-            argstr='--osgm',
-            xor=('one_sample', 'fsgd', 'design', 'contrast'),
-        ),
-        pca=dict(argstr='--pca', ),
-        per_voxel_reg=dict(argstr='--pvr %s...', ),
-        profile=dict(argstr='--profile %d', ),
-        prune=dict(argstr='--prune', ),
-        prune_thresh=dict(
-            argstr='--prune_thr %f',
-            xor=['noprune'],
-        ),
-        resynth_test=dict(argstr='--resynthtest %d', ),
-        save_cond=dict(argstr='--save-cond', ),
-        save_estimate=dict(argstr='--yhat-save', ),
-        save_res_corr_mtx=dict(argstr='--eres-scm', ),
-        save_residual=dict(argstr='--eres-save', ),
-        seed=dict(argstr='--seed %d', ),
-        self_reg=dict(argstr='--selfreg %d %d %d', ),
-        sim_done_file=dict(
-            argstr='--sim-done %s',
-            extensions=None,
-        ),
-        sim_sign=dict(argstr='--sim-sign %s', ),
-        simulation=dict(argstr='--sim %s %d %f %s', ),
+            argstr="--osgm", xor=("one_sample", "fsgd", "design", "contrast"),
+        ),
+        pca=dict(argstr="--pca",),
+        per_voxel_reg=dict(argstr="--pvr %s...",),
+        profile=dict(argstr="--profile %d",),
+        prune=dict(argstr="--prune",),
+        prune_thresh=dict(argstr="--prune_thr %f", xor=["noprune"],),
+        resynth_test=dict(argstr="--resynthtest %d",),
+        save_cond=dict(argstr="--save-cond",),
+        save_estimate=dict(argstr="--yhat-save",),
+        save_res_corr_mtx=dict(argstr="--eres-scm",),
+        save_residual=dict(argstr="--eres-save",),
+        seed=dict(argstr="--seed %d",),
+        self_reg=dict(argstr="--selfreg %d %d %d",),
+        sim_done_file=dict(argstr="--sim-done %s", extensions=None,),
+        sim_sign=dict(argstr="--sim-sign %s",),
+        simulation=dict(argstr="--sim %s %d %f %s",),
         subject_id=dict(),
         subjects_dir=dict(),
-        surf=dict(
-            argstr='--surf %s %s %s',
-            requires=['subject_id', 'hemi'],
-        ),
-        surf_geo=dict(usedefault=True, ),
-        synth=dict(argstr='--synth', ),
-        uniform=dict(argstr='--uniform %f %f', ),
-        var_fwhm=dict(argstr='--var-fwhm %f', ),
-        vox_dump=dict(argstr='--voxdump %d %d %d', ),
-        weight_file=dict(
-            extensions=None,
-            xor=['weighted_ls'],
-        ),
-        weight_inv=dict(
-            argstr='--w-inv',
-            xor=['weighted_ls'],
-        ),
-        weight_sqrt=dict(
-            argstr='--w-sqrt',
-            xor=['weighted_ls'],
-        ),
+        surf=dict(argstr="--surf %s %s %s", requires=["subject_id", "hemi"],),
+        surf_geo=dict(usedefault=True,),
+        synth=dict(argstr="--synth",),
+        uniform=dict(argstr="--uniform %f %f",),
+        var_fwhm=dict(argstr="--var-fwhm %f",),
+        vox_dump=dict(argstr="--voxdump %d %d %d",),
+        weight_file=dict(extensions=None, xor=["weighted_ls"],),
+        weight_inv=dict(argstr="--w-inv", xor=["weighted_ls"],),
+        weight_sqrt=dict(argstr="--w-sqrt", xor=["weighted_ls"],),
         weighted_ls=dict(
-            argstr='--wls %s',
+            argstr="--wls %s",
             extensions=None,
-            xor=('weight_file', 'weight_inv', 'weight_sqrt'),
+            xor=("weight_file", "weight_inv", "weight_sqrt"),
         ),
     )
     inputs = OneSampleTTest.input_spec()
@@ -133,25 +77,27 @@ def test_OneSampleTTest_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_OneSampleTTest_outputs():
     output_map = dict(
-        beta_file=dict(extensions=None, ),
-        dof_file=dict(extensions=None, ),
-        error_file=dict(extensions=None, ),
-        error_stddev_file=dict(extensions=None, ),
-        error_var_file=dict(extensions=None, ),
-        estimate_file=dict(extensions=None, ),
-        frame_eigenvectors=dict(extensions=None, ),
+        beta_file=dict(extensions=None,),
+        dof_file=dict(extensions=None,),
+        error_file=dict(extensions=None,),
+        error_stddev_file=dict(extensions=None,),
+        error_var_file=dict(extensions=None,),
+        estimate_file=dict(extensions=None,),
+        frame_eigenvectors=dict(extensions=None,),
         ftest_file=dict(),
-        fwhm_file=dict(extensions=None, ),
+        fwhm_file=dict(extensions=None,),
         gamma_file=dict(),
         gamma_var_file=dict(),
         glm_dir=dict(),
-        mask_file=dict(extensions=None, ),
+        mask_file=dict(extensions=None,),
         sig_file=dict(),
-        singular_values=dict(extensions=None, ),
-        spatial_eigenvectors=dict(extensions=None, ),
-        svd_stats_file=dict(extensions=None, ),
+        singular_values=dict(extensions=None,),
+        spatial_eigenvectors=dict(extensions=None,),
+        svd_stats_file=dict(extensions=None,),
     )
     outputs = OneSampleTTest.output_spec()
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Paint.py b/nipype/interfaces/freesurfer/tests/test_auto_Paint.py
index 33c76bbe12..0a93abec78 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_Paint.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_Paint.py
@@ -4,34 +4,21 @@
 def test_Paint_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        averages=dict(argstr='-a %d', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_surf=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-2,
-        ),
+        args=dict(argstr="%s",),
+        averages=dict(argstr="-a %d",),
+        environ=dict(nohash=True, usedefault=True,),
+        in_surf=dict(argstr="%s", extensions=None, mandatory=True, position=-2,),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
             extensions=None,
             hash_files=False,
             keep_extension=False,
-            name_source=['in_surf'],
-            name_template='%s.avg_curv',
+            name_source=["in_surf"],
+            name_template="%s.avg_curv",
             position=-1,
         ),
         subjects_dir=dict(),
-        template=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-3,
-        ),
+        template=dict(argstr="%s", extensions=None, mandatory=True, position=-3,),
         template_param=dict(),
     )
     inputs = Paint.input_spec()
@@ -39,8 +26,10 @@ def test_Paint_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Paint_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = Paint.output_spec()
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_ParcellationStats.py b/nipype/interfaces/freesurfer/tests/test_auto_ParcellationStats.py
index 85a8747bd0..cf42abe8b6 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_ParcellationStats.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_ParcellationStats.py
@@ -4,111 +4,49 @@
 def test_ParcellationStats_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        aseg=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        brainmask=dict(
-            extensions=None,
-            mandatory=True,
-        ),
+        args=dict(argstr="%s",),
+        aseg=dict(extensions=None, mandatory=True,),
+        brainmask=dict(extensions=None, mandatory=True,),
         copy_inputs=dict(),
-        cortex_label=dict(extensions=None, ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        hemisphere=dict(
-            argstr='%s',
-            mandatory=True,
-            position=-2,
-        ),
-        in_annotation=dict(
-            argstr='-a %s',
-            extensions=None,
-            xor=['in_label'],
-        ),
-        in_cortex=dict(
-            argstr='-cortex %s',
-            extensions=None,
-        ),
+        cortex_label=dict(extensions=None,),
+        environ=dict(nohash=True, usedefault=True,),
+        hemisphere=dict(argstr="%s", mandatory=True, position=-2,),
+        in_annotation=dict(argstr="-a %s", extensions=None, xor=["in_label"],),
+        in_cortex=dict(argstr="-cortex %s", extensions=None,),
         in_label=dict(
-            argstr='-l %s',
-            extensions=None,
-            xor=['in_annotatoin', 'out_color'],
-        ),
-        lh_pial=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        lh_white=dict(
-            extensions=None,
-            mandatory=True,
+            argstr="-l %s", extensions=None, xor=["in_annotatoin", "out_color"],
         ),
-        mgz=dict(argstr='-mgz', ),
+        lh_pial=dict(extensions=None, mandatory=True,),
+        lh_white=dict(extensions=None, mandatory=True,),
+        mgz=dict(argstr="-mgz",),
         out_color=dict(
-            argstr='-c %s',
-            extensions=None,
-            genfile=True,
-            xor=['in_label'],
+            argstr="-c %s", extensions=None, genfile=True, xor=["in_label"],
         ),
         out_table=dict(
-            argstr='-f %s',
-            extensions=None,
-            genfile=True,
-            requires=['tabular_output'],
-        ),
-        rh_pial=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        rh_white=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        ribbon=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        subject_id=dict(
-            argstr='%s',
-            mandatory=True,
-            position=-3,
-            usedefault=True,
+            argstr="-f %s", extensions=None, genfile=True, requires=["tabular_output"],
         ),
+        rh_pial=dict(extensions=None, mandatory=True,),
+        rh_white=dict(extensions=None, mandatory=True,),
+        ribbon=dict(extensions=None, mandatory=True,),
+        subject_id=dict(argstr="%s", mandatory=True, position=-3, usedefault=True,),
         subjects_dir=dict(),
-        surface=dict(
-            argstr='%s',
-            position=-1,
-        ),
-        tabular_output=dict(argstr='-b', ),
-        th3=dict(
-            argstr='-th3',
-            requires=['cortex_label'],
-        ),
-        thickness=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        transform=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        wm=dict(
-            extensions=None,
-            mandatory=True,
-        ),
+        surface=dict(argstr="%s", position=-1,),
+        tabular_output=dict(argstr="-b",),
+        th3=dict(argstr="-th3", requires=["cortex_label"],),
+        thickness=dict(extensions=None, mandatory=True,),
+        transform=dict(extensions=None, mandatory=True,),
+        wm=dict(extensions=None, mandatory=True,),
     )
     inputs = ParcellationStats.input_spec()
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_ParcellationStats_outputs():
     output_map = dict(
-        out_color=dict(extensions=None, ),
-        out_table=dict(extensions=None, ),
+        out_color=dict(extensions=None,), out_table=dict(extensions=None,),
     )
     outputs = ParcellationStats.output_spec()
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_ParseDICOMDir.py b/nipype/interfaces/freesurfer/tests/test_auto_ParseDICOMDir.py
index 7fed3ad587..243e769266 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_ParseDICOMDir.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_ParseDICOMDir.py
@@ -4,31 +4,23 @@
 def test_ParseDICOMDir_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        dicom_dir=dict(
-            argstr='--d %s',
-            mandatory=True,
-        ),
-        dicom_info_file=dict(
-            argstr='--o %s',
-            extensions=None,
-            usedefault=True,
-        ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        sortbyrun=dict(argstr='--sortbyrun', ),
+        args=dict(argstr="%s",),
+        dicom_dir=dict(argstr="--d %s", mandatory=True,),
+        dicom_info_file=dict(argstr="--o %s", extensions=None, usedefault=True,),
+        environ=dict(nohash=True, usedefault=True,),
+        sortbyrun=dict(argstr="--sortbyrun",),
         subjects_dir=dict(),
-        summarize=dict(argstr='--summarize', ),
+        summarize=dict(argstr="--summarize",),
     )
     inputs = ParseDICOMDir.input_spec()
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_ParseDICOMDir_outputs():
-    output_map = dict(dicom_info_file=dict(extensions=None, ), )
+    output_map = dict(dicom_info_file=dict(extensions=None,),)
     outputs = ParseDICOMDir.output_spec()
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_ReconAll.py b/nipype/interfaces/freesurfer/tests/test_auto_ReconAll.py
index 2d4365e51e..29a9f0006b 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_ReconAll.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_ReconAll.py
@@ -4,221 +4,107 @@
 def test_ReconAll_inputs():
     input_map = dict(
-        FLAIR_file=dict(
-            argstr='-FLAIR %s',
-            extensions=None,
-            min_ver='5.3.0',
-        ),
-        T1_files=dict(argstr='-i %s...', ),
-        T2_file=dict(
-            argstr='-T2 %s',
-            extensions=None,
-            min_ver='5.3.0',
-        ),
-        args=dict(argstr='%s', ),
-        big_ventricles=dict(argstr='-bigventricles', ),
-        brainstem=dict(argstr='-brainstem-structures', ),
-        directive=dict(
-            argstr='-%s',
-            position=0,
-            usedefault=True,
-        ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        expert=dict(
-            argstr='-expert %s',
-            extensions=None,
-        ),
-        flags=dict(argstr='%s', ),
-        hemi=dict(argstr='-hemi %s', ),
+        FLAIR_file=dict(argstr="-FLAIR %s", extensions=None, min_ver="5.3.0",),
+        T1_files=dict(argstr="-i %s...",),
+        T2_file=dict(argstr="-T2 %s", extensions=None, min_ver="5.3.0",),
+        args=dict(argstr="%s",),
+        big_ventricles=dict(argstr="-bigventricles",),
+        brainstem=dict(argstr="-brainstem-structures",),
+        directive=dict(argstr="-%s", position=0, usedefault=True,),
+        environ=dict(nohash=True, usedefault=True,),
+        expert=dict(argstr="-expert %s", extensions=None,),
+        flags=dict(argstr="%s",),
+        hemi=dict(argstr="-hemi %s",),
         hippocampal_subfields_T1=dict(
-            argstr='-hippocampal-subfields-T1',
-            min_ver='6.0.0',
+            argstr="-hippocampal-subfields-T1", min_ver="6.0.0",
         ),
         hippocampal_subfields_T2=dict(
-            argstr='-hippocampal-subfields-T2 %s %s',
-            min_ver='6.0.0',
-        ),
-        hires=dict(
-            argstr='-hires',
-            min_ver='6.0.0',
-        ),
-        mprage=dict(argstr='-mprage', ),
-        mri_aparc2aseg=dict(xor=['expert'], ),
-        mri_ca_label=dict(xor=['expert'], ),
-        mri_ca_normalize=dict(xor=['expert'], ),
-        mri_ca_register=dict(xor=['expert'], ),
-        mri_edit_wm_with_aseg=dict(xor=['expert'], ),
-        mri_em_register=dict(xor=['expert'], ),
-        mri_fill=dict(xor=['expert'], ),
-        mri_mask=dict(xor=['expert'], ),
-        mri_normalize=dict(xor=['expert'], ),
-        mri_pretess=dict(xor=['expert'], ),
-        mri_remove_neck=dict(xor=['expert'], ),
-        mri_segment=dict(xor=['expert'], ),
-        mri_segstats=dict(xor=['expert'], ),
-        mri_tessellate=dict(xor=['expert'], ),
-        mri_watershed=dict(xor=['expert'], ),
-        mris_anatomical_stats=dict(xor=['expert'], ),
-        mris_ca_label=dict(xor=['expert'], ),
-        mris_fix_topology=dict(xor=['expert'], ),
-        mris_inflate=dict(xor=['expert'], ),
-        mris_make_surfaces=dict(xor=['expert'], ),
-        mris_register=dict(xor=['expert'], ),
-        mris_smooth=dict(xor=['expert'], ),
-        mris_sphere=dict(xor=['expert'], ),
-        mris_surf2vol=dict(xor=['expert'], ),
-        mrisp_paint=dict(xor=['expert'], ),
-        openmp=dict(argstr='-openmp %d', ),
-        parallel=dict(argstr='-parallel', ),
-        subject_id=dict(
-            argstr='-subjid %s',
-            usedefault=True,
-        ),
-        subjects_dir=dict(
-            argstr='-sd %s',
-            genfile=True,
-            hash_files=False,
-        ),
-        talairach=dict(xor=['expert'], ),
-        use_FLAIR=dict(
-            argstr='-FLAIRpial',
-            min_ver='5.3.0',
-            xor=['use_T2'],
-        ),
-        use_T2=dict(
-            argstr='-T2pial',
-            min_ver='5.3.0',
-            xor=['use_FLAIR'],
-        ),
-        xopts=dict(argstr='-xopts-%s', ),
+            argstr="-hippocampal-subfields-T2 %s %s", min_ver="6.0.0",
+        ),
+        hires=dict(argstr="-hires", min_ver="6.0.0",),
+        mprage=dict(argstr="-mprage",),
+        mri_aparc2aseg=dict(xor=["expert"],),
+        mri_ca_label=dict(xor=["expert"],),
+        mri_ca_normalize=dict(xor=["expert"],),
+        mri_ca_register=dict(xor=["expert"],),
+        mri_edit_wm_with_aseg=dict(xor=["expert"],),
+        mri_em_register=dict(xor=["expert"],),
+        mri_fill=dict(xor=["expert"],),
+        mri_mask=dict(xor=["expert"],),
+        mri_normalize=dict(xor=["expert"],),
+        mri_pretess=dict(xor=["expert"],),
+        mri_remove_neck=dict(xor=["expert"],),
+        mri_segment=dict(xor=["expert"],),
+        mri_segstats=dict(xor=["expert"],),
+        mri_tessellate=dict(xor=["expert"],),
+        mri_watershed=dict(xor=["expert"],),
+        mris_anatomical_stats=dict(xor=["expert"],),
+        mris_ca_label=dict(xor=["expert"],),
+        mris_fix_topology=dict(xor=["expert"],),
+        mris_inflate=dict(xor=["expert"],),
+        mris_make_surfaces=dict(xor=["expert"],),
+        mris_register=dict(xor=["expert"],),
+        mris_smooth=dict(xor=["expert"],),
+        mris_sphere=dict(xor=["expert"],),
+        mris_surf2vol=dict(xor=["expert"],),
+        mrisp_paint=dict(xor=["expert"],),
+        openmp=dict(argstr="-openmp %d",),
+        parallel=dict(argstr="-parallel",),
+        subject_id=dict(argstr="-subjid %s", usedefault=True,),
+        subjects_dir=dict(argstr="-sd %s", genfile=True, hash_files=False,),
+        talairach=dict(xor=["expert"],),
+        use_FLAIR=dict(argstr="-FLAIRpial", min_ver="5.3.0", xor=["use_T2"],),
+        use_T2=dict(argstr="-T2pial", min_ver="5.3.0", xor=["use_FLAIR"],),
+        xopts=dict(argstr="-xopts-%s",),
     )
     inputs = ReconAll.input_spec()
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_ReconAll_outputs():
     output_map = dict(
-        BA_stats=dict(
-            altkey='BA',
-            loc='stats',
-        ),
-        T1=dict(
-            extensions=None,
-            loc='mri',
-        ),
-        annot=dict(
-            altkey='*annot',
-            loc='label',
-        ),
-        aparc_a2009s_stats=dict(
-            altkey='aparc.a2009s',
-            loc='stats',
-        ),
-        aparc_aseg=dict(
-            altkey='aparc*aseg',
-            loc='mri',
-        ),
-        aparc_stats=dict(
-            altkey='aparc',
-            loc='stats',
-        ),
-        area_pial=dict(
-            altkey='area.pial',
-            loc='surf',
-        ),
-        aseg=dict(
-            extensions=None,
-            loc='mri',
-        ),
-        aseg_stats=dict(
-            altkey='aseg',
-            loc='stats',
-        ),
-        avg_curv=dict(loc='surf', ),
-        brain=dict(
-            extensions=None,
-            loc='mri',
-        ),
-        brainmask=dict(
-            extensions=None,
-            loc='mri',
-        ),
-        curv=dict(loc='surf', ),
-        curv_pial=dict(
-            altkey='curv.pial',
-            loc='surf',
-        ),
-        curv_stats=dict(
-            altkey='curv',
-            loc='stats',
-        ),
-        entorhinal_exvivo_stats=dict(
-            altkey='entorhinal_exvivo',
-            loc='stats',
-        ),
-        filled=dict(
-            extensions=None,
-            loc='mri',
-        ),
-        graymid=dict(
-            altkey=['graymid', 'midthickness'],
-            loc='surf',
-        ),
-        inflated=dict(loc='surf', ),
-        jacobian_white=dict(loc='surf', ),
-        label=dict(
-            altkey='*label',
-            loc='label',
-        ),
-        norm=dict(
-            extensions=None,
-            loc='mri',
-        ),
-        nu=dict(
-            extensions=None,
-            loc='mri',
-        ),
-        orig=dict(
-            extensions=None,
-            loc='mri',
-        ),
-        pial=dict(loc='surf', ),
-        rawavg=dict(
-            extensions=None,
-            loc='mri',
-        ),
-        ribbon=dict(
-            altkey='*ribbon',
-            loc='mri',
-        ),
-        smoothwm=dict(loc='surf', ),
-        sphere=dict(loc='surf', ),
-        sphere_reg=dict(
-            altkey='sphere.reg',
-            loc='surf',
-        ),
+        BA_stats=dict(altkey="BA", loc="stats",),
+        T1=dict(extensions=None, loc="mri",),
+        annot=dict(altkey="*annot", loc="label",),
+        aparc_a2009s_stats=dict(altkey="aparc.a2009s", loc="stats",),
+        aparc_aseg=dict(altkey="aparc*aseg", loc="mri",),
+        aparc_stats=dict(altkey="aparc", loc="stats",),
+        area_pial=dict(altkey="area.pial", loc="surf",),
+        aseg=dict(extensions=None, loc="mri",),
+        aseg_stats=dict(altkey="aseg", loc="stats",),
+        avg_curv=dict(loc="surf",),
+        brain=dict(extensions=None, loc="mri",),
+        brainmask=dict(extensions=None, loc="mri",),
+        curv=dict(loc="surf",),
+        curv_pial=dict(altkey="curv.pial", loc="surf",),
+        curv_stats=dict(altkey="curv", loc="stats",),
+        entorhinal_exvivo_stats=dict(altkey="entorhinal_exvivo", loc="stats",),
+        filled=dict(extensions=None, loc="mri",),
+        graymid=dict(altkey=["graymid", "midthickness"], loc="surf",),
+        inflated=dict(loc="surf",),
+        jacobian_white=dict(loc="surf",),
+        label=dict(altkey="*label", loc="label",),
+        norm=dict(extensions=None, loc="mri",),
+        nu=dict(extensions=None, loc="mri",),
+        orig=dict(extensions=None, loc="mri",),
+        pial=dict(loc="surf",),
+        rawavg=dict(extensions=None, loc="mri",),
+        ribbon=dict(altkey="*ribbon", loc="mri",),
+        smoothwm=dict(loc="surf",),
+        sphere=dict(loc="surf",),
+        sphere_reg=dict(altkey="sphere.reg", loc="surf",),
         subject_id=dict(),
         subjects_dir=dict(),
-        sulc=dict(loc='surf', ),
-        thickness=dict(loc='surf', ),
-        volume=dict(loc='surf', ),
-        white=dict(loc='surf', ),
-        wm=dict(
-            extensions=None,
-            loc='mri',
-        ),
-        wmparc=dict(
-            extensions=None,
-            loc='mri',
-        ),
-        wmparc_stats=dict(
-            altkey='wmparc',
-            loc='stats',
-        ),
+        sulc=dict(loc="surf",),
+        thickness=dict(loc="surf",),
+        volume=dict(loc="surf",),
+        white=dict(loc="surf",),
+        wm=dict(extensions=None, loc="mri",),
+        wmparc=dict(extensions=None, loc="mri",),
+        wmparc_stats=dict(altkey="wmparc", loc="stats",),
     )
     outputs = ReconAll.output_spec()
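The long run of `xor=["expert"]` entries in the ReconAll hunk records that each per-tool flag conflicts with the catch-all `expert` options file, and `use_T2`/`use_FLAIR` name each other in their `xor` lists. A hedged sketch of what that metadata buys at runtime (argument values here are illustrative only):

```python
# Sketch, not part of the patch: nipype rejects mutually exclusive inputs
# before any command line is built.
from nipype.interfaces.freesurfer import ReconAll

recon = ReconAll(directive="all", subject_id="sub-01")  # illustrative values
recon.inputs.use_T2 = True
# recon.inputs.use_FLAIR = True  # would raise: use_FLAIR is xor'd with use_T2
```

The generated test only asserts that the metadata is present; the enforcement itself lives in the input-spec validation shared by all interfaces.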
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Register.py b/nipype/interfaces/freesurfer/tests/test_auto_Register.py
index 2d940d131d..d2add3db5b 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_Register.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_Register.py
@@ -4,52 +4,27 @@
 def test_Register_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        curv=dict(
-            argstr='-curv',
-            requires=['in_smoothwm'],
-        ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_smoothwm=dict(
-            copyfile=True,
-            extensions=None,
-        ),
-        in_sulc=dict(
-            copyfile=True,
-            extensions=None,
-            mandatory=True,
-        ),
+        args=dict(argstr="%s",),
+        curv=dict(argstr="-curv", requires=["in_smoothwm"],),
+        environ=dict(nohash=True, usedefault=True,),
+        in_smoothwm=dict(copyfile=True, extensions=None,),
+        in_sulc=dict(copyfile=True, extensions=None, mandatory=True,),
         in_surf=dict(
-            argstr='%s',
-            copyfile=True,
-            extensions=None,
-            mandatory=True,
-            position=-3,
-        ),
-        out_file=dict(
-            argstr='%s',
-            extensions=None,
-            genfile=True,
-            position=-1,
+            argstr="%s", copyfile=True, extensions=None, mandatory=True, position=-3,
         ),
+        out_file=dict(argstr="%s", extensions=None, genfile=True, position=-1,),
         subjects_dir=dict(),
-        target=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-2,
-        ),
+        target=dict(argstr="%s", extensions=None, mandatory=True, position=-2,),
     )
     inputs = Register.input_spec()
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Register_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = Register.output_spec()
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_RegisterAVItoTalairach.py b/nipype/interfaces/freesurfer/tests/test_auto_RegisterAVItoTalairach.py
index 405a9e5c4f..39a7e754bb 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_RegisterAVItoTalairach.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_RegisterAVItoTalairach.py
@@ -4,49 +4,25 @@
 def test_RegisterAVItoTalairach_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_file=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=0,
-        ),
-        out_file=dict(
-            argstr='%s',
-            extensions=None,
-            position=3,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        in_file=dict(argstr="%s", extensions=None, mandatory=True, position=0,),
+        out_file=dict(argstr="%s", extensions=None, position=3, usedefault=True,),
         subjects_dir=dict(),
-        target=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=1,
-        ),
-        vox2vox=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=2,
-        ),
+        target=dict(argstr="%s", extensions=None, mandatory=True, position=1,),
+        vox2vox=dict(argstr="%s", extensions=None, mandatory=True, position=2,),
     )
     inputs = RegisterAVItoTalairach.input_spec()
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_RegisterAVItoTalairach_outputs():
     output_map = dict(
-        log_file=dict(
-            extensions=None,
-            usedefault=True,
-        ),
-        out_file=dict(extensions=None, ),
+        log_file=dict(extensions=None, usedefault=True,),
+        out_file=dict(extensions=None,),
     )
     outputs = RegisterAVItoTalairach.output_spec()
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_RelabelHypointensities.py b/nipype/interfaces/freesurfer/tests/test_auto_RelabelHypointensities.py
index 6258e7b0b7..fd459f14a7 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_RelabelHypointensities.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_RelabelHypointensities.py
@@ -4,53 +4,32 @@
 def test_RelabelHypointensities_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        aseg=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-3,
-        ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        lh_white=dict(
-            copyfile=True,
-            extensions=None,
-            mandatory=True,
-        ),
+        args=dict(argstr="%s",),
+        aseg=dict(argstr="%s", extensions=None, mandatory=True, position=-3,),
+        environ=dict(nohash=True, usedefault=True,),
+        lh_white=dict(copyfile=True, extensions=None, mandatory=True,),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
             extensions=None,
             hash_files=False,
             keep_extension=False,
-            name_source=['aseg'],
-            name_template='%s.hypos.mgz',
+            name_source=["aseg"],
+            name_template="%s.hypos.mgz",
             position=-1,
         ),
-        rh_white=dict(
-            copyfile=True,
-            extensions=None,
-            mandatory=True,
-        ),
+        rh_white=dict(copyfile=True, extensions=None, mandatory=True,),
         subjects_dir=dict(),
-        surf_directory=dict(
-            argstr='%s',
-            position=-2,
-            usedefault=True,
-        ),
+        surf_directory=dict(argstr="%s", position=-2, usedefault=True,),
     )
     inputs = RelabelHypointensities.input_spec()
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_RelabelHypointensities_outputs():
-    output_map = dict(out_file=dict(
-        argstr='%s',
-        extensions=None,
-    ), )
+    output_map = dict(out_file=dict(argstr="%s", extensions=None,),)
     outputs = RelabelHypointensities.output_spec()
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_RemoveIntersection.py b/nipype/interfaces/freesurfer/tests/test_auto_RemoveIntersection.py
index 2d4a67045b..d94124a82b 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_RemoveIntersection.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_RemoveIntersection.py
@@ -4,25 +4,18 @@
 def test_RemoveIntersection_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
         in_file=dict(
-            argstr='%s',
-            copyfile=True,
-            extensions=None,
-            mandatory=True,
-            position=-2,
+            argstr="%s", copyfile=True, extensions=None, mandatory=True, position=-2,
         ),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
             extensions=None,
             hash_files=False,
             keep_extension=True,
-            name_source=['in_file'],
-            name_template='%s',
+            name_source=["in_file"],
+            name_template="%s",
             position=-1,
         ),
         subjects_dir=dict(),
@@ -32,8 +25,10 @@ def test_RemoveIntersection_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_RemoveIntersection_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = RemoveIntersection.output_spec()
     for key, metadata in list(output_map.items()):
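The `name_source`/`name_template` pair asserted in the RemoveIntersection hunk is what gives `out_file` its default value: the template is applied to the `in_file` name. A rough sketch under stated assumptions (`lh.orig` is a placeholder and must exist on disk, since `in_file` is a `File(exists=True)` trait):

```python
# Sketch, not part of the patch: the default output name is derived from
# the input via name_template="%s" with keep_extension=True.
from nipype.interfaces.freesurfer import RemoveIntersection

ri = RemoveIntersection(in_file="lh.orig")  # hypothetical existing surface
print(ri.cmdline)  # roughly: mris_remove_intersection lh.orig lh.orig
```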
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_RemoveNeck.py b/nipype/interfaces/freesurfer/tests/test_auto_RemoveNeck.py
index 18dc6f9311..4050db776b 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_RemoveNeck.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_RemoveNeck.py
@@ -4,48 +4,32 @@
 def test_RemoveNeck_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_file=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-4,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-4,),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
             extensions=None,
             hash_files=False,
             keep_extension=True,
-            name_source=['in_file'],
-            name_template='%s_noneck',
+            name_source=["in_file"],
+            name_template="%s_noneck",
             position=-1,
         ),
-        radius=dict(argstr='-radius %d', ),
+        radius=dict(argstr="-radius %d",),
         subjects_dir=dict(),
-        template=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-2,
-        ),
-        transform=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-3,
-        ),
+        template=dict(argstr="%s", extensions=None, mandatory=True, position=-2,),
+        transform=dict(argstr="%s", extensions=None, mandatory=True, position=-3,),
     )
     inputs = RemoveNeck.input_spec()
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_RemoveNeck_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = RemoveNeck.output_spec()
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Resample.py b/nipype/interfaces/freesurfer/tests/test_auto_Resample.py
index 7e771e10a0..0dccad303c 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_Resample.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_Resample.py
@@ -4,36 +4,24 @@
 def test_Resample_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_file=dict(
-            argstr='-i %s',
-            extensions=None,
-            mandatory=True,
-            position=-2,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        in_file=dict(argstr="-i %s", extensions=None, mandatory=True, position=-2,),
         resampled_file=dict(
-            argstr='-o %s',
-            extensions=None,
-            genfile=True,
-            position=-1,
+            argstr="-o %s", extensions=None, genfile=True, position=-1,
         ),
         subjects_dir=dict(),
-        voxel_size=dict(
-            argstr='-vs %.2f %.2f %.2f',
-            mandatory=True,
-        ),
+        voxel_size=dict(argstr="-vs %.2f %.2f %.2f", mandatory=True,),
     )
     inputs = Resample.input_spec()
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Resample_outputs():
-    output_map = dict(resampled_file=dict(extensions=None, ), )
+    output_map = dict(resampled_file=dict(extensions=None,),)
    outputs = Resample.output_spec()
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_RobustRegister.py b/nipype/interfaces/freesurfer/tests/test_auto_RobustRegister.py
index 98e7de44c0..7dde230eb7 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_RobustRegister.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_RobustRegister.py
@@ -4,85 +4,56 @@
 def test_RobustRegister_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        auto_sens=dict(
-            argstr='--satit',
-            mandatory=True,
-            xor=['outlier_sens'],
-        ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        est_int_scale=dict(argstr='--iscale', ),
-        force_double=dict(argstr='--doubleprec', ),
-        force_float=dict(argstr='--floattype', ),
-        half_source=dict(argstr='--halfmov %s', ),
-        half_source_xfm=dict(argstr='--halfmovlta %s', ),
-        half_targ=dict(argstr='--halfdst %s', ),
-        half_targ_xfm=dict(argstr='--halfdstlta %s', ),
-        half_weights=dict(argstr='--halfweights %s', ),
-        high_iterations=dict(argstr='--highit %d', ),
-        in_xfm_file=dict(
-            argstr='--transform',
-            extensions=None,
-        ),
-        init_orient=dict(argstr='--initorient', ),
-        iteration_thresh=dict(argstr='--epsit %.3f', ),
-        least_squares=dict(argstr='--leastsquares', ),
-        mask_source=dict(
-            argstr='--maskmov %s',
-            extensions=None,
-        ),
-        mask_target=dict(
-            argstr='--maskdst %s',
-            extensions=None,
-        ),
-        max_iterations=dict(argstr='--maxit %d', ),
-        no_init=dict(argstr='--noinit', ),
-        no_multi=dict(argstr='--nomulti', ),
-        out_reg_file=dict(
-            argstr='--lta %s',
-            usedefault=True,
-        ),
-        outlier_limit=dict(argstr='--wlimit %.3f', ),
-        outlier_sens=dict(
-            argstr='--sat %.4f',
-            mandatory=True,
-            xor=['auto_sens'],
-        ),
-        registered_file=dict(argstr='--warp %s', ),
-        source_file=dict(
-            argstr='--mov %s',
-            extensions=None,
-            mandatory=True,
-        ),
+        args=dict(argstr="%s",),
+        auto_sens=dict(argstr="--satit", mandatory=True, xor=["outlier_sens"],),
+        environ=dict(nohash=True, usedefault=True,),
+        est_int_scale=dict(argstr="--iscale",),
+        force_double=dict(argstr="--doubleprec",),
+        force_float=dict(argstr="--floattype",),
+        half_source=dict(argstr="--halfmov %s",),
+        half_source_xfm=dict(argstr="--halfmovlta %s",),
+        half_targ=dict(argstr="--halfdst %s",),
+        half_targ_xfm=dict(argstr="--halfdstlta %s",),
+        half_weights=dict(argstr="--halfweights %s",),
+        high_iterations=dict(argstr="--highit %d",),
+        in_xfm_file=dict(argstr="--transform", extensions=None,),
+        init_orient=dict(argstr="--initorient",),
+        iteration_thresh=dict(argstr="--epsit %.3f",),
+        least_squares=dict(argstr="--leastsquares",),
+        mask_source=dict(argstr="--maskmov %s", extensions=None,),
+        mask_target=dict(argstr="--maskdst %s", extensions=None,),
+        max_iterations=dict(argstr="--maxit %d",),
+        no_init=dict(argstr="--noinit",),
+        no_multi=dict(argstr="--nomulti",),
+        out_reg_file=dict(argstr="--lta %s", usedefault=True,),
+        outlier_limit=dict(argstr="--wlimit %.3f",),
+        outlier_sens=dict(argstr="--sat %.4f", mandatory=True, xor=["auto_sens"],),
+        registered_file=dict(argstr="--warp %s",),
+        source_file=dict(argstr="--mov %s", extensions=None, mandatory=True,),
         subjects_dir=dict(),
-        subsample_thresh=dict(argstr='--subsample %d', ),
-        target_file=dict(
-            argstr='--dst %s',
-            extensions=None,
-            mandatory=True,
-        ),
-        trans_only=dict(argstr='--transonly', ),
-        weights_file=dict(argstr='--weights %s', ),
-        write_vo2vox=dict(argstr='--vox2vox', ),
+        subsample_thresh=dict(argstr="--subsample %d",),
+        target_file=dict(argstr="--dst %s", extensions=None, mandatory=True,),
+        trans_only=dict(argstr="--transonly",),
+        weights_file=dict(argstr="--weights %s",),
+        write_vo2vox=dict(argstr="--vox2vox",),
     )
     inputs = RobustRegister.input_spec()
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_RobustRegister_outputs():
     output_map = dict(
-        half_source=dict(extensions=None, ),
-        half_source_xfm=dict(extensions=None, ),
-        half_targ=dict(extensions=None, ),
-        half_targ_xfm=dict(extensions=None, ),
-        half_weights=dict(extensions=None, ),
-        out_reg_file=dict(extensions=None, ),
-        registered_file=dict(extensions=None, ),
-        weights_file=dict(extensions=None, ),
+        half_source=dict(extensions=None,),
+        half_source_xfm=dict(extensions=None,),
+        half_targ=dict(extensions=None,),
+        half_targ_xfm=dict(extensions=None,),
+        half_weights=dict(extensions=None,),
+        out_reg_file=dict(extensions=None,),
+        registered_file=dict(extensions=None,),
+        weights_file=dict(extensions=None,),
     )
     outputs = RobustRegister.output_spec()
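`auto_sens` and `outlier_sens` in the RobustRegister hunk are a *mandatory* xor pair: each is `mandatory=True` but xor'd against the other, so exactly one of the two must be set before the interface will run. A hedged sketch of the two valid spellings (paths are placeholders and must exist):

```python
# Sketch, not part of the patch: one, and only one, of the pair is required.
from nipype.interfaces.freesurfer import RobustRegister

reg = RobustRegister(source_file="src.mgz", target_file="dst.mgz")
reg.inputs.auto_sens = True      # renders as --satit
# reg.inputs.outlier_sens = 3.0  # would render --sat 3.0; setting both errors
```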
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_RobustTemplate.py b/nipype/interfaces/freesurfer/tests/test_auto_RobustTemplate.py
index d56a11c1b9..3ee33a567e 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_RobustTemplate.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_RobustTemplate.py
@@ -4,52 +4,41 @@
 def test_RobustTemplate_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(argstr="%s",),
         auto_detect_sensitivity=dict(
-            argstr='--satit',
-            mandatory=True,
-            xor=['outlier_sensitivity'],
+            argstr="--satit", mandatory=True, xor=["outlier_sensitivity"],
         ),
-        average_metric=dict(argstr='--average %d', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        fixed_timepoint=dict(argstr='--fixtp', ),
-        in_files=dict(
-            argstr='--mov %s',
-            mandatory=True,
-        ),
-        in_intensity_scales=dict(argstr='--iscalein %s', ),
-        initial_timepoint=dict(argstr='--inittp %d', ),
-        initial_transforms=dict(argstr='--ixforms %s', ),
-        intensity_scaling=dict(argstr='--iscale', ),
-        no_iteration=dict(argstr='--noit', ),
+        average_metric=dict(argstr="--average %d",),
+        environ=dict(nohash=True, usedefault=True,),
+        fixed_timepoint=dict(argstr="--fixtp",),
+        in_files=dict(argstr="--mov %s", mandatory=True,),
+        in_intensity_scales=dict(argstr="--iscalein %s",),
+        initial_timepoint=dict(argstr="--inittp %d",),
+        initial_transforms=dict(argstr="--ixforms %s",),
+        intensity_scaling=dict(argstr="--iscale",),
+        no_iteration=dict(argstr="--noit",),
         num_threads=dict(),
         out_file=dict(
-            argstr='--template %s',
-            extensions=None,
-            mandatory=True,
-            usedefault=True,
+            argstr="--template %s", extensions=None, mandatory=True, usedefault=True,
         ),
         outlier_sensitivity=dict(
-            argstr='--sat %.4f',
-            mandatory=True,
-            xor=['auto_detect_sensitivity'],
+            argstr="--sat %.4f", mandatory=True, xor=["auto_detect_sensitivity"],
         ),
-        scaled_intensity_outputs=dict(argstr='--iscaleout %s', ),
+        scaled_intensity_outputs=dict(argstr="--iscaleout %s",),
         subjects_dir=dict(),
-        subsample_threshold=dict(argstr='--subsample %d', ),
-        transform_outputs=dict(argstr='--lta %s', ),
+        subsample_threshold=dict(argstr="--subsample %d",),
+        transform_outputs=dict(argstr="--lta %s",),
     )
     inputs = RobustTemplate.input_spec()
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_RobustTemplate_outputs():
     output_map = dict(
-        out_file=dict(extensions=None, ),
+        out_file=dict(extensions=None,),
         scaled_intensity_outputs=dict(),
         transform_outputs=dict(),
     )
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SampleToSurface.py b/nipype/interfaces/freesurfer/tests/test_auto_SampleToSurface.py
index 44fd5885d2..67cc1fd244 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_SampleToSurface.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_SampleToSurface.py
@@ -4,116 +4,76 @@
 def test_SampleToSurface_inputs():
     input_map = dict(
-        apply_rot=dict(argstr='--rot %.3f %.3f %.3f', ),
-        apply_trans=dict(argstr='--trans %.3f %.3f %.3f', ),
-        args=dict(argstr='%s', ),
-        cortex_mask=dict(
-            argstr='--cortex',
-            xor=['mask_label'],
-        ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        fix_tk_reg=dict(argstr='--fixtkreg', ),
-        float2int_method=dict(argstr='--float2int %s', ),
-        frame=dict(argstr='--frame %d', ),
-        hemi=dict(
-            argstr='--hemi %s',
-            mandatory=True,
-        ),
-        hits_file=dict(argstr='--srchit %s', ),
-        hits_type=dict(argstr='--srchit_type', ),
-        ico_order=dict(
-            argstr='--icoorder %d',
-            requires=['target_subject'],
-        ),
-        interp_method=dict(argstr='--interp %s', ),
-        mask_label=dict(
-            argstr='--mask %s',
-            extensions=None,
-            xor=['cortex_mask'],
-        ),
+        apply_rot=dict(argstr="--rot %.3f %.3f %.3f",),
+        apply_trans=dict(argstr="--trans %.3f %.3f %.3f",),
+        args=dict(argstr="%s",),
+        cortex_mask=dict(argstr="--cortex", xor=["mask_label"],),
+        environ=dict(nohash=True, usedefault=True,),
+        fix_tk_reg=dict(argstr="--fixtkreg",),
+        float2int_method=dict(argstr="--float2int %s",),
+        frame=dict(argstr="--frame %d",),
+        hemi=dict(argstr="--hemi %s", mandatory=True,),
+        hits_file=dict(argstr="--srchit %s",),
+        hits_type=dict(argstr="--srchit_type",),
+        ico_order=dict(argstr="--icoorder %d", requires=["target_subject"],),
+        interp_method=dict(argstr="--interp %s",),
+        mask_label=dict(argstr="--mask %s", extensions=None, xor=["cortex_mask"],),
         mni152reg=dict(
-            argstr='--mni152reg',
+            argstr="--mni152reg",
             mandatory=True,
-            xor=['reg_file', 'reg_header', 'mni152reg'],
-        ),
-        no_reshape=dict(
-            argstr='--noreshape',
-            xor=['reshape'],
-        ),
-        out_file=dict(
-            argstr='--o %s',
-            extensions=None,
-            genfile=True,
-        ),
-        out_type=dict(argstr='--out_type %s', ),
-        override_reg_subj=dict(
-            argstr='--srcsubject %s',
-            requires=['subject_id'],
-        ),
-        projection_stem=dict(
-            mandatory=True,
-            xor=['sampling_method'],
-        ),
-        reference_file=dict(
-            argstr='--ref %s',
-            extensions=None,
-        ),
+            xor=["reg_file", "reg_header", "mni152reg"],
+        ),
+        no_reshape=dict(argstr="--noreshape", xor=["reshape"],),
+        out_file=dict(argstr="--o %s", extensions=None, genfile=True,),
+        out_type=dict(argstr="--out_type %s",),
+        override_reg_subj=dict(argstr="--srcsubject %s", requires=["subject_id"],),
+        projection_stem=dict(mandatory=True, xor=["sampling_method"],),
+        reference_file=dict(argstr="--ref %s", extensions=None,),
         reg_file=dict(
-            argstr='--reg %s',
+            argstr="--reg %s",
             extensions=None,
             mandatory=True,
-            xor=['reg_file', 'reg_header', 'mni152reg'],
+            xor=["reg_file", "reg_header", "mni152reg"],
         ),
         reg_header=dict(
-            argstr='--regheader %s',
+            argstr="--regheader %s",
             mandatory=True,
-            requires=['subject_id'],
-            xor=['reg_file', 'reg_header', 'mni152reg'],
-        ),
-        reshape=dict(
-            argstr='--reshape',
-            xor=['no_reshape'],
+            requires=["subject_id"],
+            xor=["reg_file", "reg_header", "mni152reg"],
         ),
-        reshape_slices=dict(argstr='--rf %d', ),
+        reshape=dict(argstr="--reshape", xor=["no_reshape"],),
+        reshape_slices=dict(argstr="--rf %d",),
         sampling_method=dict(
-            argstr='%s',
+            argstr="%s",
             mandatory=True,
-            requires=['sampling_range', 'sampling_units'],
-            xor=['projection_stem'],
+            requires=["sampling_range", "sampling_units"],
+            xor=["projection_stem"],
         ),
         sampling_range=dict(),
         sampling_units=dict(),
-        scale_input=dict(argstr='--scale %.3f', ),
-        smooth_surf=dict(argstr='--surf-fwhm %.3f', ),
-        smooth_vol=dict(argstr='--fwhm %.3f', ),
-        source_file=dict(
-            argstr='--mov %s',
-            extensions=None,
-            mandatory=True,
-        ),
+        scale_input=dict(argstr="--scale %.3f",),
+        smooth_surf=dict(argstr="--surf-fwhm %.3f",),
+        smooth_vol=dict(argstr="--fwhm %.3f",),
+        source_file=dict(argstr="--mov %s", extensions=None, mandatory=True,),
         subject_id=dict(),
         subjects_dir=dict(),
-        surf_reg=dict(
-            argstr='--surfreg %s',
-            requires=['target_subject'],
-        ),
-        surface=dict(argstr='--surf %s', ),
-        target_subject=dict(argstr='--trgsubject %s', ),
-        vox_file=dict(argstr='--nvox %s', ),
+        surf_reg=dict(argstr="--surfreg %s", requires=["target_subject"],),
+        surface=dict(argstr="--surf %s",),
+        target_subject=dict(argstr="--trgsubject %s",),
+        vox_file=dict(argstr="--nvox %s",),
     )
     inputs = SampleToSurface.input_spec()
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_SampleToSurface_outputs():
     output_map = dict(
-        hits_file=dict(extensions=None, ),
-        out_file=dict(extensions=None, ),
-        vox_file=dict(extensions=None, ),
+        hits_file=dict(extensions=None,),
+        out_file=dict(extensions=None,),
+        vox_file=dict(extensions=None,),
    )
     outputs = SampleToSurface.output_spec()
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SegStats.py b/nipype/interfaces/freesurfer/tests/test_auto_SegStats.py
index b330c689e9..5cb7321d50 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_SegStats.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_SegStats.py
@@ -5,110 +5,90 @@ def test_SegStats_inputs():
     input_map = dict(
         annot=dict(
-            argstr='--annot %s %s %s',
+            argstr="--annot %s %s %s",
             mandatory=True,
-            xor=('segmentation_file', 'annot', 'surf_label'),
+            xor=("segmentation_file", "annot", "surf_label"),
         ),
-        args=dict(argstr='%s', ),
-        avgwf_file=dict(argstr='--avgwfvol %s', ),
-        avgwf_txt_file=dict(argstr='--avgwf %s', ),
-        brain_vol=dict(argstr='--%s', ),
-        brainmask_file=dict(
-            argstr='--brainmask %s',
-            extensions=None,
-        ),
-        calc_power=dict(argstr='--%s', ),
-        calc_snr=dict(argstr='--snr', ),
+        args=dict(argstr="%s",),
+        avgwf_file=dict(argstr="--avgwfvol %s",),
+        avgwf_txt_file=dict(argstr="--avgwf %s",),
+        brain_vol=dict(argstr="--%s",),
+        brainmask_file=dict(argstr="--brainmask %s", extensions=None,),
+        calc_power=dict(argstr="--%s",),
+        calc_snr=dict(argstr="--snr",),
         color_table_file=dict(
-            argstr='--ctab %s',
+            argstr="--ctab %s",
             extensions=None,
-            xor=('color_table_file', 'default_color_table', 'gca_color_table'),
+            xor=("color_table_file", "default_color_table", "gca_color_table"),
         ),
-        cortex_vol_from_surf=dict(argstr='--surf-ctx-vol', ),
+        cortex_vol_from_surf=dict(argstr="--surf-ctx-vol",),
         default_color_table=dict(
-            argstr='--ctab-default',
-            xor=('color_table_file', 'default_color_table', 'gca_color_table'),
+            argstr="--ctab-default",
+            xor=("color_table_file", "default_color_table", "gca_color_table"),
         ),
-        empty=dict(argstr='--empty', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        etiv=dict(argstr='--etiv', ),
+        empty=dict(argstr="--empty",),
+        environ=dict(nohash=True, usedefault=True,),
+        etiv=dict(argstr="--etiv",),
         etiv_only=dict(),
-        euler=dict(argstr='--euler', ),
-        exclude_ctx_gm_wm=dict(argstr='--excl-ctxgmwm', ),
-        exclude_id=dict(argstr='--excludeid %d', ),
-        frame=dict(argstr='--frame %d', ),
+        euler=dict(argstr="--euler",),
+        exclude_ctx_gm_wm=dict(argstr="--excl-ctxgmwm",),
+        exclude_id=dict(argstr="--excludeid %d",),
+        frame=dict(argstr="--frame %d",),
         gca_color_table=dict(
-            argstr='--ctab-gca %s',
-            extensions=None,
-            xor=('color_table_file', 'default_color_table', 'gca_color_table'),
-        ),
-        in_file=dict(
-            argstr='--i %s',
-            extensions=None,
-        ),
-        in_intensity=dict(
-            argstr='--in %s --in-intensity-name %s',
+            argstr="--ctab-gca %s",
             extensions=None,
+            xor=("color_table_file", "default_color_table", "gca_color_table"),
         ),
+        in_file=dict(argstr="--i %s", extensions=None,),
+        in_intensity=dict(argstr="--in %s --in-intensity-name %s", extensions=None,),
         intensity_units=dict(
-            argstr='--in-intensity-units %s',
-            requires=['in_intensity'],
+            argstr="--in-intensity-units %s", requires=["in_intensity"],
         ),
-        mask_erode=dict(argstr='--maskerode %d', ),
-        mask_file=dict(
-            argstr='--mask %s',
-            extensions=None,
-        ),
-        mask_frame=dict(requires=['mask_file'], ),
-        mask_invert=dict(argstr='--maskinvert', ),
+        mask_erode=dict(argstr="--maskerode %d",),
+        mask_file=dict(argstr="--mask %s", extensions=None,),
+        mask_frame=dict(requires=["mask_file"],),
+        mask_invert=dict(argstr="--maskinvert",),
         mask_sign=dict(),
-        mask_thresh=dict(argstr='--maskthresh %f', ),
-        multiply=dict(argstr='--mul %f', ),
-        non_empty_only=dict(argstr='--nonempty', ),
-        partial_volume_file=dict(
-            argstr='--pv %s',
-            extensions=None,
-        ),
-        segment_id=dict(argstr='--id %s...', ),
+        mask_thresh=dict(argstr="--maskthresh %f",),
+        multiply=dict(argstr="--mul %f",),
+        non_empty_only=dict(argstr="--nonempty",),
+        partial_volume_file=dict(argstr="--pv %s", extensions=None,),
+        segment_id=dict(argstr="--id %s...",),
         segmentation_file=dict(
-            argstr='--seg %s',
+            argstr="--seg %s",
             extensions=None,
             mandatory=True,
-            xor=('segmentation_file', 'annot', 'surf_label'),
+            xor=("segmentation_file", "annot", "surf_label"),
         ),
-        sf_avg_file=dict(argstr='--sfavg %s', ),
-        subcort_gm=dict(argstr='--subcortgray', ),
+        sf_avg_file=dict(argstr="--sfavg %s",),
+        subcort_gm=dict(argstr="--subcortgray",),
         subjects_dir=dict(),
         summary_file=dict(
-            argstr='--sum %s',
-            extensions=None,
-            genfile=True,
-            position=-1,
+            argstr="--sum %s", extensions=None, genfile=True, position=-1,
         ),
-        supratent=dict(argstr='--supratent', ),
+        supratent=dict(argstr="--supratent",),
         surf_label=dict(
-            argstr='--slabel %s %s %s',
+            argstr="--slabel %s %s %s",
             mandatory=True,
-            xor=('segmentation_file', 'annot', 'surf_label'),
+            xor=("segmentation_file", "annot", "surf_label"),
         ),
-        total_gray=dict(argstr='--totalgray', ),
-        vox=dict(argstr='--vox %s', ),
-        wm_vol_from_surf=dict(argstr='--surf-wm-vol', ),
+        total_gray=dict(argstr="--totalgray",),
+        vox=dict(argstr="--vox %s",),
+        wm_vol_from_surf=dict(argstr="--surf-wm-vol",),
     )
     inputs = SegStats.input_spec()
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_SegStats_outputs():
     output_map = dict(
-        avgwf_file=dict(extensions=None, ),
-        avgwf_txt_file=dict(extensions=None, ),
-        sf_avg_file=dict(extensions=None, ),
-        summary_file=dict(extensions=None, ),
+        avgwf_file=dict(extensions=None,),
+        avgwf_txt_file=dict(extensions=None,),
+        sf_avg_file=dict(extensions=None,),
+        summary_file=dict(extensions=None,),
     )
     outputs = SegStats.output_spec()
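The `argstr` strings asserted throughout the SegStats hunk are printf-style templates that nipype substitutes into the final command line. A rough sketch under stated assumptions (the file names are placeholders that must exist on disk, since the file traits carry `exists=True`):

```python
# Sketch, not part of the patch: argstr metadata rendered into a command.
from nipype.interfaces.freesurfer import SegStats

ss = SegStats(segmentation_file="aseg.mgz", summary_file="aseg.stats")
print(ss.cmdline)  # approximately: mri_segstats --seg aseg.mgz --sum aseg.stats
```

The three-way `xor=("segmentation_file", "annot", "surf_label")` means exactly one source of segmentation labels may be given, mirroring how `mri_segstats` itself treats `--seg`, `--annot`, and `--slabel`.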
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SegStatsReconAll.py b/nipype/interfaces/freesurfer/tests/test_auto_SegStatsReconAll.py
index 11d1972e16..2301087a3b 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_SegStatsReconAll.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_SegStatsReconAll.py
@@ -5,150 +5,102 @@ def test_SegStatsReconAll_inputs():
     input_map = dict(
         annot=dict(
-            argstr='--annot %s %s %s',
+            argstr="--annot %s %s %s",
             mandatory=True,
-            xor=('segmentation_file', 'annot', 'surf_label'),
-        ),
-        args=dict(argstr='%s', ),
-        aseg=dict(extensions=None, ),
-        avgwf_file=dict(argstr='--avgwfvol %s', ),
-        avgwf_txt_file=dict(argstr='--avgwf %s', ),
-        brain_vol=dict(argstr='--%s', ),
-        brainmask_file=dict(
-            argstr='--brainmask %s',
-            extensions=None,
-        ),
-        calc_power=dict(argstr='--%s', ),
-        calc_snr=dict(argstr='--snr', ),
+            xor=("segmentation_file", "annot", "surf_label"),
+        ),
+        args=dict(argstr="%s",),
+        aseg=dict(extensions=None,),
+        avgwf_file=dict(argstr="--avgwfvol %s",),
+        avgwf_txt_file=dict(argstr="--avgwf %s",),
+        brain_vol=dict(argstr="--%s",),
+        brainmask_file=dict(argstr="--brainmask %s", extensions=None,),
+        calc_power=dict(argstr="--%s",),
+        calc_snr=dict(argstr="--snr",),
         color_table_file=dict(
-            argstr='--ctab %s',
+            argstr="--ctab %s",
             extensions=None,
-            xor=('color_table_file', 'default_color_table', 'gca_color_table'),
+            xor=("color_table_file", "default_color_table", "gca_color_table"),
         ),
         copy_inputs=dict(),
-        cortex_vol_from_surf=dict(argstr='--surf-ctx-vol', ),
+        cortex_vol_from_surf=dict(argstr="--surf-ctx-vol",),
         default_color_table=dict(
-            argstr='--ctab-default',
-            xor=('color_table_file', 'default_color_table', 'gca_color_table'),
+            argstr="--ctab-default",
+            xor=("color_table_file", "default_color_table", "gca_color_table"),
         ),
-        empty=dict(argstr='--empty', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        etiv=dict(argstr='--etiv', ),
+        empty=dict(argstr="--empty",),
+        environ=dict(nohash=True, usedefault=True,),
+        etiv=dict(argstr="--etiv",),
         etiv_only=dict(),
-        euler=dict(argstr='--euler', ),
-        exclude_ctx_gm_wm=dict(argstr='--excl-ctxgmwm', ),
-        exclude_id=dict(argstr='--excludeid %d', ),
-        frame=dict(argstr='--frame %d', ),
+        euler=dict(argstr="--euler",),
+        exclude_ctx_gm_wm=dict(argstr="--excl-ctxgmwm",),
+        exclude_id=dict(argstr="--excludeid %d",),
+        frame=dict(argstr="--frame %d",),
         gca_color_table=dict(
-            argstr='--ctab-gca %s',
-            extensions=None,
-            xor=('color_table_file', 'default_color_table', 'gca_color_table'),
-        ),
-        in_file=dict(
-            argstr='--i %s',
-            extensions=None,
-        ),
-        in_intensity=dict(
-            argstr='--in %s --in-intensity-name %s',
+            argstr="--ctab-gca %s",
             extensions=None,
+            xor=("color_table_file", "default_color_table", "gca_color_table"),
         ),
+        in_file=dict(argstr="--i %s", extensions=None,),
+        in_intensity=dict(argstr="--in %s --in-intensity-name %s", extensions=None,),
         intensity_units=dict(
-            argstr='--in-intensity-units %s',
-            requires=['in_intensity'],
-        ),
-        lh_orig_nofix=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        lh_pial=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        lh_white=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        mask_erode=dict(argstr='--maskerode %d', ),
-        mask_file=dict(
-            argstr='--mask %s',
-            extensions=None,
-        ),
-        mask_frame=dict(requires=['mask_file'], ),
-        mask_invert=dict(argstr='--maskinvert', ),
+            argstr="--in-intensity-units %s", requires=["in_intensity"],
+        ),
+        lh_orig_nofix=dict(extensions=None, mandatory=True,),
+        lh_pial=dict(extensions=None, mandatory=True,),
+        lh_white=dict(extensions=None, mandatory=True,),
+        mask_erode=dict(argstr="--maskerode %d",),
+        mask_file=dict(argstr="--mask %s", extensions=None,),
+        mask_frame=dict(requires=["mask_file"],),
+        mask_invert=dict(argstr="--maskinvert",),
         mask_sign=dict(),
-        mask_thresh=dict(argstr='--maskthresh %f', ),
-        multiply=dict(argstr='--mul %f', ),
-        non_empty_only=dict(argstr='--nonempty', ),
-        partial_volume_file=dict(
-            argstr='--pv %s',
-            extensions=None,
-        ),
-        presurf_seg=dict(extensions=None, ),
-        rh_orig_nofix=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        rh_pial=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        rh_white=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        ribbon=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        segment_id=dict(argstr='--id %s...', ),
+        mask_thresh=dict(argstr="--maskthresh %f",),
+        multiply=dict(argstr="--mul %f",),
+        non_empty_only=dict(argstr="--nonempty",),
+        partial_volume_file=dict(argstr="--pv %s", extensions=None,),
+        presurf_seg=dict(extensions=None,),
+        rh_orig_nofix=dict(extensions=None, mandatory=True,),
+        rh_pial=dict(extensions=None, mandatory=True,),
+        rh_white=dict(extensions=None, mandatory=True,),
+        ribbon=dict(extensions=None, mandatory=True,),
+        segment_id=dict(argstr="--id %s...",),
         segmentation_file=dict(
-            argstr='--seg %s',
+            argstr="--seg %s",
             extensions=None,
             mandatory=True,
-            xor=('segmentation_file', 'annot', 'surf_label'),
-        ),
-        sf_avg_file=dict(argstr='--sfavg %s', ),
-        subcort_gm=dict(argstr='--subcortgray', ),
-        subject_id=dict(
-            argstr='--subject %s',
-            mandatory=True,
-            usedefault=True,
+            xor=("segmentation_file", "annot", "surf_label"),
         ),
+        sf_avg_file=dict(argstr="--sfavg %s",),
+        subcort_gm=dict(argstr="--subcortgray",),
+        subject_id=dict(argstr="--subject %s", mandatory=True, usedefault=True,),
         subjects_dir=dict(),
         summary_file=dict(
-            argstr='--sum %s',
-            extensions=None,
-            genfile=True,
-            position=-1,
+            argstr="--sum %s", extensions=None, genfile=True, position=-1,
         ),
-        supratent=dict(argstr='--supratent', ),
+        supratent=dict(argstr="--supratent",),
         surf_label=dict(
-            argstr='--slabel %s %s %s',
+            argstr="--slabel %s %s %s",
             mandatory=True,
-            xor=('segmentation_file', 'annot', 'surf_label'),
+            xor=("segmentation_file", "annot", "surf_label"),
         ),
-        total_gray=dict(argstr='--totalgray', ),
-        transform=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        vox=dict(argstr='--vox %s', ),
-        wm_vol_from_surf=dict(argstr='--surf-wm-vol', ),
+        total_gray=dict(argstr="--totalgray",),
+        transform=dict(extensions=None, mandatory=True,),
+        vox=dict(argstr="--vox %s",),
+        wm_vol_from_surf=dict(argstr="--surf-wm-vol",),
     )
     inputs = SegStatsReconAll.input_spec()
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_SegStatsReconAll_outputs():
     output_map = dict(
-        avgwf_file=dict(extensions=None, ),
-        avgwf_txt_file=dict(extensions=None, ),
-        sf_avg_file=dict(extensions=None, ),
-        summary_file=dict(extensions=None, ),
+        avgwf_file=dict(extensions=None,),
+        avgwf_txt_file=dict(extensions=None,),
+        sf_avg_file=dict(extensions=None,),
+        summary_file=dict(extensions=None,),
    )
     outputs = SegStatsReconAll.output_spec()
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SegmentCC.py b/nipype/interfaces/freesurfer/tests/test_auto_SegmentCC.py
index 25b1dd4aff..9343177c30 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_SegmentCC.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_SegmentCC.py
@@ -4,40 +4,21 @@
 def test_SegmentCC_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
+        args=dict(argstr="%s",),
         copy_inputs=dict(),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_file=dict(
-            argstr='-aseg %s',
-            extensions=None,
-            mandatory=True,
-        ),
-        in_norm=dict(
-            extensions=None,
-            mandatory=True,
-        ),
+        environ=dict(nohash=True, usedefault=True,),
+        in_file=dict(argstr="-aseg %s", extensions=None, mandatory=True,),
+        in_norm=dict(extensions=None, mandatory=True,),
         out_file=dict(
-            argstr='-o %s',
+            argstr="-o %s",
             extensions=None,
             hash_files=False,
             keep_extension=False,
-            name_source=['in_file'],
-            name_template='%s.auto.mgz',
-        ),
-        out_rotation=dict(
-            argstr='-lta %s',
-            extensions=None,
-            mandatory=True,
-        ),
-        subject_id=dict(
-            argstr='%s',
-            mandatory=True,
-            position=-1,
-            usedefault=True,
+            name_source=["in_file"],
+            name_template="%s.auto.mgz",
         ),
+        out_rotation=dict(argstr="-lta %s", extensions=None, mandatory=True,),
+        subject_id=dict(argstr="%s", mandatory=True, position=-1, usedefault=True,),
         subjects_dir=dict(),
     )
     inputs = SegmentCC.input_spec()
@@ -45,10 +26,11 @@ def test_SegmentCC_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_SegmentCC_outputs():
     output_map = dict(
-        out_file=dict(extensions=None, ),
-        out_rotation=dict(extensions=None, ),
+        out_file=dict(extensions=None,), out_rotation=dict(extensions=None,),
     )
     outputs = SegmentCC.output_spec()
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SegmentWM.py b/nipype/interfaces/freesurfer/tests/test_auto_SegmentWM.py
index 275b710262..87f4af54c0 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_SegmentWM.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_SegmentWM.py
@@ -4,23 +4,10 @@
 def test_SegmentWM_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_file=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-2,
-        ),
-        out_file=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-1,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,),
+        out_file=dict(argstr="%s", extensions=None, mandatory=True, position=-1,),
         subjects_dir=dict(),
     )
     inputs = SegmentWM.input_spec()
@@ -28,8 +15,10 @@ def test_SegmentWM_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_SegmentWM_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = SegmentWM.output_spec()
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Smooth.py b/nipype/interfaces/freesurfer/tests/test_auto_Smooth.py
index c6c4109728..bc180a00f6 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_Smooth.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_Smooth.py
@@ -4,55 +4,32 @@
 def test_Smooth_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_file=dict(
-            argstr='--i %s',
-            extensions=None,
-            mandatory=True,
-        ),
-        num_iters=dict(
-            argstr='--niters %d',
-            mandatory=True,
-            xor=['surface_fwhm'],
-        ),
-        proj_frac=dict(
-            argstr='--projfrac %s',
-            xor=['proj_frac_avg'],
-        ),
-        proj_frac_avg=dict(
-            argstr='--projfrac-avg %.2f %.2f %.2f',
-            xor=['proj_frac'],
-        ),
-        reg_file=dict(
-            argstr='--reg %s',
-            extensions=None,
-            mandatory=True,
-        ),
-        smoothed_file=dict(
-            argstr='--o %s',
-            extensions=None,
-            genfile=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        in_file=dict(argstr="--i %s", extensions=None, mandatory=True,),
+        num_iters=dict(argstr="--niters %d", mandatory=True, xor=["surface_fwhm"],),
+        proj_frac=dict(argstr="--projfrac %s", xor=["proj_frac_avg"],),
+        proj_frac_avg=dict(argstr="--projfrac-avg %.2f %.2f %.2f", xor=["proj_frac"],),
+        reg_file=dict(argstr="--reg %s", extensions=None, mandatory=True,),
+        smoothed_file=dict(argstr="--o %s", extensions=None, genfile=True,),
         subjects_dir=dict(),
         surface_fwhm=dict(
-            argstr='--fwhm %f',
+            argstr="--fwhm %f",
             mandatory=True,
-            requires=['reg_file'],
-            xor=['num_iters'],
+            requires=["reg_file"],
+            xor=["num_iters"],
         ),
-        vol_fwhm=dict(argstr='--vol-fwhm %f', ),
+        vol_fwhm=dict(argstr="--vol-fwhm %f",),
     )
     inputs = Smooth.input_spec()
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Smooth_outputs():
-    output_map = dict(smoothed_file=dict(extensions=None, ), )
+    output_map = dict(smoothed_file=dict(extensions=None,),)
     outputs = Smooth.output_spec()
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SmoothTessellation.py b/nipype/interfaces/freesurfer/tests/test_auto_SmoothTessellation.py
index 52dbbcf934..aed52899f5 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_SmoothTessellation.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_SmoothTessellation.py
@@ -4,51 +4,35 @@
 def test_SmoothTessellation_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        curvature_averaging_iterations=dict(argstr='-a %d', ),
-        disable_estimates=dict(argstr='-nw', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        gaussian_curvature_norm_steps=dict(argstr='%d ', ),
-        gaussian_curvature_smoothing_steps=dict(argstr='%d', ),
+        args=dict(argstr="%s",),
+        curvature_averaging_iterations=dict(argstr="-a %d",),
+        disable_estimates=dict(argstr="-nw",),
+        environ=dict(nohash=True, usedefault=True,),
+        gaussian_curvature_norm_steps=dict(argstr="%d ",),
+        gaussian_curvature_smoothing_steps=dict(argstr="%d",),
         in_file=dict(
-            argstr='%s',
-            copyfile=True,
-            extensions=None,
-            mandatory=True,
-            position=-2,
-        ),
-        normalize_area=dict(argstr='-area', ),
-        out_area_file=dict(
-            argstr='-b %s',
-            extensions=None,
-        ),
-        out_curvature_file=dict(
-            argstr='-c %s',
-            extensions=None,
+            argstr="%s", copyfile=True, extensions=None, mandatory=True, position=-2,
         ),
-        out_file=dict(
-            argstr='%s',
-            extensions=None,
-            genfile=True,
-            position=-1,
-        ),
-        seed=dict(argstr='-seed %d', ),
-        smoothing_iterations=dict(argstr='-n %d', ),
-        snapshot_writing_iterations=dict(argstr='-w %d', ),
+        normalize_area=dict(argstr="-area",),
+        out_area_file=dict(argstr="-b %s", extensions=None,),
+        out_curvature_file=dict(argstr="-c %s", extensions=None,),
+        out_file=dict(argstr="%s", extensions=None, genfile=True, position=-1,),
+        seed=dict(argstr="-seed %d",),
+        smoothing_iterations=dict(argstr="-n %d",),
+        snapshot_writing_iterations=dict(argstr="-w %d",),
         subjects_dir=dict(),
-        use_gaussian_curvature_smoothing=dict(argstr='-g', ),
-        use_momentum=dict(argstr='-m', ),
+        use_gaussian_curvature_smoothing=dict(argstr="-g",),
+        use_momentum=dict(argstr="-m",),
     )
     inputs = SmoothTessellation.input_spec()
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_SmoothTessellation_outputs():
-    output_map = dict(surface=dict(extensions=None, ), )
+    output_map = dict(surface=dict(extensions=None,),)
     outputs = SmoothTessellation.output_spec()
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Sphere.py b/nipype/interfaces/freesurfer/tests/test_auto_Sphere.py
index 4ec5392c31..407354fbfe 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_Sphere.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_Sphere.py
@@ -4,33 +4,23 @@
 def test_Sphere_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
         in_file=dict(
-            argstr='%s',
-            copyfile=True,
-            extensions=None,
-            mandatory=True,
-            position=-2,
-        ),
-        in_smoothwm=dict(
-            copyfile=True,
-            extensions=None,
+            argstr="%s", copyfile=True, extensions=None, mandatory=True, position=-2,
         ),
-        magic=dict(argstr='-q', ),
+        in_smoothwm=dict(copyfile=True, extensions=None,),
+        magic=dict(argstr="-q",),
         num_threads=dict(),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
             extensions=None,
             hash_files=False,
-            name_source=['in_file'],
-            name_template='%s.sphere',
+            name_source=["in_file"],
+            name_template="%s.sphere",
             position=-1,
         ),
-        seed=dict(argstr='-seed %d', ),
+        seed=dict(argstr="-seed %d",),
         subjects_dir=dict(),
     )
     inputs = Sphere.input_spec()
@@ -38,8 +28,10 @@ def test_Sphere_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Sphere_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = Sphere.output_spec()
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SphericalAverage.py b/nipype/interfaces/freesurfer/tests/test_auto_SphericalAverage.py
index 6fa42333a5..41c61ea0fa 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_SphericalAverage.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_SphericalAverage.py
@@ -4,62 +4,29 @@
 def test_SphericalAverage_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        erode=dict(argstr='-erode %d', ),
-        fname=dict(
-            argstr='%s',
-            mandatory=True,
-            position=-5,
-        ),
-        hemisphere=dict(
-            argstr='%s',
-            mandatory=True,
-            position=-4,
-        ),
-        in_average=dict(
-            argstr='%s',
-            genfile=True,
-            position=-2,
-        ),
-        in_orig=dict(
-            argstr='-orig %s',
-            extensions=None,
-        ),
-        in_surf=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=-3,
-        ),
-        out_file=dict(
-            argstr='%s',
-            extensions=None,
-            genfile=True,
-            position=-1,
-        ),
-        subject_id=dict(
-            argstr='-o %s',
-            mandatory=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        erode=dict(argstr="-erode %d",),
+        fname=dict(argstr="%s", mandatory=True, position=-5,),
+        hemisphere=dict(argstr="%s", mandatory=True, position=-4,),
+        in_average=dict(argstr="%s", genfile=True, position=-2,),
+        in_orig=dict(argstr="-orig %s", extensions=None,),
+        in_surf=dict(argstr="%s", extensions=None, mandatory=True, position=-3,),
+        out_file=dict(argstr="%s", extensions=None, genfile=True, position=-1,),
+        subject_id=dict(argstr="-o %s", mandatory=True,),
         subjects_dir=dict(),
-        threshold=dict(argstr='-t %.1f', ),
-        which=dict(
-            argstr='%s',
-            mandatory=True,
-            position=-6,
-        ),
+        threshold=dict(argstr="-t %.1f",),
+        which=dict(argstr="%s", mandatory=True, position=-6,),
     )
     inputs = SphericalAverage.input_spec()
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_SphericalAverage_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = SphericalAverage.output_spec()
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Surface2VolTransform.py b/nipype/interfaces/freesurfer/tests/test_auto_Surface2VolTransform.py
index d4fc2c2125..c9e20a00b5 100644
--- a/nipype/interfaces/freesurfer/tests/test_auto_Surface2VolTransform.py
+++ b/nipype/interfaces/freesurfer/tests/test_auto_Surface2VolTransform.py
@@ -4,56 +4,38 @@
 def test_Surface2VolTransform_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        hemi=dict(
-            argstr='--hemi %s',
-            mandatory=True,
-        ),
-        mkmask=dict(
-            argstr='--mkmask',
-            xor=['source_file'],
-        ),
-        projfrac=dict(argstr='--projfrac %s', ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        hemi=dict(argstr="--hemi %s", mandatory=True,),
+        mkmask=dict(argstr="--mkmask", xor=["source_file"],),
+        projfrac=dict(argstr="--projfrac %s",),
         reg_file=dict(
-            argstr='--volreg %s',
-            extensions=None,
-            mandatory=True,
-            xor=['subject_id'],
+            argstr="--volreg %s", extensions=None, mandatory=True, xor=["subject_id"],
         ),
         source_file=dict(
-            argstr='--surfval %s',
+            argstr="--surfval %s",
             copyfile=False,
             extensions=None,
             mandatory=True,
-            xor=['mkmask'],
-        ),
-        subject_id=dict(
-            argstr='--identity %s',
-            xor=['reg_file'],
-        ),
-
subjects_dir=dict(argstr='--sd %s', ), - surf_name=dict(argstr='--surf %s', ), - template_file=dict( - argstr='--template %s', - extensions=None, + xor=["mkmask"], ), + subject_id=dict(argstr="--identity %s", xor=["reg_file"],), + subjects_dir=dict(argstr="--sd %s",), + surf_name=dict(argstr="--surf %s",), + template_file=dict(argstr="--template %s", extensions=None,), transformed_file=dict( - argstr='--outvol %s', + argstr="--outvol %s", extensions=None, hash_files=False, - name_source=['source_file'], - name_template='%s_asVol.nii', + name_source=["source_file"], + name_template="%s_asVol.nii", ), vertexvol_file=dict( - argstr='--vtxvol %s', + argstr="--vtxvol %s", extensions=None, hash_files=False, - name_source=['source_file'], - name_template='%s_asVol_vertex.nii', + name_source=["source_file"], + name_template="%s_asVol_vertex.nii", ), ) inputs = Surface2VolTransform.input_spec() @@ -61,10 +43,11 @@ def test_Surface2VolTransform_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Surface2VolTransform_outputs(): output_map = dict( - transformed_file=dict(extensions=None, ), - vertexvol_file=dict(extensions=None, ), + transformed_file=dict(extensions=None,), vertexvol_file=dict(extensions=None,), ) outputs = Surface2VolTransform.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSmooth.py b/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSmooth.py index 56dc5ba071..020f7af7a8 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSmooth.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSmooth.py @@ -4,42 +4,16 @@ def test_SurfaceSmooth_inputs(): input_map = dict( - args=dict(argstr='%s', ), - cortex=dict( - argstr='--cortex', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fwhm=dict( - argstr='--fwhm %.4f', - xor=['smooth_iters'], - ), - hemi=dict( - argstr='--hemi %s', - mandatory=True, - ), - in_file=dict( - argstr='--sval %s', - extensions=None, - mandatory=True, - ), - out_file=dict( - argstr='--tval %s', - extensions=None, - genfile=True, - ), - reshape=dict(argstr='--reshape', ), - smooth_iters=dict( - argstr='--smooth %d', - xor=['fwhm'], - ), - subject_id=dict( - argstr='--s %s', - mandatory=True, - ), + args=dict(argstr="%s",), + cortex=dict(argstr="--cortex", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + fwhm=dict(argstr="--fwhm %.4f", xor=["smooth_iters"],), + hemi=dict(argstr="--hemi %s", mandatory=True,), + in_file=dict(argstr="--sval %s", extensions=None, mandatory=True,), + out_file=dict(argstr="--tval %s", extensions=None, genfile=True,), + reshape=dict(argstr="--reshape",), + smooth_iters=dict(argstr="--smooth %d", xor=["fwhm"],), + subject_id=dict(argstr="--s %s", mandatory=True,), subjects_dir=dict(), ) inputs = SurfaceSmooth.input_spec() @@ -47,8 +21,10 @@ def test_SurfaceSmooth_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SurfaceSmooth_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = SurfaceSmooth.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSnapshots.py b/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSnapshots.py index d3b37cc2b0..b4b3e8a5e2 100644 --- 
a/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSnapshots.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSnapshots.py @@ -4,107 +4,61 @@ def test_SurfaceSnapshots_inputs(): input_map = dict( - annot_file=dict( - argstr='-annotation %s', - extensions=None, - xor=['annot_name'], - ), - annot_name=dict( - argstr='-annotation %s', - xor=['annot_file'], - ), - args=dict(argstr='%s', ), - colortable=dict( - argstr='-colortable %s', - extensions=None, - ), - demean_overlay=dict(argstr='-zm', ), - environ=dict( - nohash=True, - usedefault=True, - ), - hemi=dict( - argstr='%s', - mandatory=True, - position=2, - ), + annot_file=dict(argstr="-annotation %s", extensions=None, xor=["annot_name"],), + annot_name=dict(argstr="-annotation %s", xor=["annot_file"],), + args=dict(argstr="%s",), + colortable=dict(argstr="-colortable %s", extensions=None,), + demean_overlay=dict(argstr="-zm",), + environ=dict(nohash=True, usedefault=True,), + hemi=dict(argstr="%s", mandatory=True, position=2,), identity_reg=dict( - argstr='-overlay-reg-identity', - xor=['overlay_reg', 'identity_reg', 'mni152_reg'], - ), - invert_overlay=dict(argstr='-invphaseflag 1', ), - label_file=dict( - argstr='-label %s', - extensions=None, - xor=['label_name'], - ), - label_name=dict( - argstr='-label %s', - xor=['label_file'], - ), - label_outline=dict(argstr='-label-outline', ), - label_under=dict(argstr='-labels-under', ), + argstr="-overlay-reg-identity", + xor=["overlay_reg", "identity_reg", "mni152_reg"], + ), + invert_overlay=dict(argstr="-invphaseflag 1",), + label_file=dict(argstr="-label %s", extensions=None, xor=["label_name"],), + label_name=dict(argstr="-label %s", xor=["label_file"],), + label_outline=dict(argstr="-label-outline",), + label_under=dict(argstr="-labels-under",), mni152_reg=dict( - argstr='-mni152reg', - xor=['overlay_reg', 'identity_reg', 'mni152_reg'], + argstr="-mni152reg", xor=["overlay_reg", "identity_reg", "mni152_reg"], ), - orig_suffix=dict(argstr='-orig %s', ), + orig_suffix=dict(argstr="-orig %s",), overlay=dict( - argstr='-overlay %s', - extensions=None, - requires=['overlay_range'], + argstr="-overlay %s", extensions=None, requires=["overlay_range"], ), - overlay_range=dict(argstr='%s', ), - overlay_range_offset=dict(argstr='-foffset %.3f', ), + overlay_range=dict(argstr="%s",), + overlay_range_offset=dict(argstr="-foffset %.3f",), overlay_reg=dict( - argstr='-overlay-reg %s', - extensions=None, - xor=['overlay_reg', 'identity_reg', 'mni152_reg'], - ), - patch_file=dict( - argstr='-patch %s', + argstr="-overlay-reg %s", extensions=None, + xor=["overlay_reg", "identity_reg", "mni152_reg"], ), - reverse_overlay=dict(argstr='-revphaseflag 1', ), + patch_file=dict(argstr="-patch %s", extensions=None,), + reverse_overlay=dict(argstr="-revphaseflag 1",), screenshot_stem=dict(), - show_color_scale=dict(argstr='-colscalebarflag 1', ), - show_color_text=dict(argstr='-colscaletext 1', ), - show_curv=dict( - argstr='-curv', - xor=['show_gray_curv'], - ), - show_gray_curv=dict( - argstr='-gray', - xor=['show_curv'], - ), + show_color_scale=dict(argstr="-colscalebarflag 1",), + show_color_text=dict(argstr="-colscaletext 1",), + show_curv=dict(argstr="-curv", xor=["show_gray_curv"],), + show_gray_curv=dict(argstr="-gray", xor=["show_curv"],), six_images=dict(), - sphere_suffix=dict(argstr='-sphere %s', ), - stem_template_args=dict(requires=['screenshot_stem'], ), - subject_id=dict( - argstr='%s', - mandatory=True, - position=1, - ), + sphere_suffix=dict(argstr="-sphere %s",), + 
stem_template_args=dict(requires=["screenshot_stem"],), + subject_id=dict(argstr="%s", mandatory=True, position=1,), subjects_dir=dict(), - surface=dict( - argstr='%s', - mandatory=True, - position=3, - ), - tcl_script=dict( - argstr='%s', - extensions=None, - genfile=True, - ), - truncate_overlay=dict(argstr='-truncphaseflag 1', ), + surface=dict(argstr="%s", mandatory=True, position=3,), + tcl_script=dict(argstr="%s", extensions=None, genfile=True,), + truncate_overlay=dict(argstr="-truncphaseflag 1",), ) inputs = SurfaceSnapshots.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SurfaceSnapshots_outputs(): - output_map = dict(snapshots=dict(), ) + output_map = dict(snapshots=dict(),) outputs = SurfaceSnapshots.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SurfaceTransform.py b/nipype/interfaces/freesurfer/tests/test_auto_SurfaceTransform.py index 67ea020def..d8bcf6eb28 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SurfaceTransform.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SurfaceTransform.py @@ -4,57 +4,40 @@ def test_SurfaceTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - hemi=dict( - argstr='--hemi %s', - mandatory=True, - ), - out_file=dict( - argstr='--tval %s', - extensions=None, - genfile=True, - ), - reshape=dict(argstr='--reshape', ), - reshape_factor=dict(argstr='--reshape-factor', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + hemi=dict(argstr="--hemi %s", mandatory=True,), + out_file=dict(argstr="--tval %s", extensions=None, genfile=True,), + reshape=dict(argstr="--reshape",), + reshape_factor=dict(argstr="--reshape-factor",), source_annot_file=dict( - argstr='--sval-annot %s', + argstr="--sval-annot %s", extensions=None, mandatory=True, - xor=['source_file'], + xor=["source_file"], ), source_file=dict( - argstr='--sval %s', + argstr="--sval %s", extensions=None, mandatory=True, - xor=['source_annot_file'], - ), - source_subject=dict( - argstr='--srcsubject %s', - mandatory=True, - ), - source_type=dict( - argstr='--sfmt %s', - requires=['source_file'], + xor=["source_annot_file"], ), + source_subject=dict(argstr="--srcsubject %s", mandatory=True,), + source_type=dict(argstr="--sfmt %s", requires=["source_file"],), subjects_dir=dict(), - target_ico_order=dict(argstr='--trgicoorder %d', ), - target_subject=dict( - argstr='--trgsubject %s', - mandatory=True, - ), - target_type=dict(argstr='--tfmt %s', ), + target_ico_order=dict(argstr="--trgicoorder %d",), + target_subject=dict(argstr="--trgsubject %s", mandatory=True,), + target_type=dict(argstr="--tfmt %s",), ) inputs = SurfaceTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SurfaceTransform_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = SurfaceTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SynthesizeFLASH.py b/nipype/interfaces/freesurfer/tests/test_auto_SynthesizeFLASH.py index 73392602a4..396d46e1a9 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SynthesizeFLASH.py +++ 
b/nipype/interfaces/freesurfer/tests/test_auto_SynthesizeFLASH.py @@ -4,56 +4,26 @@ def test_SynthesizeFLASH_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixed_weighting=dict( - argstr='-w', - position=1, - ), - flip_angle=dict( - argstr='%.2f', - mandatory=True, - position=3, - ), - out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - ), - pd_image=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=6, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + fixed_weighting=dict(argstr="-w", position=1,), + flip_angle=dict(argstr="%.2f", mandatory=True, position=3,), + out_file=dict(argstr="%s", extensions=None, genfile=True,), + pd_image=dict(argstr="%s", extensions=None, mandatory=True, position=6,), subjects_dir=dict(), - t1_image=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=5, - ), - te=dict( - argstr='%.3f', - mandatory=True, - position=4, - ), - tr=dict( - argstr='%.2f', - mandatory=True, - position=2, - ), + t1_image=dict(argstr="%s", extensions=None, mandatory=True, position=5,), + te=dict(argstr="%.3f", mandatory=True, position=4,), + tr=dict(argstr="%.2f", mandatory=True, position=2,), ) inputs = SynthesizeFLASH.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SynthesizeFLASH_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = SynthesizeFLASH.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_TalairachAVI.py b/nipype/interfaces/freesurfer/tests/test_auto_TalairachAVI.py index e5f6427678..41ad7fef4e 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_TalairachAVI.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_TalairachAVI.py @@ -4,22 +4,11 @@ def test_TalairachAVI_inputs(): input_map = dict( - args=dict(argstr='%s', ), - atlas=dict(argstr='--atlas %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='--i %s', - extensions=None, - mandatory=True, - ), - out_file=dict( - argstr='--xfm %s', - extensions=None, - mandatory=True, - ), + args=dict(argstr="%s",), + atlas=dict(argstr="--atlas %s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="--i %s", extensions=None, mandatory=True,), + out_file=dict(argstr="--xfm %s", extensions=None, mandatory=True,), subjects_dir=dict(), ) inputs = TalairachAVI.input_spec() @@ -27,11 +16,13 @@ def test_TalairachAVI_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TalairachAVI_outputs(): output_map = dict( - out_file=dict(extensions=None, ), - out_log=dict(extensions=None, ), - out_txt=dict(extensions=None, ), + out_file=dict(extensions=None,), + out_log=dict(extensions=None,), + out_txt=dict(extensions=None,), ) outputs = TalairachAVI.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_TalairachQC.py b/nipype/interfaces/freesurfer/tests/test_auto_TalairachQC.py index ecf531879e..52b07074e1 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_TalairachQC.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_TalairachQC.py @@ -4,17 +4,9 @@ def test_TalairachQC_inputs(): input_map = dict( - args=dict(argstr='%s', ), - 
environ=dict( - nohash=True, - usedefault=True, - ), - log_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=0, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + log_file=dict(argstr="%s", extensions=None, mandatory=True, position=0,), subjects_dir=dict(), ) inputs = TalairachQC.input_spec() @@ -22,11 +14,10 @@ def test_TalairachQC_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TalairachQC_outputs(): - output_map = dict(log_file=dict( - extensions=None, - usedefault=True, - ), ) + output_map = dict(log_file=dict(extensions=None, usedefault=True,),) outputs = TalairachQC.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Tkregister2.py b/nipype/interfaces/freesurfer/tests/test_auto_Tkregister2.py index 3074cc3934..0e80196220 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Tkregister2.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Tkregister2.py @@ -4,73 +4,40 @@ def test_Tkregister2_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fsl_in_matrix=dict( - argstr='--fsl %s', - extensions=None, - ), - fsl_out=dict(argstr='--fslregout %s', ), - fstal=dict( - argstr='--fstal', - xor=['target_image', 'moving_image', 'reg_file'], - ), - fstarg=dict( - argstr='--fstarg', - xor=['target_image'], - ), - invert_lta_in=dict(requires=['lta_in'], ), - invert_lta_out=dict( - argstr='--ltaout-inv', - requires=['lta_in'], - ), - lta_in=dict( - argstr='--lta %s', - extensions=None, - ), - lta_out=dict(argstr='--ltaout %s', ), - moving_image=dict( - argstr='--mov %s', - extensions=None, - mandatory=True, - ), - movscale=dict(argstr='--movscale %f', ), - noedit=dict( - argstr='--noedit', - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + fsl_in_matrix=dict(argstr="--fsl %s", extensions=None,), + fsl_out=dict(argstr="--fslregout %s",), + fstal=dict(argstr="--fstal", xor=["target_image", "moving_image", "reg_file"],), + fstarg=dict(argstr="--fstarg", xor=["target_image"],), + invert_lta_in=dict(requires=["lta_in"],), + invert_lta_out=dict(argstr="--ltaout-inv", requires=["lta_in"],), + lta_in=dict(argstr="--lta %s", extensions=None,), + lta_out=dict(argstr="--ltaout %s",), + moving_image=dict(argstr="--mov %s", extensions=None, mandatory=True,), + movscale=dict(argstr="--movscale %f",), + noedit=dict(argstr="--noedit", usedefault=True,), reg_file=dict( - argstr='--reg %s', - extensions=None, - mandatory=True, - usedefault=True, + argstr="--reg %s", extensions=None, mandatory=True, usedefault=True, ), - reg_header=dict(argstr='--regheader', ), - subject_id=dict(argstr='--s %s', ), + reg_header=dict(argstr="--regheader",), + subject_id=dict(argstr="--s %s",), subjects_dir=dict(), - target_image=dict( - argstr='--targ %s', - extensions=None, - xor=['fstarg'], - ), - xfm=dict( - argstr='--xfm %s', - extensions=None, - ), + target_image=dict(argstr="--targ %s", extensions=None, xor=["fstarg"],), + xfm=dict(argstr="--xfm %s", extensions=None,), ) inputs = Tkregister2.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Tkregister2_outputs(): output_map = dict( - fsl_file=dict(extensions=None, ), - lta_file=dict(extensions=None, ), - 
reg_file=dict(extensions=None, ), + fsl_file=dict(extensions=None,), + lta_file=dict(extensions=None,), + reg_file=dict(extensions=None,), ) outputs = Tkregister2.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_UnpackSDICOMDir.py b/nipype/interfaces/freesurfer/tests/test_auto_UnpackSDICOMDir.py index bd82d0c198..b4378f3cac 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_UnpackSDICOMDir.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_UnpackSDICOMDir.py @@ -4,45 +4,33 @@ def test_UnpackSDICOMDir_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), config=dict( - argstr='-cfg %s', + argstr="-cfg %s", extensions=None, mandatory=True, - xor=('run_info', 'config', 'seq_config'), + xor=("run_info", "config", "seq_config"), ), - dir_structure=dict(argstr='-%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - log_file=dict( - argstr='-log %s', - extensions=None, - ), - no_info_dump=dict(argstr='-noinfodump', ), - no_unpack_err=dict(argstr='-no-unpackerr', ), - output_dir=dict(argstr='-targ %s', ), + dir_structure=dict(argstr="-%s",), + environ=dict(nohash=True, usedefault=True,), + log_file=dict(argstr="-log %s", extensions=None,), + no_info_dump=dict(argstr="-noinfodump",), + no_unpack_err=dict(argstr="-no-unpackerr",), + output_dir=dict(argstr="-targ %s",), run_info=dict( - argstr='-run %d %s %s %s', + argstr="-run %d %s %s %s", mandatory=True, - xor=('run_info', 'config', 'seq_config'), - ), - scan_only=dict( - argstr='-scanonly %s', - extensions=None, + xor=("run_info", "config", "seq_config"), ), + scan_only=dict(argstr="-scanonly %s", extensions=None,), seq_config=dict( - argstr='-seqcfg %s', + argstr="-seqcfg %s", extensions=None, mandatory=True, - xor=('run_info', 'config', 'seq_config'), - ), - source_dir=dict( - argstr='-src %s', - mandatory=True, + xor=("run_info", "config", "seq_config"), ), - spm_zeropad=dict(argstr='-nspmzeropad %d', ), + source_dir=dict(argstr="-src %s", mandatory=True,), + spm_zeropad=dict(argstr="-nspmzeropad %d",), subjects_dir=dict(), ) inputs = UnpackSDICOMDir.input_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_VolumeMask.py b/nipype/interfaces/freesurfer/tests/test_auto_VolumeMask.py index 1a45d4cab3..cef9ddfedc 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_VolumeMask.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_VolumeMask.py @@ -4,60 +4,21 @@ def test_VolumeMask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - aseg=dict( - extensions=None, - xor=['in_aseg'], - ), + args=dict(argstr="%s",), + aseg=dict(extensions=None, xor=["in_aseg"],), copy_inputs=dict(), - environ=dict( - nohash=True, - usedefault=True, - ), - in_aseg=dict( - argstr='--aseg_name %s', - extensions=None, - xor=['aseg'], - ), - left_ribbonlabel=dict( - argstr='--label_left_ribbon %d', - mandatory=True, - ), - left_whitelabel=dict( - argstr='--label_left_white %d', - mandatory=True, - ), - lh_pial=dict( - extensions=None, - mandatory=True, - ), - lh_white=dict( - extensions=None, - mandatory=True, - ), - rh_pial=dict( - extensions=None, - mandatory=True, - ), - rh_white=dict( - extensions=None, - mandatory=True, - ), - right_ribbonlabel=dict( - argstr='--label_right_ribbon %d', - mandatory=True, - ), - right_whitelabel=dict( - argstr='--label_right_white %d', - mandatory=True, - ), - save_ribbon=dict(argstr='--save_ribbon', ), - subject_id=dict( - argstr='%s', - mandatory=True, - position=-1, - usedefault=True, - ), + environ=dict(nohash=True, usedefault=True,), + 
in_aseg=dict(argstr="--aseg_name %s", extensions=None, xor=["aseg"],), + left_ribbonlabel=dict(argstr="--label_left_ribbon %d", mandatory=True,), + left_whitelabel=dict(argstr="--label_left_white %d", mandatory=True,), + lh_pial=dict(extensions=None, mandatory=True,), + lh_white=dict(extensions=None, mandatory=True,), + rh_pial=dict(extensions=None, mandatory=True,), + rh_white=dict(extensions=None, mandatory=True,), + right_ribbonlabel=dict(argstr="--label_right_ribbon %d", mandatory=True,), + right_whitelabel=dict(argstr="--label_right_white %d", mandatory=True,), + save_ribbon=dict(argstr="--save_ribbon",), + subject_id=dict(argstr="%s", mandatory=True, position=-1, usedefault=True,), subjects_dir=dict(), ) inputs = VolumeMask.input_spec() @@ -65,11 +26,13 @@ def test_VolumeMask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_VolumeMask_outputs(): output_map = dict( - lh_ribbon=dict(extensions=None, ), - out_ribbon=dict(extensions=None, ), - rh_ribbon=dict(extensions=None, ), + lh_ribbon=dict(extensions=None,), + out_ribbon=dict(extensions=None,), + rh_ribbon=dict(extensions=None,), ) outputs = VolumeMask.output_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_WatershedSkullStrip.py b/nipype/interfaces/freesurfer/tests/test_auto_WatershedSkullStrip.py index d489d79295..649e4e497b 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_WatershedSkullStrip.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_WatershedSkullStrip.py @@ -4,44 +4,26 @@ def test_WatershedSkullStrip_inputs(): input_map = dict( - args=dict(argstr='%s', ), - brain_atlas=dict( - argstr='-brain_atlas %s', - extensions=None, - position=-4, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), + args=dict(argstr="%s",), + brain_atlas=dict(argstr="-brain_atlas %s", extensions=None, position=-4,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), out_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - usedefault=True, + argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, ), subjects_dir=dict(), - t1=dict(argstr='-T1', ), - transform=dict( - argstr='%s', - extensions=None, - position=-3, - ), + t1=dict(argstr="-T1",), + transform=dict(argstr="%s", extensions=None, position=-3,), ) inputs = WatershedSkullStrip.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_WatershedSkullStrip_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = WatershedSkullStrip.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_model.py b/nipype/interfaces/freesurfer/tests/test_model.py index cd8d129690..0fe0065af5 100644 --- a/nipype/interfaces/freesurfer/tests/test_model.py +++ b/nipype/interfaces/freesurfer/tests/test_model.py @@ -17,9 +17,9 @@ def test_concatenate(tmpdir): tmpdir.chdir() - in1 = tmpdir.join('cont1.nii').strpath - in2 = tmpdir.join('cont2.nii').strpath - out = 'bar.nii' + in1 = tmpdir.join("cont1.nii").strpath + in2 = tmpdir.join("cont2.nii").strpath + out = "bar.nii" data1 = np.zeros((3, 3, 3, 1), dtype=np.float32) 
data2 = np.ones((3, 3, 3, 5), dtype=np.float32) @@ -31,9 +31,8 @@ def test_concatenate(tmpdir): # Test default behavior res = model.Concatenate(in_files=[in1, in2]).run() - assert res.outputs.concatenated_file == tmpdir.join( - 'concat_output.nii.gz').strpath - assert np.allclose(nb.load('concat_output.nii.gz').get_data(), out_data) + assert res.outputs.concatenated_file == tmpdir.join("concat_output.nii.gz").strpath + assert np.allclose(nb.load("concat_output.nii.gz").get_data(), out_data) # Test specified concatenated_file res = model.Concatenate(in_files=[in1, in2], concatenated_file=out).run() @@ -41,17 +40,19 @@ def test_concatenate(tmpdir): assert np.allclose(nb.load(out, mmap=NUMPY_MMAP).get_data(), out_data) # Test in workflow - wf = pe.Workflow('test_concatenate', base_dir=tmpdir.strpath) + wf = pe.Workflow("test_concatenate", base_dir=tmpdir.strpath) concat = pe.Node( - model.Concatenate(in_files=[in1, in2], concatenated_file=out), - name='concat') + model.Concatenate(in_files=[in1, in2], concatenated_file=out), name="concat" + ) wf.add_nodes([concat]) wf.run() assert np.allclose( - nb.load(tmpdir.join('test_concatenate', 'concat', - out).strpath).get_data(), out_data) + nb.load(tmpdir.join("test_concatenate", "concat", out).strpath).get_data(), + out_data, + ) # Test a simple statistic res = model.Concatenate( - in_files=[in1, in2], concatenated_file=out, stats='mean').run() + in_files=[in1, in2], concatenated_file=out, stats="mean" + ).run() assert np.allclose(nb.load(out, mmap=NUMPY_MMAP).get_data(), mean_data) diff --git a/nipype/interfaces/freesurfer/tests/test_preprocess.py b/nipype/interfaces/freesurfer/tests/test_preprocess.py index b0a6db293a..518d60b9a5 100644 --- a/nipype/interfaces/freesurfer/tests/test_preprocess.py +++ b/nipype/interfaces/freesurfer/tests/test_preprocess.py @@ -11,8 +11,7 @@ from nipype import LooseVersion -@pytest.mark.skipif( - freesurfer.no_freesurfer(), reason="freesurfer is not installed") +@pytest.mark.skipif(freesurfer.no_freesurfer(), reason="freesurfer is not installed") def test_robustregister(create_files_in_directory): filelist, outdir = create_files_in_directory @@ -20,7 +19,7 @@ def test_robustregister(create_files_in_directory): cwd = os.getcwd() # make sure command gets called - assert reg.cmd == 'mri_robust_register' + assert reg.cmd == "mri_robust_register" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -30,32 +29,35 @@ def test_robustregister(create_files_in_directory): reg.inputs.source_file = filelist[0] reg.inputs.target_file = filelist[1] reg.inputs.auto_sens = True - assert reg.cmdline == ('mri_robust_register --satit --lta ' - '%s/%s_robustreg.lta --mov %s --dst %s' % - (cwd, filelist[0][:-4], filelist[0], filelist[1])) + assert reg.cmdline == ( + "mri_robust_register --satit --lta " + "%s/%s_robustreg.lta --mov %s --dst %s" + % (cwd, filelist[0][:-4], filelist[0], filelist[1]) + ) # constructor based parameter setting reg2 = freesurfer.RobustRegister( source_file=filelist[0], target_file=filelist[1], outlier_sens=3.0, - out_reg_file='foo.lta', - half_targ=True) + out_reg_file="foo.lta", + half_targ=True, + ) assert reg2.cmdline == ( - 'mri_robust_register --halfdst %s_halfway.nii --lta foo.lta ' - '--sat 3.0000 --mov %s --dst %s' % - (os.path.join(outdir, filelist[1][:-4]), filelist[0], filelist[1])) + "mri_robust_register --halfdst %s_halfway.nii --lta foo.lta " + "--sat 3.0000 --mov %s --dst %s" + % (os.path.join(outdir, filelist[1][:-4]), filelist[0], filelist[1]) + ) 
-@pytest.mark.skipif( - freesurfer.no_freesurfer(), reason="freesurfer is not installed") +@pytest.mark.skipif(freesurfer.no_freesurfer(), reason="freesurfer is not installed") def test_fitmsparams(create_files_in_directory): filelist, outdir = create_files_in_directory fit = freesurfer.FitMSParams() # make sure command gets called - assert fit.cmd == 'mri_ms_fitparms' + assert fit.cmd == "mri_ms_fitparms" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -64,29 +66,30 @@ def test_fitmsparams(create_files_in_directory): # .inputs based parameters setting fit.inputs.in_files = filelist fit.inputs.out_dir = outdir - assert fit.cmdline == 'mri_ms_fitparms %s %s %s' % (filelist[0], - filelist[1], outdir) + assert fit.cmdline == "mri_ms_fitparms %s %s %s" % ( + filelist[0], + filelist[1], + outdir, + ) # constructor based parameter setting fit2 = freesurfer.FitMSParams( - in_files=filelist, - te_list=[1.5, 3.5], - flip_list=[20, 30], - out_dir=outdir) + in_files=filelist, te_list=[1.5, 3.5], flip_list=[20, 30], out_dir=outdir + ) assert fit2.cmdline == ( - 'mri_ms_fitparms -te %.3f -fa %.1f %s -te %.3f -fa %.1f %s %s' % - (1.500, 20.0, filelist[0], 3.500, 30.0, filelist[1], outdir)) + "mri_ms_fitparms -te %.3f -fa %.1f %s -te %.3f -fa %.1f %s %s" + % (1.500, 20.0, filelist[0], 3.500, 30.0, filelist[1], outdir) + ) -@pytest.mark.skipif( - freesurfer.no_freesurfer(), reason="freesurfer is not installed") +@pytest.mark.skipif(freesurfer.no_freesurfer(), reason="freesurfer is not installed") def test_synthesizeflash(create_files_in_directory): filelist, outdir = create_files_in_directory syn = freesurfer.SynthesizeFLASH() # make sure command gets called - assert syn.cmd == 'mri_synthesize' + assert syn.cmd == "mri_synthesize" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -99,20 +102,22 @@ def test_synthesizeflash(create_files_in_directory): syn.inputs.te = 4.5 syn.inputs.tr = 20 - assert syn.cmdline == ('mri_synthesize 20.00 30.00 4.500 %s %s %s' % - (filelist[0], filelist[1], - os.path.join(outdir, 'synth-flash_30.mgz'))) + assert syn.cmdline == ( + "mri_synthesize 20.00 30.00 4.500 %s %s %s" + % (filelist[0], filelist[1], os.path.join(outdir, "synth-flash_30.mgz")) + ) # constructor based parameters setting syn2 = freesurfer.SynthesizeFLASH( - t1_image=filelist[0], pd_image=filelist[1], flip_angle=20, te=5, tr=25) - assert syn2.cmdline == ('mri_synthesize 25.00 20.00 5.000 %s %s %s' % - (filelist[0], filelist[1], - os.path.join(outdir, 'synth-flash_20.mgz'))) + t1_image=filelist[0], pd_image=filelist[1], flip_angle=20, te=5, tr=25 + ) + assert syn2.cmdline == ( + "mri_synthesize 25.00 20.00 5.000 %s %s %s" + % (filelist[0], filelist[1], os.path.join(outdir, "synth-flash_20.mgz")) + ) -@pytest.mark.skipif( - freesurfer.no_freesurfer(), reason="freesurfer is not installed") +@pytest.mark.skipif(freesurfer.no_freesurfer(), reason="freesurfer is not installed") def test_mandatory_outvol(create_files_in_directory): filelist, outdir = create_files_in_directory mni = freesurfer.MNIBiasCorrection() @@ -127,28 +132,30 @@ def test_mandatory_outvol(create_files_in_directory): # test with minimal args mni.inputs.in_file = filelist[0] base, ext = os.path.splitext(os.path.basename(filelist[0])) - if ext == '.gz': + if ext == ".gz": base, ext2 = os.path.splitext(base) ext = ext2 + ext - assert mni.cmdline == ('mri_nu_correct.mni --i %s --n 4 --o %s_output%s' % - (filelist[0], base, ext)) + assert mni.cmdline == ( + "mri_nu_correct.mni --i %s 
--n 4 --o %s_output%s" % (filelist[0], base, ext) + ) # test with custom outfile - mni.inputs.out_file = 'new_corrected_file.mgz' + mni.inputs.out_file = "new_corrected_file.mgz" assert mni.cmdline == ( - 'mri_nu_correct.mni --i %s --n 4 --o new_corrected_file.mgz' % (filelist[0])) + "mri_nu_correct.mni --i %s --n 4 --o new_corrected_file.mgz" % (filelist[0]) + ) # constructor based tests mni2 = freesurfer.MNIBiasCorrection( - in_file=filelist[0], out_file='bias_corrected_output', iterations=2) + in_file=filelist[0], out_file="bias_corrected_output", iterations=2 + ) assert mni2.cmdline == ( - 'mri_nu_correct.mni --i %s --n 2 --o bias_corrected_output' % - filelist[0]) + "mri_nu_correct.mni --i %s --n 2 --o bias_corrected_output" % filelist[0] + ) -@pytest.mark.skipif( - freesurfer.no_freesurfer(), reason="freesurfer is not installed") +@pytest.mark.skipif(freesurfer.no_freesurfer(), reason="freesurfer is not installed") def test_bbregister(create_files_in_directory): filelist, outdir = create_files_in_directory bbr = freesurfer.BBRegister() @@ -160,9 +167,9 @@ def test_bbregister(create_files_in_directory): with pytest.raises(ValueError): bbr.cmdline - bbr.inputs.subject_id = 'fsaverage' + bbr.inputs.subject_id = "fsaverage" bbr.inputs.source_file = filelist[0] - bbr.inputs.contrast_type = 't2' + bbr.inputs.contrast_type = "t2" # Check that 'init' is mandatory in FS < 6, but not in 6+ if Info.looseversion() < LooseVersion("6.0.0"): @@ -171,16 +178,17 @@ def test_bbregister(create_files_in_directory): else: bbr.cmdline - bbr.inputs.init = 'fsl' + bbr.inputs.init = "fsl" base, ext = os.path.splitext(os.path.basename(filelist[0])) - if ext == '.gz': + if ext == ".gz": base, _ = os.path.splitext(base) - assert bbr.cmdline == ('bbregister --t2 --init-fsl ' - '--reg {base}_bbreg_fsaverage.dat ' - '--mov {full} --s fsaverage'.format( - full=filelist[0], base=base)) + assert bbr.cmdline == ( + "bbregister --t2 --init-fsl " + "--reg {base}_bbreg_fsaverage.dat " + "--mov {full} --s fsaverage".format(full=filelist[0], base=base) + ) def test_FSVersion(): diff --git a/nipype/interfaces/freesurfer/tests/test_utils.py b/nipype/interfaces/freesurfer/tests/test_utils.py index 203b2483f8..52348bb5e0 100644 --- a/nipype/interfaces/freesurfer/tests/test_utils.py +++ b/nipype/interfaces/freesurfer/tests/test_utils.py @@ -4,8 +4,10 @@ import os import os.path as op import pytest -from nipype.testing.fixtures import (create_files_in_directory_plus_dummy_file, - create_surf_file_in_directory) +from nipype.testing.fixtures import ( + create_files_in_directory_plus_dummy_file, + create_surf_file_in_directory, +) from nipype.pipeline import engine as pe from nipype.interfaces import freesurfer as fs @@ -18,7 +20,7 @@ def test_sample2surf(create_files_in_directory_plus_dummy_file): s2s = fs.SampleToSurface() # Test underlying command - assert s2s.cmd == 'mri_vol2surf' + assert s2s.cmd == "mri_vol2surf" # Test mandatory args exception with pytest.raises(ValueError): @@ -32,29 +34,30 @@ def test_sample2surf(create_files_in_directory_plus_dummy_file): s2s.inputs.reference_file = files[1] s2s.inputs.hemi = "lh" s2s.inputs.reg_file = files[2] - s2s.inputs.sampling_range = .5 + s2s.inputs.sampling_range = 0.5 s2s.inputs.sampling_units = "frac" s2s.inputs.sampling_method = "point" # Test a basic command line assert s2s.cmdline == ( "mri_vol2surf " - "--hemi lh --o %s --ref %s --reg reg.dat --projfrac 0.500 --mov %s" % - (os.path.join(cwd, "lh.a.mgz"), files[1], files[0])) + "--hemi lh --o %s --ref %s --reg reg.dat 
--projfrac 0.500 --mov %s" + % (os.path.join(cwd, "lh.a.mgz"), files[1], files[0]) + ) # Test identity s2sish = fs.SampleToSurface( - source_file=files[1], reference_file=files[0], hemi="rh") + source_file=files[1], reference_file=files[0], hemi="rh" + ) assert s2s != s2sish # Test hits file name creation s2s.inputs.hits_file = True - assert s2s._get_outfilename("hits_file") == os.path.join( - cwd, "lh.a_hits.mgz") + assert s2s._get_outfilename("hits_file") == os.path.join(cwd, "lh.a_hits.mgz") # Test that a 2-tuple range raises an error def set_illegal_range(): - s2s.inputs.sampling_range = (.2, .5) + s2s.inputs.sampling_range = (0.2, 0.5) with pytest.raises(TraitError): set_illegal_range() @@ -83,9 +86,10 @@ def test_surfsmooth(create_surf_file_in_directory): smooth.inputs.hemi = "lh" # Test the command line - assert smooth.cmdline == \ - ("mri_surf2surf --cortex --fwhm 5.0000 --hemi lh --sval %s --tval %s/lh.a_smooth%d.nii --s fsaverage" % - (surf, cwd, fwhm)) + assert smooth.cmdline == ( + "mri_surf2surf --cortex --fwhm 5.0000 --hemi lh --sval %s --tval %s/lh.a_smooth%d.nii --s fsaverage" + % (surf, cwd, fwhm) + ) # Test identity shmooth = fs.SurfaceSmooth( @@ -93,7 +97,8 @@ def test_surfsmooth(create_surf_file_in_directory): fwhm=6, in_file=surf, hemi="lh", - out_file="lh.a_smooth.nii") + out_file="lh.a_smooth.nii", + ) assert smooth != shmooth @@ -119,16 +124,18 @@ def test_surfxfm(create_surf_file_in_directory): xfm.inputs.hemi = "lh" # Test the command line - assert xfm.cmdline == \ - ("mri_surf2surf --hemi lh --tval %s/lh.a.fsaverage.nii --sval %s --srcsubject my_subject --trgsubject fsaverage" % - (cwd, surf)) + assert xfm.cmdline == ( + "mri_surf2surf --hemi lh --tval %s/lh.a.fsaverage.nii --sval %s --srcsubject my_subject --trgsubject fsaverage" + % (cwd, surf) + ) # Test identity xfmish = fs.SurfaceTransform( source_subject="fsaverage", target_subject="my_subject", source_file=surf, - hemi="lh") + hemi="lh", + ) assert xfm != xfmish @@ -156,8 +163,7 @@ def test_surfshots(create_files_in_directory_plus_dummy_file): assert fotos.cmdline == "tksurfer fsaverage lh pial -tcl snapshots.tcl" # Test identity - schmotos = fs.SurfaceSnapshots( - subject_id="mysubject", hemi="rh", surface="white") + schmotos = fs.SurfaceSnapshots(subject_id="mysubject", hemi="rh", surface="white") assert fotos != schmotos # Test that the tcl script gets written @@ -183,25 +189,25 @@ def test_surfshots(create_files_in_directory_plus_dummy_file): @pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed") def test_mrisexpand(tmpdir): fssrc = FreeSurferSource( - subjects_dir=fs.Info.subjectsdir(), subject_id='fsaverage', hemi='lh') + subjects_dir=fs.Info.subjectsdir(), subject_id="fsaverage", hemi="lh" + ) fsavginfo = fssrc.run().outputs.get() # dt=60 to ensure very short runtime expand_if = fs.MRIsExpand( - in_file=fsavginfo['smoothwm'], out_name='expandtmp', distance=1, dt=60) + in_file=fsavginfo["smoothwm"], out_name="expandtmp", distance=1, dt=60 + ) expand_nd = pe.Node( fs.MRIsExpand( - in_file=fsavginfo['smoothwm'], - out_name='expandtmp', - distance=1, - dt=60), - name='expand_node') + in_file=fsavginfo["smoothwm"], out_name="expandtmp", distance=1, dt=60 + ), + name="expand_node", + ) # Interfaces should have same command line at instantiation - orig_cmdline = 'mris_expand -T 60 {} 1 expandtmp'.format( - fsavginfo['smoothwm']) + orig_cmdline = "mris_expand -T 60 {} 1 expandtmp".format(fsavginfo["smoothwm"]) assert expand_if.cmdline == orig_cmdline assert expand_nd.interface.cmdline 
== orig_cmdline @@ -209,16 +215,18 @@ def test_mrisexpand(tmpdir): nd_res = expand_nd.run() # Commandlines differ - node_cmdline = 'mris_expand -T 60 -pial {cwd}/lh.pial {cwd}/lh.smoothwm ' \ - '1 expandtmp'.format(cwd=nd_res.runtime.cwd) + node_cmdline = ( + "mris_expand -T 60 -pial {cwd}/lh.pial {cwd}/lh.smoothwm " + "1 expandtmp".format(cwd=nd_res.runtime.cwd) + ) assert nd_res.runtime.cmdline == node_cmdline # Check output - if_out_file = expand_if._list_outputs()['out_file'] - nd_out_file = nd_res.outputs.get()['out_file'] + if_out_file = expand_if._list_outputs()["out_file"] + nd_out_file = nd_res.outputs.get()["out_file"] # Same filename assert op.basename(if_out_file) == op.basename(nd_out_file) # Interface places output in source directory - assert op.dirname(if_out_file) == op.dirname(fsavginfo['smoothwm']) + assert op.dirname(if_out_file) == op.dirname(fsavginfo["smoothwm"]) # Node places output in working directory assert op.dirname(nd_out_file) == nd_res.runtime.cwd diff --git a/nipype/interfaces/freesurfer/utils.py b/nipype/interfaces/freesurfer/utils.py index 106938d54d..ab47dacbd4 100644 --- a/nipype/interfaces/freesurfer/utils.py +++ b/nipype/interfaces/freesurfer/utils.py @@ -9,37 +9,69 @@ from ... import logging from ...utils.filemanip import fname_presuffix, split_filename -from ..base import (TraitedSpec, Directory, File, traits, OutputMultiPath, isdefined, - CommandLine, CommandLineInputSpec) -from .base import (FSCommand, FSTraitedSpec, FSSurfaceCommand, FSScriptCommand, - FSScriptOutputSpec, FSTraitedSpecOpenMP, FSCommandOpenMP) -__docformat__ = 'restructuredtext' +from ..base import ( + TraitedSpec, + Directory, + File, + traits, + OutputMultiPath, + isdefined, + CommandLine, + CommandLineInputSpec, +) +from .base import ( + FSCommand, + FSTraitedSpec, + FSSurfaceCommand, + FSScriptCommand, + FSScriptOutputSpec, + FSTraitedSpecOpenMP, + FSCommandOpenMP, +) + +__docformat__ = "restructuredtext" filemap = dict( - cor='cor', - mgh='mgh', - mgz='mgz', - minc='mnc', - afni='brik', - brik='brik', - bshort='bshort', - spm='img', - analyze='img', - analyze4d='img', - bfloat='bfloat', - nifti1='img', - nii='nii', - niigz='nii.gz', - gii='gii') + cor="cor", + mgh="mgh", + mgz="mgz", + minc="mnc", + afni="brik", + brik="brik", + bshort="bshort", + spm="img", + analyze="img", + analyze4d="img", + bfloat="bfloat", + nifti1="img", + nii="nii", + niigz="nii.gz", + gii="gii", +) filetypes = [ - 'cor', 'mgh', 'mgz', 'minc', 'analyze', 'analyze4d', 'spm', 'afni', 'brik', - 'bshort', 'bfloat', 'sdt', 'outline', 'otl', 'gdf', 'nifti1', 'nii', - 'niigz' + "cor", + "mgh", + "mgz", + "minc", + "analyze", + "analyze4d", + "spm", + "afni", + "brik", + "bshort", + "bfloat", + "sdt", + "outline", + "otl", + "gdf", + "nifti1", + "nii", + "niigz", ] -implicit_filetypes = ['gii'] +implicit_filetypes = ["gii"] -logger = logging.getLogger('nipype.interface') +logger = logging.getLogger("nipype.interface") def copy2subjdir(cls, in_file, folder=None, basename=None, subject_id=None): @@ -57,7 +89,7 @@ def copy2subjdir(cls, in_file, folder=None, basename=None, subject_id=None): if isdefined(cls.inputs.subject_id): subject_id = cls.inputs.subject_id else: - subject_id = 'subject_id' # default + subject_id = "subject_id" # default # check for basename if basename is None: basename = os.path.basename(in_file) @@ -89,20 +121,18 @@ class SampleToSurfaceInputSpec(FSTraitedSpec): exists=True, mandatory=True, argstr="--mov %s", - desc="volume to sample values from") + desc="volume to sample values from", + ) 
reference_file = File( - exists=True, - argstr="--ref %s", - desc="reference volume (default is orig.mgz)") + exists=True, argstr="--ref %s", desc="reference volume (default is orig.mgz)" + ) hemi = traits.Enum( - "lh", - "rh", - mandatory=True, - argstr="--hemi %s", - desc="target hemisphere") + "lh", "rh", mandatory=True, argstr="--hemi %s", desc="target hemisphere" + ) surface = traits.String( - argstr="--surf %s", desc="target surface (default is white)") + argstr="--surf %s", desc="target surface (default is white)" + ) reg_xors = ["reg_file", "reg_header", "mni152reg"] reg_file = File( @@ -110,35 +140,41 @@ class SampleToSurfaceInputSpec(FSTraitedSpec): argstr="--reg %s", mandatory=True, xor=reg_xors, - desc="source-to-reference registration file") + desc="source-to-reference registration file", + ) reg_header = traits.Bool( argstr="--regheader %s", requires=["subject_id"], mandatory=True, xor=reg_xors, - desc="register based on header geometry") + desc="register based on header geometry", + ) mni152reg = traits.Bool( argstr="--mni152reg", mandatory=True, xor=reg_xors, - desc="source volume is in MNI152 space") + desc="source volume is in MNI152 space", + ) apply_rot = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="--rot %.3f %.3f %.3f", - desc="rotation angles (in degrees) to apply to reg matrix") + desc="rotation angles (in degrees) to apply to reg matrix", + ) apply_trans = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="--trans %.3f %.3f %.3f", - desc="translation (in mm) to apply to reg matrix") + desc="translation (in mm) to apply to reg matrix", + ) override_reg_subj = traits.Bool( argstr="--srcsubject %s", requires=["subject_id"], - desc="override the subject in the reg file header") + desc="override the subject in the reg file header", + ) sampling_method = traits.Enum( "point", @@ -148,106 +184,117 @@ class SampleToSurfaceInputSpec(FSTraitedSpec): argstr="%s", xor=["projection_stem"], requires=["sampling_range", "sampling_units"], - desc="how to sample -- at a point or at the max or average over a range" + desc="how to sample -- at a point or at the max or average over a range", ) sampling_range = traits.Either( traits.Float, traits.Tuple(traits.Float, traits.Float, traits.Float), - desc="sampling range - a point or a tuple of (min, max, step)") + desc="sampling range - a point or a tuple of (min, max, step)", + ) sampling_units = traits.Enum( - "mm", "frac", desc="sampling range type -- either 'mm' or 'frac'") + "mm", "frac", desc="sampling range type -- either 'mm' or 'frac'" + ) projection_stem = traits.String( mandatory=True, xor=["sampling_method"], - desc="stem for precomputed linear estimates and volume fractions") + desc="stem for precomputed linear estimates and volume fractions", + ) smooth_vol = traits.Float( - argstr="--fwhm %.3f", desc="smooth input volume (mm fwhm)") + argstr="--fwhm %.3f", desc="smooth input volume (mm fwhm)" + ) smooth_surf = traits.Float( - argstr="--surf-fwhm %.3f", desc="smooth output surface (mm fwhm)") + argstr="--surf-fwhm %.3f", desc="smooth output surface (mm fwhm)" + ) interp_method = traits.Enum( - "nearest", - "trilinear", - argstr="--interp %s", - desc="interpolation method") + "nearest", "trilinear", argstr="--interp %s", desc="interpolation method" + ) cortex_mask = traits.Bool( argstr="--cortex", xor=["mask_label"], - desc="mask the target surface with hemi.cortex.label") + desc="mask the target surface with hemi.cortex.label", + ) mask_label = File( exists=True, argstr="--mask %s", 
xor=["cortex_mask"], - desc="label file to mask output with") + desc="label file to mask output with", + ) float2int_method = traits.Enum( "round", "tkregister", argstr="--float2int %s", - desc="method to convert reg matrix values (default is round)") + desc="method to convert reg matrix values (default is round)", + ) fix_tk_reg = traits.Bool( - argstr="--fixtkreg", desc="make reg matrix round-compatible") + argstr="--fixtkreg", desc="make reg matrix round-compatible" + ) subject_id = traits.String(desc="subject id") target_subject = traits.String( argstr="--trgsubject %s", - desc="sample to surface of different subject than source") + desc="sample to surface of different subject than source", + ) surf_reg = traits.Either( traits.Bool, traits.Str(), argstr="--surfreg %s", requires=["target_subject"], - desc="use surface registration to target subject") + desc="use surface registration to target subject", + ) ico_order = traits.Int( argstr="--icoorder %d", requires=["target_subject"], - desc="icosahedron order when target_subject is 'ico'") + desc="icosahedron order when target_subject is 'ico'", + ) reshape = traits.Bool( argstr="--reshape", xor=["no_reshape"], - desc="reshape surface vector to fit in non-mgh format") + desc="reshape surface vector to fit in non-mgh format", + ) no_reshape = traits.Bool( argstr="--noreshape", xor=["reshape"], - desc="do not reshape surface vector (default)") + desc="do not reshape surface vector (default)", + ) reshape_slices = traits.Int( - argstr="--rf %d", desc="number of 'slices' for reshaping") + argstr="--rf %d", desc="number of 'slices' for reshaping" + ) scale_input = traits.Float( - argstr="--scale %.3f", desc="multiple all intensities by scale factor") - frame = traits.Int( - argstr="--frame %d", desc="save only one frame (0-based)") + argstr="--scale %.3f", desc="multiple all intensities by scale factor" + ) + frame = traits.Int(argstr="--frame %d", desc="save only one frame (0-based)") - out_file = File( - argstr="--o %s", genfile=True, desc="surface file to write") + out_file = File(argstr="--o %s", genfile=True, desc="surface file to write") out_type = traits.Enum( - filetypes + implicit_filetypes, - argstr="--out_type %s", - desc="output file type") + filetypes + implicit_filetypes, argstr="--out_type %s", desc="output file type" + ) hits_file = traits.Either( traits.Bool, File(exists=True), argstr="--srchit %s", - desc="save image with number of hits at each voxel") - hits_type = traits.Enum( - filetypes, argstr="--srchit_type", desc="hits file type") + desc="save image with number of hits at each voxel", + ) + hits_type = traits.Enum(filetypes, argstr="--srchit_type", desc="hits file type") vox_file = traits.Either( traits.Bool, File, argstr="--nvox %s", - desc="text file with the number of voxels intersecting the surface") + desc="text file with the number of voxels intersecting the surface", + ) class SampleToSurfaceOutputSpec(TraitedSpec): out_file = File(exists=True, desc="surface file") - hits_file = File( - exists=True, desc="image with number of hits at each voxel") + hits_file = File(exists=True, desc="image with number of hits at each voxel") vox_file = File( - exists=True, - desc="text file with the number of voxels intersecting the surface") + exists=True, desc="text file with the number of voxels intersecting the surface" + ) class SampleToSurface(FSCommand): @@ -280,6 +327,7 @@ class SampleToSurface(FSCommand): >>> res = sampler.run() # doctest: +SKIP """ + _cmd = "mri_vol2surf" input_spec = SampleToSurfaceInputSpec 
output_spec = SampleToSurfaceOutputSpec @@ -310,16 +358,22 @@ def _format_arg(self, name, spec, value): if ext in filemap.values(): raise ValueError( "Cannot create {} file with extension " - "{}".format(value, ext)) + "{}".format(value, ext) + ) else: - logger.warning('Creating %s file with extension %s: %s%s', - value, ext, base, ext) + logger.warning( + "Creating %s file with extension %s: %s%s", + value, + ext, + base, + ext, + ) if value in implicit_filetypes: return "" - if name == 'surf_reg': + if name == "surf_reg": if value is True: - return spec.argstr % 'sphere.reg' + return spec.argstr % "sphere.reg" return super(SampleToSurface, self)._format_arg(name, spec, value) @@ -328,19 +382,20 @@ def _get_outfilename(self, opt="out_file"): if not isdefined(outfile) or isinstance(outfile, bool): if isdefined(self.inputs.out_type): if opt == "hits_file": - suffix = '_hits.' + filemap[self.inputs.out_type] + suffix = "_hits." + filemap[self.inputs.out_type] else: - suffix = '.' + filemap[self.inputs.out_type] + suffix = "." + filemap[self.inputs.out_type] elif opt == "hits_file": suffix = "_hits.mgz" else: - suffix = '.mgz' + suffix = ".mgz" outfile = fname_presuffix( self.inputs.source_file, newpath=os.getcwd(), prefix=self.inputs.hemi + ".", suffix=suffix, - use_ext=False) + use_ext=False, + ) return outfile def _list_outputs(self): @@ -359,7 +414,8 @@ def _list_outputs(self): newpath=os.getcwd(), prefix=self.inputs.hemi + ".", suffix="_vox.txt", - use_ext=False) + use_ext=False, + ) outputs["vox_file"] = voxfile return outputs @@ -371,34 +427,31 @@ def _gen_filename(self, name): class SurfaceSmoothInputSpec(FSTraitedSpec): - in_file = File( - mandatory=True, argstr="--sval %s", desc="source surface file") + in_file = File(mandatory=True, argstr="--sval %s", desc="source surface file") subject_id = traits.String( - mandatory=True, argstr="--s %s", desc="subject id of surface file") + mandatory=True, argstr="--s %s", desc="subject id of surface file" + ) hemi = traits.Enum( - "lh", - "rh", - argstr="--hemi %s", - mandatory=True, - desc="hemisphere to operate on") + "lh", "rh", argstr="--hemi %s", mandatory=True, desc="hemisphere to operate on" + ) fwhm = traits.Float( argstr="--fwhm %.4f", xor=["smooth_iters"], - desc="effective FWHM of the smoothing process") + desc="effective FWHM of the smoothing process", + ) smooth_iters = traits.Int( - argstr="--smooth %d", - xor=["fwhm"], - desc="iterations of the smoothing process") + argstr="--smooth %d", xor=["fwhm"], desc="iterations of the smoothing process" + ) cortex = traits.Bool( True, argstr="--cortex", usedefault=True, - desc="only smooth within $hemi.cortex.label") + desc="only smooth within $hemi.cortex.label", + ) reshape = traits.Bool( - argstr="--reshape", - desc="reshape surface vector to fit in non-mgh format") - out_file = File( - argstr="--tval %s", genfile=True, desc="surface file to write") + argstr="--reshape", desc="reshape surface vector to fit in non-mgh format" + ) + out_file = File(argstr="--tval %s", genfile=True, desc="surface file to write") class SurfaceSmoothOutputSpec(TraitedSpec): @@ -434,6 +487,7 @@ class SurfaceSmooth(FSCommand): >>> smoother.run() # doctest: +SKIP """ + _cmd = "mri_surf2surf" input_spec = SurfaceSmoothInputSpec output_spec = SurfaceSmoothOutputSpec @@ -448,7 +502,8 @@ def _list_outputs(self): else: kernel = self.inputs.smooth_iters outputs["out_file"] = fname_presuffix( - in_file, suffix="_smooth%d" % kernel, newpath=os.getcwd()) + in_file, suffix="_smooth%d" % kernel, newpath=os.getcwd() + ) 
return outputs def _gen_filename(self, name): @@ -462,28 +517,25 @@ class SurfaceTransformInputSpec(FSTraitedSpec): exists=True, mandatory=True, argstr="--sval %s", - xor=['source_annot_file'], - desc="surface file with source values") + xor=["source_annot_file"], + desc="surface file with source values", + ) source_annot_file = File( exists=True, mandatory=True, argstr="--sval-annot %s", - xor=['source_file'], - desc="surface annotation file") + xor=["source_file"], + desc="surface annotation file", + ) source_subject = traits.String( - mandatory=True, - argstr="--srcsubject %s", - desc="subject id for source surface") + mandatory=True, argstr="--srcsubject %s", desc="subject id for source surface" + ) hemi = traits.Enum( - "lh", - "rh", - argstr="--hemi %s", - mandatory=True, - desc="hemisphere to transform") + "lh", "rh", argstr="--hemi %s", mandatory=True, desc="hemisphere to transform" + ) target_subject = traits.String( - mandatory=True, - argstr="--trgsubject %s", - desc="subject id of target surface") + mandatory=True, argstr="--trgsubject %s", desc="subject id of target surface" + ) target_ico_order = traits.Enum( 1, 2, @@ -493,24 +545,24 @@ class SurfaceTransformInputSpec(FSTraitedSpec): 6, 7, argstr="--trgicoorder %d", - desc=("order of the icosahedron if " - "target_subject is 'ico'")) + desc=("order of the icosahedron if " "target_subject is 'ico'"), + ) source_type = traits.Enum( filetypes, - argstr='--sfmt %s', - requires=['source_file'], - desc="source file format") + argstr="--sfmt %s", + requires=["source_file"], + desc="source file format", + ) target_type = traits.Enum( - filetypes + implicit_filetypes, - argstr='--tfmt %s', - desc="output format") + filetypes + implicit_filetypes, argstr="--tfmt %s", desc="output format" + ) reshape = traits.Bool( - argstr="--reshape", - desc="reshape output surface to conform with Nifti") + argstr="--reshape", desc="reshape output surface to conform with Nifti" + ) reshape_factor = traits.Int( - argstr="--reshape-factor", desc="number of slices in reshaped image") - out_file = File( - argstr="--tval %s", genfile=True, desc="surface file to write") + argstr="--reshape-factor", desc="number of slices in reshaped image" + ) + out_file = File(argstr="--tval %s", genfile=True, desc="surface file to write") class SurfaceTransformOutputSpec(TraitedSpec): @@ -536,6 +588,7 @@ class SurfaceTransform(FSCommand): >>> sxfm.run() # doctest: +SKIP """ + _cmd = "mri_surf2surf" input_spec = SurfaceTransformInputSpec output_spec = SurfaceTransformOutputSpec @@ -543,15 +596,21 @@ class SurfaceTransform(FSCommand): def _format_arg(self, name, spec, value): if name == "target_type": if isdefined(self.inputs.out_file): - _, base, ext = split_filename(self._list_outputs()['out_file']) + _, base, ext = split_filename(self._list_outputs()["out_file"]) if ext != filemap[value]: if ext in filemap.values(): raise ValueError( "Cannot create {} file with extension " - "{}".format(value, ext)) + "{}".format(value, ext) + ) else: - logger.warning('Creating %s file with extension %s: %s%s', - value, ext, base, ext) + logger.warning( + "Creating %s file with extension %s: %s%s", + value, + ext, + base, + ext, + ) if value in implicit_filetypes: return "" return super(SurfaceTransform, self)._format_arg(name, spec, value) @@ -568,10 +627,24 @@ def _list_outputs(self): # Some recon-all files don't have a proper extension (e.g. 
"lh.thickness") # so we have to account for that here bad_extensions = [ - ".%s" % e for e in [ - "area", "mid", "pial", "avg_curv", "curv", "inflated", - "jacobian_white", "orig", "nofix", "smoothwm", "crv", - "sphere", "sulc", "thickness", "volume", "white" + ".%s" % e + for e in [ + "area", + "mid", + "pial", + "avg_curv", + "curv", + "inflated", + "jacobian_white", + "orig", + "nofix", + "smoothwm", + "crv", + "sphere", + "sulc", + "thickness", + "volume", + "white", ] ] use_ext = True @@ -586,7 +659,8 @@ def _list_outputs(self): source, suffix=".%s%s" % (self.inputs.target_subject, ext), newpath=os.getcwd(), - use_ext=use_ext) + use_ext=use_ext, + ) else: outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs @@ -600,55 +674,59 @@ def _gen_filename(self, name): class Surface2VolTransformInputSpec(FSTraitedSpec): source_file = File( exists=True, - argstr='--surfval %s', + argstr="--surfval %s", copyfile=False, mandatory=True, - xor=['mkmask'], - desc='This is the source of the surface values') - hemi = traits.Str( - argstr='--hemi %s', mandatory=True, desc='hemisphere of data') + xor=["mkmask"], + desc="This is the source of the surface values", + ) + hemi = traits.Str(argstr="--hemi %s", mandatory=True, desc="hemisphere of data") transformed_file = File( name_template="%s_asVol.nii", - desc='Output volume', - argstr='--outvol %s', - name_source=['source_file'], - hash_files=False) + desc="Output volume", + argstr="--outvol %s", + name_source=["source_file"], + hash_files=False, + ) reg_file = File( exists=True, - argstr='--volreg %s', + argstr="--volreg %s", mandatory=True, - desc='tkRAS-to-tkRAS matrix (tkregister2 format)', - xor=['subject_id']) + desc="tkRAS-to-tkRAS matrix (tkregister2 format)", + xor=["subject_id"], + ) template_file = File( - exists=True, argstr='--template %s', desc='Output template volume') + exists=True, argstr="--template %s", desc="Output template volume" + ) mkmask = traits.Bool( - desc='make a mask instead of loading surface values', - argstr='--mkmask', - xor=['source_file']) + desc="make a mask instead of loading surface values", + argstr="--mkmask", + xor=["source_file"], + ) vertexvol_file = File( name_template="%s_asVol_vertex.nii", - desc=('Path name of the vertex output volume, which ' - 'is the same as output volume except that the ' - 'value of each voxel is the vertex-id that is ' - 'mapped to that voxel.'), - argstr='--vtxvol %s', - name_source=['source_file'], - hash_files=False) - surf_name = traits.Str( - argstr='--surf %s', desc='surfname (default is white)') - projfrac = traits.Float(argstr='--projfrac %s', desc='thickness fraction') + desc=( + "Path name of the vertex output volume, which " + "is the same as output volume except that the " + "value of each voxel is the vertex-id that is " + "mapped to that voxel." 
+ ), + argstr="--vtxvol %s", + name_source=["source_file"], + hash_files=False, + ) + surf_name = traits.Str(argstr="--surf %s", desc="surfname (default is white)") + projfrac = traits.Float(argstr="--projfrac %s", desc="thickness fraction") subjects_dir = traits.Str( - argstr='--sd %s', - desc=('freesurfer subjects directory defaults to ' - '$SUBJECTS_DIR')) - subject_id = traits.Str( - argstr='--identity %s', desc='subject id', xor=['reg_file']) + argstr="--sd %s", + desc=("freesurfer subjects directory defaults to " "$SUBJECTS_DIR"), + ) + subject_id = traits.Str(argstr="--identity %s", desc="subject id", xor=["reg_file"]) class Surface2VolTransformOutputSpec(TraitedSpec): - transformed_file = File( - exists=True, desc='Path to output file if used normally') - vertexvol_file = File(desc='vertex map volume path id. Optional') + transformed_file = File(exists=True, desc="Path to output file if used normally") + vertexvol_file = File(desc="vertex map volume path id. Optional") class Surface2VolTransform(FSCommand): @@ -670,7 +748,7 @@ class Surface2VolTransform(FSCommand): """ - _cmd = 'mri_surf2vol' + _cmd = "mri_surf2vol" input_spec = Surface2VolTransformInputSpec output_spec = Surface2VolTransformOutputSpec @@ -682,44 +760,47 @@ class ApplyMaskInputSpec(FSTraitedSpec): mandatory=True, position=-3, argstr="%s", - desc="input image (will be masked)") + desc="input image (will be masked)", + ) mask_file = File( exists=True, mandatory=True, position=-2, argstr="%s", - desc="image defining mask space") + desc="image defining mask space", + ) out_file = File( - name_source=['in_file'], - name_template='%s_masked', + name_source=["in_file"], + name_template="%s_masked", hash_files=True, keep_extension=True, position=-1, argstr="%s", - desc="final image to write") + desc="final image to write", + ) xfm_file = File( exists=True, argstr="-xform %s", - desc="LTA-format transformation matrix to align mask with input") + desc="LTA-format transformation matrix to align mask with input", + ) invert_xfm = traits.Bool(argstr="-invert", desc="invert transformation") xfm_source = File( - exists=True, - argstr="-lta_src %s", - desc="image defining transform source space") + exists=True, argstr="-lta_src %s", desc="image defining transform source space" + ) xfm_target = File( - exists=True, - argstr="-lta_dst %s", - desc="image defining transform target space") + exists=True, argstr="-lta_dst %s", desc="image defining transform target space" + ) use_abs = traits.Bool( - argstr="-abs", desc="take absolute value of mask before applying") - mask_thresh = traits.Float( - argstr="-T %.4f", desc="threshold mask before applying") + argstr="-abs", desc="take absolute value of mask before applying" + ) + mask_thresh = traits.Float(argstr="-T %.4f", desc="threshold mask before applying") keep_mask_deletion_edits = traits.Bool( argstr="-keep_mask_deletion_edits", - desc="transfer voxel-deletion edits (voxels=1) from mask to out vol") + desc="transfer voxel-deletion edits (voxels=1) from mask to out vol", + ) transfer = traits.Int( - argstr="-transfer %d", - desc="transfer only voxel value # from mask to out") + argstr="-transfer %d", desc="transfer only voxel value # from mask to out" + ) class ApplyMaskOutputSpec(TraitedSpec): @@ -735,6 +816,7 @@ class ApplyMask(FSCommand): space with an LTA matrix. 
""" + _cmd = "mri_mask" input_spec = ApplyMaskInputSpec output_spec = ApplyMaskOutputSpec @@ -743,120 +825,142 @@ class ApplyMask(FSCommand): class SurfaceSnapshotsInputSpec(FSTraitedSpec): subject_id = traits.String( - position=1, argstr="%s", mandatory=True, desc="subject to visualize") + position=1, argstr="%s", mandatory=True, desc="subject to visualize" + ) hemi = traits.Enum( "lh", "rh", position=2, argstr="%s", mandatory=True, - desc="hemisphere to visualize") + desc="hemisphere to visualize", + ) surface = traits.String( - position=3, argstr="%s", mandatory=True, desc="surface to visualize") + position=3, argstr="%s", mandatory=True, desc="surface to visualize" + ) show_curv = traits.Bool( - argstr="-curv", desc="show curvature", xor=["show_gray_curv"]) + argstr="-curv", desc="show curvature", xor=["show_gray_curv"] + ) show_gray_curv = traits.Bool( - argstr="-gray", desc="show curvature in gray", xor=["show_curv"]) + argstr="-gray", desc="show curvature in gray", xor=["show_curv"] + ) overlay = File( exists=True, argstr="-overlay %s", desc="load an overlay volume/surface", - requires=["overlay_range"]) + requires=["overlay_range"], + ) reg_xors = ["overlay_reg", "identity_reg", "mni152_reg"] overlay_reg = File( exists=True, argstr="-overlay-reg %s", xor=reg_xors, - desc="registration matrix file to register overlay to surface") + desc="registration matrix file to register overlay to surface", + ) identity_reg = traits.Bool( argstr="-overlay-reg-identity", xor=reg_xors, - desc="use the identity matrix to register the overlay to the surface") + desc="use the identity matrix to register the overlay to the surface", + ) mni152_reg = traits.Bool( argstr="-mni152reg", xor=reg_xors, - desc="use to display a volume in MNI152 space on the average subject") + desc="use to display a volume in MNI152 space on the average subject", + ) overlay_range = traits.Either( traits.Float, traits.Tuple(traits.Float, traits.Float), traits.Tuple(traits.Float, traits.Float, traits.Float), desc="overlay range--either min, (min, max) or (min, mid, max)", - argstr="%s") + argstr="%s", + ) overlay_range_offset = traits.Float( argstr="-foffset %.3f", - desc="overlay range will be symettric around offset value") + desc="overlay range will be symettric around offset value", + ) truncate_overlay = traits.Bool( - argstr="-truncphaseflag 1", desc="truncate the overlay display") + argstr="-truncphaseflag 1", desc="truncate the overlay display" + ) reverse_overlay = traits.Bool( - argstr="-revphaseflag 1", desc="reverse the overlay display") + argstr="-revphaseflag 1", desc="reverse the overlay display" + ) invert_overlay = traits.Bool( - argstr="-invphaseflag 1", desc="invert the overlay display") + argstr="-invphaseflag 1", desc="invert the overlay display" + ) demean_overlay = traits.Bool(argstr="-zm", desc="remove mean from overlay") annot_file = File( exists=True, argstr="-annotation %s", xor=["annot_name"], - desc="path to annotation file to display") + desc="path to annotation file to display", + ) annot_name = traits.String( argstr="-annotation %s", xor=["annot_file"], - desc= - "name of annotation to display (must be in $subject/label directory") + desc="name of annotation to display (must be in $subject/label directory", + ) label_file = File( exists=True, argstr="-label %s", xor=["label_name"], - desc="path to label file to display") + desc="path to label file to display", + ) label_name = traits.String( argstr="-label %s", xor=["label_file"], - desc="name of label to display (must be in $subject/label 
directory") + desc="name of label to display (must be in $subject/label directory", + ) - colortable = File( - exists=True, argstr="-colortable %s", desc="load colortable file") + colortable = File(exists=True, argstr="-colortable %s", desc="load colortable file") label_under = traits.Bool( - argstr="-labels-under", desc="draw label/annotation under overlay") + argstr="-labels-under", desc="draw label/annotation under overlay" + ) label_outline = traits.Bool( - argstr="-label-outline", desc="draw label/annotation as outline") + argstr="-label-outline", desc="draw label/annotation as outline" + ) patch_file = File(exists=True, argstr="-patch %s", desc="load a patch") orig_suffix = traits.String( - argstr="-orig %s", desc="set the orig surface suffix string") + argstr="-orig %s", desc="set the orig surface suffix string" + ) sphere_suffix = traits.String( - argstr="-sphere %s", desc="set the sphere.reg suffix string") + argstr="-sphere %s", desc="set the sphere.reg suffix string" + ) show_color_scale = traits.Bool( - argstr="-colscalebarflag 1", desc="display the color scale bar") + argstr="-colscalebarflag 1", desc="display the color scale bar" + ) show_color_text = traits.Bool( - argstr="-colscaletext 1", desc="display text in the color scale bar") + argstr="-colscaletext 1", desc="display text in the color scale bar" + ) six_images = traits.Bool(desc="also take anterior and posterior snapshots") - screenshot_stem = traits.String( - desc="stem to use for screenshot file names") + screenshot_stem = traits.String(desc="stem to use for screenshot file names") stem_template_args = traits.List( traits.String, requires=["screenshot_stem"], - desc= - "input names to use as arguments for a string-formated stem template") + desc="input names to use as arguments for a string-formated stem template", + ) tcl_script = File( exists=True, argstr="%s", genfile=True, - desc="override default screenshot script") + desc="override default screenshot script", + ) class SurfaceSnapshotsOutputSpec(TraitedSpec): snapshots = OutputMultiPath( - File(exists=True), - desc="tiff images of the surface from different perspectives") + File(exists=True), desc="tiff images of the surface from different perspectives" + ) class SurfaceSnapshots(FSCommand): @@ -885,6 +989,7 @@ class SurfaceSnapshots(FSCommand): >>> res = shots.run() # doctest: +SKIP """ + _cmd = "tksurfer" input_spec = SurfaceSnapshotsInputSpec output_spec = SurfaceSnapshotsOutputSpec @@ -902,9 +1007,11 @@ def _format_arg(self, name, spec, value): if len(value) == 2: return "-fminmax %.3f %.3f" % value else: - return "-fminmax %.3f %.3f -fmid %.3f" % (value[0], - value[2], - value[1]) + return "-fminmax %.3f %.3f -fmid %.3f" % ( + value[0], + value[2], + value[1], + ) elif name == "annot_name" and isdefined(value): # Matching annot by name needs to strip the leading hemi and trailing # extension strings @@ -917,8 +1024,11 @@ def _format_arg(self, name, spec, value): def _run_interface(self, runtime): if not isdefined(self.inputs.screenshot_stem): - stem = "%s_%s_%s" % (self.inputs.subject_id, self.inputs.hemi, - self.inputs.surface) + stem = "%s_%s_%s" % ( + self.inputs.subject_id, + self.inputs.hemi, + self.inputs.surface, + ) else: stem = self.inputs.screenshot_stem stem_args = self.inputs.stem_template_args @@ -927,8 +1037,7 @@ def _run_interface(self, runtime): stem = stem % args # Check if the DISPLAY variable is set -- should avoid crashes (might not?) 
if "DISPLAY" not in os.environ: - raise RuntimeError( - "Graphics are not enabled -- cannot run tksurfer") + raise RuntimeError("Graphics are not enabled -- cannot run tksurfer") runtime.environ["_SNAPSHOT_STEM"] = stem self._write_tcl_script() runtime = super(SurfaceSnapshots, self)._run_interface(runtime) @@ -937,7 +1046,7 @@ def _run_interface(self, runtime): # better exception here if that happened. errors = [ "surfer: failed, no suitable display found", - "Fatal Error in tksurfer.bin: could not open display" + "Fatal Error in tksurfer.bin: could not open display", ] for err in errors: if err in runtime.stderr: @@ -950,21 +1059,33 @@ def _run_interface(self, runtime): def _write_tcl_script(self): fid = open("snapshots.tcl", "w") script = [ - "save_tiff $env(_SNAPSHOT_STEM)-lat.tif", "make_lateral_view", - "rotate_brain_y 180", "redraw", - "save_tiff $env(_SNAPSHOT_STEM)-med.tif", "make_lateral_view", - "rotate_brain_x 90", "redraw", - "save_tiff $env(_SNAPSHOT_STEM)-ven.tif", "make_lateral_view", - "rotate_brain_x -90", "redraw", - "save_tiff $env(_SNAPSHOT_STEM)-dor.tif" + "save_tiff $env(_SNAPSHOT_STEM)-lat.tif", + "make_lateral_view", + "rotate_brain_y 180", + "redraw", + "save_tiff $env(_SNAPSHOT_STEM)-med.tif", + "make_lateral_view", + "rotate_brain_x 90", + "redraw", + "save_tiff $env(_SNAPSHOT_STEM)-ven.tif", + "make_lateral_view", + "rotate_brain_x -90", + "redraw", + "save_tiff $env(_SNAPSHOT_STEM)-dor.tif", ] if isdefined(self.inputs.six_images) and self.inputs.six_images: - script.extend([ - "make_lateral_view", "rotate_brain_y 90", "redraw", - "save_tiff $env(_SNAPSHOT_STEM)-pos.tif", "make_lateral_view", - "rotate_brain_y -90", "redraw", - "save_tiff $env(_SNAPSHOT_STEM)-ant.tif" - ]) + script.extend( + [ + "make_lateral_view", + "rotate_brain_y 90", + "redraw", + "save_tiff $env(_SNAPSHOT_STEM)-pos.tif", + "make_lateral_view", + "rotate_brain_y -90", + "redraw", + "save_tiff $env(_SNAPSHOT_STEM)-ant.tif", + ] + ) script.append("exit") fid.write("\n".join(script)) @@ -973,8 +1094,11 @@ def _write_tcl_script(self): def _list_outputs(self): outputs = self._outputs().get() if not isdefined(self.inputs.screenshot_stem): - stem = "%s_%s_%s" % (self.inputs.subject_id, self.inputs.hemi, - self.inputs.surface) + stem = "%s_%s_%s" % ( + self.inputs.subject_id, + self.inputs.hemi, + self.inputs.surface, + ) else: stem = self.inputs.screenshot_stem stem_args = self.inputs.stem_template_args @@ -1062,73 +1186,77 @@ class MRIsConvertInputSpec(FSTraitedSpec): """ Uses Freesurfer's mris_convert to convert surface files to various formats """ + annot_file = File( - exists=True, - argstr="--annot %s", - desc="input is annotation or gifti label data") + exists=True, argstr="--annot %s", desc="input is annotation or gifti label data" + ) parcstats_file = File( exists=True, argstr="--parcstats %s", - desc="infile is name of text file containing label/val pairs") + desc="infile is name of text file containing label/val pairs", + ) label_file = File( exists=True, argstr="--label %s", - desc="infile is .label file, label is name of this label") + desc="infile is .label file, label is name of this label", + ) scalarcurv_file = File( exists=True, argstr="-c %s", - desc="input is scalar curv overlay file (must still specify surface)") + desc="input is scalar curv overlay file (must still specify surface)", + ) functional_file = File( exists=True, argstr="-f %s", - desc= - "input is functional time-series or other multi-frame data (must specify surface)" + desc="input is functional time-series or 
other multi-frame data (must specify surface)", ) labelstats_outfile = File( exists=False, argstr="--labelstats %s", - desc= - "outfile is name of gifti file to which label stats will be written") + desc="outfile is name of gifti file to which label stats will be written", + ) - patch = traits.Bool( - argstr="-p", desc="input is a patch, not a full surface") + patch = traits.Bool(argstr="-p", desc="input is a patch, not a full surface") rescale = traits.Bool( - argstr="-r", - desc="rescale vertex xyz so total area is same as group average") - normal = traits.Bool( - argstr="-n", desc="output is an ascii file where vertex data") - xyz_ascii = traits.Bool( - argstr="-a", desc="Print only surface xyz to ascii file") + argstr="-r", desc="rescale vertex xyz so total area is same as group average" + ) + normal = traits.Bool(argstr="-n", desc="output is an ascii file where vertex data") + xyz_ascii = traits.Bool(argstr="-a", desc="Print only surface xyz to ascii file") vertex = traits.Bool( - argstr="-v", desc="Writes out neighbors of a vertex in each row") + argstr="-v", desc="Writes out neighbors of a vertex in each row" + ) scale = traits.Float(argstr="-s %.3f", desc="scale vertex xyz by scale") dataarray_num = traits.Int( argstr="--da_num %d", - desc="if input is gifti, 'num' specifies which data array to use") + desc="if input is gifti, 'num' specifies which data array to use", + ) talairachxfm_subjid = traits.String( - argstr="-t %s", desc="apply talairach xfm of subject to vertex xyz") + argstr="-t %s", desc="apply talairach xfm of subject to vertex xyz" + ) origname = traits.String(argstr="-o %s", desc="read orig positions") in_file = File( exists=True, mandatory=True, position=-2, - argstr='%s', - desc='File to read/convert') + argstr="%s", + desc="File to read/convert", + ) out_file = File( - argstr='%s', + argstr="%s", position=-1, genfile=True, - xor=['out_datatype'], + xor=["out_datatype"], mandatory=True, - desc='output filename or True to generate one') + desc="output filename or True to generate one", + ) out_datatype = traits.Enum( "asc", @@ -1139,18 +1267,18 @@ class MRIsConvertInputSpec(FSTraitedSpec): "gii", "mgh", "mgz", - xor=['out_file'], + xor=["out_file"], mandatory=True, desc="These file formats are supported: ASCII: .asc" - "ICO: .ico, .tri GEO: .geo STL: .stl VTK: .vtk GIFTI: .gii MGH surface-encoded 'volume': .mgh, .mgz" + "ICO: .ico, .tri GEO: .geo STL: .stl VTK: .vtk GIFTI: .gii MGH surface-encoded 'volume': .mgh, .mgz", ) to_scanner = traits.Bool( argstr="--to-scanner", - desc="convert coordinates from native FS (tkr) coords to scanner coords" + desc="convert coordinates from native FS (tkr) coords to scanner coords", ) to_tkr = traits.Bool( argstr="--to-tkr", - desc="convert coordinates from scanner coords to native FS (tkr) coords" + desc="convert coordinates from scanner coords to native FS (tkr) coords", ) @@ -1158,7 +1286,8 @@ class MRIsConvertOutputSpec(TraitedSpec): """ Uses Freesurfer's mris_convert to convert surface files to various formats """ - converted = File(exists=True, desc='converted output surface') + + converted = File(exists=True, desc="converted output surface") class MRIsConvert(FSCommand): @@ -1174,7 +1303,8 @@ class MRIsConvert(FSCommand): >>> mris.inputs.out_datatype = 'gii' >>> mris.run() # doctest: +SKIP """ - _cmd = 'mris_convert' + + _cmd = "mris_convert" input_spec = MRIsConvertInputSpec output_spec = MRIsConvertOutputSpec @@ -1189,7 +1319,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name == 
'out_file': + if name == "out_file": return os.path.abspath(self._gen_outfilename()) else: return None @@ -1217,30 +1347,33 @@ class MRIsCombineInputSpec(FSTraitedSpec): """ Uses Freesurfer's mris_convert to combine two surface files into one. """ + in_files = traits.List( File(Exists=True), maxlen=2, minlen=2, mandatory=True, position=1, - argstr='--combinesurfs %s', - desc='Two surfaces to be combined.') + argstr="--combinesurfs %s", + desc="Two surfaces to be combined.", + ) out_file = File( - argstr='%s', + argstr="%s", position=-1, genfile=True, mandatory=True, - desc='Output filename. Combined surfaces from in_files.') + desc="Output filename. Combined surfaces from in_files.", + ) class MRIsCombineOutputSpec(TraitedSpec): """ Uses Freesurfer's mris_convert to combine two surface files into one. """ + out_file = File( - exists=True, - desc='Output filename. Combined surfaces from ' - 'in_files.') + exists=True, desc="Output filename. Combined surfaces from " "in_files." + ) class MRIsCombine(FSSurfaceCommand): @@ -1268,7 +1401,8 @@ class MRIsCombine(FSSurfaceCommand): 'mris_convert --combinesurfs lh.pial rh.pial bh.pial' >>> mris.run() # doctest: +SKIP """ - _cmd = 'mris_convert' + + _cmd = "mris_convert" input_spec = MRIsCombineInputSpec output_spec = MRIsCombineOutputSpec @@ -1279,9 +1413,9 @@ def _list_outputs(self): # regardless of input file names, except when path info is # specified path, base = os.path.split(self.inputs.out_file) - if path == '' and base[:3] not in ('lh.', 'rh.'): - base = 'lh.' + base - outputs['out_file'] = os.path.abspath(os.path.join(path, base)) + if path == "" and base[:3] not in ("lh.", "rh."): + base = "lh." + base + outputs["out_file"] = os.path.abspath(os.path.join(path, base)) return outputs @@ -1304,33 +1438,35 @@ class MRITessellateInputSpec(FSTraitedSpec): exists=True, mandatory=True, position=-3, - argstr='%s', - desc='Input volume to tesselate voxels from.') + argstr="%s", + desc="Input volume to tesselate voxels from.", + ) label_value = traits.Int( position=-2, - argstr='%d', + argstr="%d", mandatory=True, - desc= - 'Label value which to tesselate from the input volume. (integer, if input is "filled.mgz" volume, 127 is rh, 255 is lh)' + desc='Label value which to tesselate from the input volume. 
(integer, if input is "filled.mgz" volume, 127 is rh, 255 is lh)', ) out_file = File( - argstr='%s', + argstr="%s", position=-1, genfile=True, - desc='output filename or True to generate one') + desc="output filename or True to generate one", + ) tesselate_all_voxels = traits.Bool( - argstr='-a', - desc='Tessellate the surface of all voxels with different labels') + argstr="-a", desc="Tessellate the surface of all voxels with different labels" + ) use_real_RAS_coordinates = traits.Bool( - argstr='-n', - desc='Saves surface with real RAS coordinates where c_(r,a,s) != 0') + argstr="-n", desc="Saves surface with real RAS coordinates where c_(r,a,s) != 0" + ) class MRITessellateOutputSpec(TraitedSpec): """ Uses Freesurfer's mri_tessellate to create surfaces by tessellating a given input volume """ - surface = File(exists=True, desc='binary surface of the tessellation ') + + surface = File(exists=True, desc="binary surface of the tessellation ") class MRITessellate(FSCommand): @@ -1347,17 +1483,18 @@ class MRITessellate(FSCommand): >>> tess.inputs.out_file = 'lh.hippocampus' >>> tess.run() # doctest: +SKIP """ - _cmd = 'mri_tessellate' + + _cmd = "mri_tessellate" input_spec = MRITessellateInputSpec output_spec = MRITessellateOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['surface'] = os.path.abspath(self._gen_outfilename()) + outputs["surface"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._gen_outfilename() else: return None @@ -1367,7 +1504,7 @@ def _gen_outfilename(self): return self.inputs.out_file else: _, name, ext = split_filename(self.inputs.in_file) - return name + ext + '_' + str(self.inputs.label_value) + return name + ext + "_" + str(self.inputs.label_value) class MRIPretessInputSpec(FSTraitedSpec): @@ -1375,49 +1512,57 @@ class MRIPretessInputSpec(FSTraitedSpec): exists=True, mandatory=True, position=-4, - argstr='%s', - desc=('filled volume, usually wm.mgz')) + argstr="%s", + desc=("filled volume, usually wm.mgz"), + ) label = traits.Either( - traits.Str('wm'), + traits.Str("wm"), traits.Int(1), - argstr='%s', - default='wm', + argstr="%s", + default="wm", mandatory=True, usedefault=True, position=-3, - desc=('label to be picked up, can be a Freesurfer\'s string like ' - '\'wm\' or a label value (e.g. 127 for rh or 255 for lh)')) + desc=( + "label to be picked up, can be a Freesurfer's string like " + "'wm' or a label value (e.g. 127 for rh or 255 for lh)" + ), + ) in_norm = File( exists=True, mandatory=True, position=-2, - argstr='%s', - desc=('the normalized, brain-extracted T1w image. Usually norm.mgz')) + argstr="%s", + desc=("the normalized, brain-extracted T1w image. 
Usually norm.mgz"), + ) out_file = File( position=-1, - argstr='%s', - name_source=['in_filled'], - name_template='%s_pretesswm', + argstr="%s", + name_source=["in_filled"], + name_template="%s_pretesswm", keep_extension=True, - desc='the output file after mri_pretess.') + desc="the output file after mri_pretess.", + ) nocorners = traits.Bool( False, - argstr='-nocorners', - desc=('do not remove corner configurations' - ' in addition to edge ones.')) - keep = traits.Bool(False, argstr='-keep', desc=('keep WM edits')) + argstr="-nocorners", + desc=("do not remove corner configurations" " in addition to edge ones."), + ) + keep = traits.Bool(False, argstr="-keep", desc=("keep WM edits")) test = traits.Bool( False, - argstr='-test', - desc= - ('adds a voxel that should be removed by ' - 'mri_pretess. The value of the voxel is set to that of an ON-edited WM, ' - 'so it should be kept with -keep. The output will NOT be saved.')) + argstr="-test", + desc=( + "adds a voxel that should be removed by " + "mri_pretess. The value of the voxel is set to that of an ON-edited WM, " + "so it should be kept with -keep. The output will NOT be saved." + ), + ) class MRIPretessOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='output file after mri_pretess') + out_file = File(exists=True, desc="output file after mri_pretess") class MRIPretess(FSCommand): @@ -1444,7 +1589,8 @@ class MRIPretess(FSCommand): >>> pretess.run() # doctest: +SKIP """ - _cmd = 'mri_pretess' + + _cmd = "mri_pretess" input_spec = MRIPretessInputSpec output_spec = MRIPretessOutputSpec @@ -1458,35 +1604,36 @@ class MRIMarchingCubesInputSpec(FSTraitedSpec): exists=True, mandatory=True, position=1, - argstr='%s', - desc='Input volume to tesselate voxels from.') + argstr="%s", + desc="Input volume to tesselate voxels from.", + ) label_value = traits.Int( position=2, - argstr='%d', + argstr="%d", mandatory=True, - desc= - 'Label value which to tesselate from the input volume. (integer, if input is "filled.mgz" volume, 127 is rh, 255 is lh)' + desc='Label value which to tesselate from the input volume. 
(integer, if input is "filled.mgz" volume, 127 is rh, 255 is lh)', ) connectivity_value = traits.Int( 1, position=-1, - argstr='%d', + argstr="%d", usedefault=True, - desc= - 'Alter the marching cubes connectivity: 1=6+,2=18,3=6,4=26 (default=1)' + desc="Alter the marching cubes connectivity: 1=6+,2=18,3=6,4=26 (default=1)", ) out_file = File( - argstr='./%s', + argstr="./%s", position=-2, genfile=True, - desc='output filename or True to generate one') + desc="output filename or True to generate one", + ) class MRIMarchingCubesOutputSpec(TraitedSpec): """ Uses Freesurfer's mri_mc to create surfaces by tessellating a given input volume """ - surface = File(exists=True, desc='binary surface of the tessellation ') + + surface = File(exists=True, desc="binary surface of the tessellation ") class MRIMarchingCubes(FSCommand): @@ -1503,17 +1650,18 @@ class MRIMarchingCubes(FSCommand): >>> mc.inputs.out_file = 'lh.hippocampus' >>> mc.run() # doctest: +SKIP """ - _cmd = 'mri_mc' + + _cmd = "mri_mc" input_spec = MRIMarchingCubesInputSpec output_spec = MRIMarchingCubesOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['surface'] = self._gen_outfilename() + outputs["surface"] = self._gen_outfilename() return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._gen_outfilename() else: return None @@ -1523,8 +1671,7 @@ def _gen_outfilename(self): return os.path.abspath(self.inputs.out_file) else: _, name, ext = split_filename(self.inputs.in_file) - return os.path.abspath( - name + ext + '_' + str(self.inputs.label_value)) + return os.path.abspath(name + ext + "_" + str(self.inputs.label_value)) class SmoothTessellationInputSpec(FSTraitedSpec): @@ -1535,50 +1682,62 @@ class SmoothTessellationInputSpec(FSTraitedSpec): in_file = File( exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-2, copyfile=True, - desc='Input volume to tesselate voxels from.') + desc="Input volume to tesselate voxels from.", + ) curvature_averaging_iterations = traits.Int( - argstr='-a %d', - desc='Number of curvature averaging iterations (default=10)') + argstr="-a %d", desc="Number of curvature averaging iterations (default=10)" + ) smoothing_iterations = traits.Int( - argstr='-n %d', desc='Number of smoothing iterations (default=10)') + argstr="-n %d", desc="Number of smoothing iterations (default=10)" + ) snapshot_writing_iterations = traits.Int( - argstr='-w %d', desc='Write snapshot every "n" iterations') + argstr="-w %d", desc='Write snapshot every "n" iterations' + ) use_gaussian_curvature_smoothing = traits.Bool( - argstr='-g', desc='Use Gaussian curvature smoothing') + argstr="-g", desc="Use Gaussian curvature smoothing" + ) gaussian_curvature_norm_steps = traits.Int( - argstr='%d ', desc='Use Gaussian curvature smoothing') + argstr="%d ", desc="Use Gaussian curvature smoothing" + ) gaussian_curvature_smoothing_steps = traits.Int( - argstr='%d', desc='Use Gaussian curvature smoothing') + argstr="%d", desc="Use Gaussian curvature smoothing" + ) disable_estimates = traits.Bool( - argstr='-nw', - desc='Disables the writing of curvature and area estimates') + argstr="-nw", desc="Disables the writing of curvature and area estimates" + ) normalize_area = traits.Bool( - argstr='-area', desc='Normalizes the area after smoothing') - use_momentum = traits.Bool(argstr='-m', desc='Uses momentum') + argstr="-area", desc="Normalizes the area after smoothing" + ) + use_momentum = traits.Bool(argstr="-m", desc="Uses momentum") out_file = File( 
- argstr='%s', + argstr="%s", position=-1, genfile=True, - desc='output filename or True to generate one') + desc="output filename or True to generate one", + ) out_curvature_file = File( - argstr='-c %s', desc='Write curvature to ?h.curvname (default "curv")') + argstr="-c %s", desc='Write curvature to ?h.curvname (default "curv")' + ) out_area_file = File( - argstr='-b %s', desc='Write area to ?h.areaname (default "area")') + argstr="-b %s", desc='Write area to ?h.areaname (default "area")' + ) seed = traits.Int( - argstr="-seed %d", desc="Seed for setting random number generator") + argstr="-seed %d", desc="Seed for setting random number generator" + ) class SmoothTessellationOutputSpec(TraitedSpec): """ This program smooths the tessellation of a surface using 'mris_smooth' """ - surface = File(exists=True, desc='Smoothed surface file ') + + surface = File(exists=True, desc="Smoothed surface file ") class SmoothTessellation(FSCommand): @@ -1598,17 +1757,18 @@ class SmoothTessellation(FSCommand): >>> smooth.inputs.in_file = 'lh.hippocampus.stl' >>> smooth.run() # doctest: +SKIP """ - _cmd = 'mris_smooth' + + _cmd = "mris_smooth" input_spec = SmoothTessellationInputSpec output_spec = SmoothTessellationOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['surface'] = self._gen_outfilename() + outputs["surface"] = self._gen_outfilename() return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._gen_outfilename() else: return None @@ -1618,7 +1778,7 @@ def _gen_outfilename(self): return os.path.abspath(self.inputs.out_file) else: _, name, ext = split_filename(self.inputs.in_file) - return os.path.abspath(name + '_smoothed' + ext) + return os.path.abspath(name + "_smoothed" + ext) def _run_interface(self, runtime): # The returncode is meaningless in BET. 
So check the output @@ -1633,19 +1793,21 @@ def _run_interface(self, runtime): class MakeAverageSubjectInputSpec(FSTraitedSpec): subjects_ids = traits.List( traits.Str(), - argstr='--subjects %s', - desc='freesurfer subjects ids to average', + argstr="--subjects %s", + desc="freesurfer subjects ids to average", mandatory=True, - sep=' ') + sep=" ", + ) out_name = File( - 'average', - argstr='--out %s', - desc='name for the average subject', - usedefault=True) + "average", + argstr="--out %s", + desc="name for the average subject", + usedefault=True, + ) class MakeAverageSubjectOutputSpec(TraitedSpec): - average_subject_name = traits.Str(desc='Output registration file') + average_subject_name = traits.Str(desc="Output registration file") class MakeAverageSubject(FSCommand): @@ -1661,33 +1823,31 @@ class MakeAverageSubject(FSCommand): """ - _cmd = 'make_average_subject' + _cmd = "make_average_subject" input_spec = MakeAverageSubjectInputSpec output_spec = MakeAverageSubjectOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['average_subject_name'] = self.inputs.out_name + outputs["average_subject_name"] = self.inputs.out_name return outputs class ExtractMainComponentInputSpec(CommandLineInputSpec): in_file = File( - exists=True, - mandatory=True, - argstr='%s', - position=1, - desc='input surface file') + exists=True, mandatory=True, argstr="%s", position=1, desc="input surface file" + ) out_file = File( - name_template='%s.maincmp', - name_source='in_file', - argstr='%s', + name_template="%s.maincmp", + name_source="in_file", + argstr="%s", position=2, - desc='surface containing main component') + desc="surface containing main component", + ) class ExtractMainComponentOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='surface containing main component') + out_file = File(exists=True, desc="surface containing main component") class ExtractMainComponent(CommandLine): @@ -1703,80 +1863,87 @@ class ExtractMainComponent(CommandLine): """ - _cmd = 'mris_extract_main_component' + _cmd = "mris_extract_main_component" input_spec = ExtractMainComponentInputSpec output_spec = ExtractMainComponentOutputSpec class Tkregister2InputSpec(FSTraitedSpec): target_image = File( - exists=True, argstr="--targ %s", xor=['fstarg'], desc='target volume') + exists=True, argstr="--targ %s", xor=["fstarg"], desc="target volume" + ) fstarg = traits.Bool( False, - argstr='--fstarg', - xor=['target_image'], - desc='use subject\'s T1 as reference') + argstr="--fstarg", + xor=["target_image"], + desc="use subject's T1 as reference", + ) moving_image = File( - exists=True, mandatory=True, argstr="--mov %s", desc='moving volume') + exists=True, mandatory=True, argstr="--mov %s", desc="moving volume" + ) # Input registration file options fsl_in_matrix = File( - exists=True, - argstr="--fsl %s", - desc='fsl-style registration input matrix') + exists=True, argstr="--fsl %s", desc="fsl-style registration input matrix" + ) xfm = File( exists=True, - argstr='--xfm %s', - desc='use a matrix in MNI coordinates as initial registration') + argstr="--xfm %s", + desc="use a matrix in MNI coordinates as initial registration", + ) lta_in = File( exists=True, - argstr='--lta %s', - desc='use a matrix in MNI coordinates as initial registration') + argstr="--lta %s", + desc="use a matrix in MNI coordinates as initial registration", + ) invert_lta_in = traits.Bool( - requires=['lta_in'], desc='Invert input LTA before applying') + requires=["lta_in"], desc="Invert input LTA before applying" + ) # Output 
registration file options
     fsl_out = traits.Either(
         True,
         File,
-        argstr='--fslregout %s',
-        desc='compute an FSL-compatible resgitration matrix')
+        argstr="--fslregout %s",
+        desc="compute an FSL-compatible registration matrix",
+    )
     lta_out = traits.Either(
-        True,
-        File,
-        argstr='--ltaout %s',
-        desc='output registration file (LTA format)')
+        True, File, argstr="--ltaout %s", desc="output registration file (LTA format)"
+    )
     invert_lta_out = traits.Bool(
-        argstr='--ltaout-inv',
-        requires=['lta_in'],
-        desc='Invert input LTA before applying')
+        argstr="--ltaout-inv",
+        requires=["lta_in"],
+        desc="Invert input LTA before applying",
+    )

-    subject_id = traits.String(argstr="--s %s", desc='freesurfer subject ID')
+    subject_id = traits.String(argstr="--s %s", desc="freesurfer subject ID")
     noedit = traits.Bool(
-        True,
-        argstr="--noedit",
-        usedefault=True,
-        desc='do not open edit window (exit)')
+        True, argstr="--noedit", usedefault=True, desc="do not open edit window (exit)"
+    )
     reg_file = File(
-        'register.dat',
+        "register.dat",
         usedefault=True,
         mandatory=True,
-        argstr='--reg %s',
-        desc='freesurfer-style registration file')
+        argstr="--reg %s",
+        desc="freesurfer-style registration file",
+    )
     reg_header = traits.Bool(
-        False, argstr='--regheader', desc='compute regstration from headers')
+        False, argstr="--regheader", desc="compute registration from headers"
+    )
     fstal = traits.Bool(
         False,
-        argstr='--fstal',
-        xor=['target_image', 'moving_image', 'reg_file'],
-        desc='set mov to be tal and reg to be tal xfm')
+        argstr="--fstal",
+        xor=["target_image", "moving_image", "reg_file"],
+        desc="set mov to be tal and reg to be tal xfm",
+    )
     movscale = traits.Float(
-        argstr='--movscale %f', desc='adjust registration matrix to scale mov')
+        argstr="--movscale %f", desc="adjust registration matrix to scale mov"
+    )


 class Tkregister2OutputSpec(TraitedSpec):
-    reg_file = File(exists=True, desc='freesurfer-style registration file')
-    fsl_file = File(desc='FSL-style registration file')
-    lta_file = File(desc='LTA-style registration file')
+    reg_file = File(exists=True, desc="freesurfer-style registration file")
+    fsl_file = File(desc="FSL-style registration file")
+    lta_file = File(desc="LTA-style registration file")


 class Tkregister2(FSCommand):
@@ -1812,38 +1979,41 @@ class Tkregister2(FSCommand):
     'tkregister2 --fsl flirt.mat --mov epi.nii --noedit --reg register.dat'
     >>> tk2.run() # doctest: +SKIP
     """
+
     _cmd = "tkregister2"
     input_spec = Tkregister2InputSpec
     output_spec = Tkregister2OutputSpec

     def _format_arg(self, name, spec, value):
-        if name == 'lta_in' and self.inputs.invert_lta_in:
-            spec = '--lta-inv %s'
-        if name in ('fsl_out', 'lta_out') and value is True:
+        if name == "lta_in" and self.inputs.invert_lta_in:
+            spec = "--lta-inv %s"
+        if name in ("fsl_out", "lta_out") and value is True:
             value = self._list_outputs()[name]
         return super(Tkregister2, self)._format_arg(name, spec, value)

     def _list_outputs(self):
         outputs = self._outputs().get()
         reg_file = os.path.abspath(self.inputs.reg_file)
-        outputs['reg_file'] = reg_file
+        outputs["reg_file"] = reg_file
         cwd = os.getcwd()
         fsl_out = self.inputs.fsl_out
         if isdefined(fsl_out):
             if fsl_out is True:
-                outputs['fsl_file'] = fname_presuffix(
-                    reg_file, suffix='.mat', newpath=cwd, use_ext=False)
+                outputs["fsl_file"] = fname_presuffix(
+                    reg_file, suffix=".mat", newpath=cwd, use_ext=False
+                )
             else:
-                outputs['fsl_file'] = os.path.abspath(self.inputs.fsl_out)
+                outputs["fsl_file"] = os.path.abspath(self.inputs.fsl_out)
         lta_out = self.inputs.lta_out
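
# When fsl_out or lta_out is passed as True rather than a filename, the
# output name is derived from reg_file (handled in the surrounding
# _list_outputs). A hedged sketch that converts an FSL matrix to an LTA,
# reusing the file names from the docstring example above:
from nipype.interfaces.freesurfer import Tkregister2

tk2 = Tkregister2()
tk2.inputs.moving_image = "epi.nii"
tk2.inputs.fsl_in_matrix = "flirt.mat"
tk2.inputs.lta_out = True  # output name derived from reg_file, e.g. register.lta
res = tk2.run()
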
if isdefined(lta_out): if lta_out is True: - outputs['lta_file'] = fname_presuffix( - reg_file, suffix='.lta', newpath=cwd, use_ext=False) + outputs["lta_file"] = fname_presuffix( + reg_file, suffix=".lta", newpath=cwd, use_ext=False + ) else: - outputs['lta_file'] = os.path.abspath(self.inputs.lta_out) + outputs["lta_file"] = os.path.abspath(self.inputs.lta_out) return outputs def _gen_outfilename(self): @@ -1851,34 +2021,26 @@ def _gen_outfilename(self): return os.path.abspath(self.inputs.out_file) else: _, name, ext = split_filename(self.inputs.in_file) - return os.path.abspath(name + '_smoothed' + ext) + return os.path.abspath(name + "_smoothed" + ext) class AddXFormToHeaderInputSpec(FSTraitedSpec): # required in_file = File( - exists=True, - mandatory=True, - position=-2, - argstr="%s", - desc="input volume") + exists=True, mandatory=True, position=-2, argstr="%s", desc="input volume" + ) # transform file does NOT need to exist at the time if using copy_name transform = File( - exists=False, - mandatory=True, - position=-3, - argstr="%s", - desc="xfm file") + exists=False, mandatory=True, position=-3, argstr="%s", desc="xfm file" + ) out_file = File( - 'output.mgz', - position=-1, - argstr="%s", - usedefault=True, - desc="output volume") + "output.mgz", position=-1, argstr="%s", usedefault=True, desc="output volume" + ) # optional copy_name = traits.Bool( - argstr="-c", desc="do not try to load the xfmfile, just copy name") + argstr="-c", desc="do not try to load the xfmfile, just copy name" + ) verbose = traits.Bool(argstr="-v", desc="be verbose") @@ -1911,12 +2073,13 @@ class AddXFormToHeader(FSCommand): [https://surfer.nmr.mgh.harvard.edu/fswiki/mri_add_xform_to_header] """ + _cmd = "mri_add_xform_to_header" input_spec = AddXFormToHeaderInputSpec output_spec = AddXFormToHeaderOutputSpec def _format_arg(self, name, spec, value): - if name == 'transform': + if name == "transform": return value # os.path.abspath(value) # if name == 'copy_name' and value: # self.input_spec.transform @@ -1930,30 +2093,32 @@ def _list_outputs(self): class CheckTalairachAlignmentInputSpec(FSTraitedSpec): in_file = File( - argstr='-xfm %s', - xor=['subject'], + argstr="-xfm %s", + xor=["subject"], exists=True, mandatory=True, position=-1, - desc="specify the talairach.xfm file to check") + desc="specify the talairach.xfm file to check", + ) subject = traits.String( - argstr='-subj %s', - xor=['in_file'], + argstr="-subj %s", + xor=["in_file"], mandatory=True, position=-1, - desc="specify subject's name") + desc="specify subject's name", + ) # optional threshold = traits.Float( default_value=0.010, usedefault=True, - argstr='-T %.3f', - desc="Talairach transforms for subjects with p-values <= T " + - "are considered as very unlikely default=0.010") + argstr="-T %.3f", + desc="Talairach transforms for subjects with p-values <= T " + + "are considered as very unlikely default=0.010", + ) class CheckTalairachAlignmentOutputSpec(TraitedSpec): - out_file = File( - exists=True, desc="The input file for CheckTalairachAlignment") + out_file = File(exists=True, desc="The input file for CheckTalairachAlignment") class CheckTalairachAlignment(FSCommand): @@ -1973,37 +2138,32 @@ class CheckTalairachAlignment(FSCommand): >>> checker.run() # doctest: +SKIP """ + _cmd = "talairach_afd" input_spec = CheckTalairachAlignmentInputSpec output_spec = CheckTalairachAlignmentOutputSpec def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = self.inputs.in_file + outputs["out_file"] = self.inputs.in_file 
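
# A short CheckTalairachAlignment sketch (transform name is hypothetical;
# in_file and subject are mutually exclusive, per the xor declarations above):
from nipype.interfaces.freesurfer import CheckTalairachAlignment

checker = CheckTalairachAlignment()
checker.inputs.in_file = "trans.mat"  # a talairach.xfm-style transform
checker.inputs.threshold = 0.005      # stricter than the 0.010 default
res = checker.run()
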
return outputs class TalairachAVIInputSpec(FSTraitedSpec): - in_file = File( - argstr='--i %s', exists=True, mandatory=True, desc="input volume") + in_file = File(argstr="--i %s", exists=True, mandatory=True, desc="input volume") out_file = File( - argstr='--xfm %s', - mandatory=True, - exists=False, - desc="output xfm file") + argstr="--xfm %s", mandatory=True, exists=False, desc="output xfm file" + ) # optional atlas = traits.String( - argstr='--atlas %s', - desc="alternate target atlas (in freesurfer/average dir)") + argstr="--atlas %s", desc="alternate target atlas (in freesurfer/average dir)" + ) class TalairachAVIOutputSpec(TraitedSpec): - out_file = File( - exists=False, desc="The output transform for TalairachAVI") - out_log = File( - exists=False, desc="The output log file for TalairachAVI") - out_txt = File( - exists=False, desc="The output text file for TaliarachAVI") + out_file = File(exists=False, desc="The output transform for TalairachAVI") + out_log = File(exists=False, desc="The output log file for TalairachAVI") + out_txt = File(exists=False, desc="The output text file for TaliarachAVI") class TalairachAVI(FSCommand): @@ -2026,27 +2186,30 @@ class TalairachAVI(FSCommand): >>> example.run() # doctest: +SKIP """ + _cmd = "talairach_avi" input_spec = TalairachAVIInputSpec output_spec = TalairachAVIOutputSpec def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) - outputs['out_log'] = os.path.abspath('talairach_avi.log') - outputs['out_txt'] = os.path.join( + outputs["out_file"] = os.path.abspath(self.inputs.out_file) + outputs["out_log"] = os.path.abspath("talairach_avi.log") + outputs["out_txt"] = os.path.join( os.path.dirname(self.inputs.out_file), - 'talsrcimg_to_' + str(self.inputs.atlas) + 't4_vox2vox.txt') + "talsrcimg_to_" + str(self.inputs.atlas) + "t4_vox2vox.txt", + ) return outputs class TalairachQCInputSpec(FSTraitedSpec): log_file = File( - argstr='%s', + argstr="%s", mandatory=True, exists=True, position=0, - desc="The log file for TalairachQC") + desc="The log file for TalairachQC", + ) class TalairachQC(FSScriptCommand): @@ -2060,6 +2223,7 @@ class TalairachQC(FSScriptCommand): >>> qc.cmdline 'tal_QC_AZS dirs.txt' """ + _cmd = "tal_QC_AZS" input_spec = TalairachQCInputSpec output_spec = FSScriptOutputSpec @@ -2071,28 +2235,32 @@ class RemoveNeckInputSpec(FSTraitedSpec): exists=True, mandatory=True, position=-4, - desc="Input file for RemoveNeck") + desc="Input file for RemoveNeck", + ) out_file = File( argstr="%s", exists=False, - name_source=['in_file'], + name_source=["in_file"], name_template="%s_noneck", hash_files=False, keep_extension=True, position=-1, - desc="Output file for RemoveNeck") + desc="Output file for RemoveNeck", + ) transform = File( argstr="%s", exists=True, mandatory=True, position=-3, - desc="Input transform file for RemoveNeck") + desc="Input transform file for RemoveNeck", + ) template = File( argstr="%s", exists=True, mandatory=True, position=-2, - desc="Input template file for RemoveNeck") + desc="Input template file for RemoveNeck", + ) # optional radius = traits.Int(argstr="-radius %d", desc="Radius") @@ -2116,13 +2284,14 @@ class RemoveNeck(FSCommand): >>> remove_neck.cmdline 'mri_remove_neck norm.mgz trans.mat trans.mat norm_noneck.mgz' """ + _cmd = "mri_remove_neck" input_spec = RemoveNeckInputSpec output_spec = RemoveNeckOutputSpec def _gen_fname(self, name): - if name == 'out_file': - return os.path.abspath('nu_noneck.mgz') + if name == "out_file": + return 
os.path.abspath("nu_noneck.mgz") return None def _list_outputs(self): @@ -2137,22 +2306,24 @@ class MRIFillInputSpec(FSTraitedSpec): mandatory=True, exists=True, position=-2, - desc="Input white matter file") + desc="Input white matter file", + ) out_file = File( argstr="%s", mandatory=True, exists=False, position=-1, - desc="Output filled volume file name for MRIFill") + desc="Output filled volume file name for MRIFill", + ) # optional segmentation = File( argstr="-segmentation %s", exists=True, - desc="Input segmentation file for MRIFill") + desc="Input segmentation file for MRIFill", + ) transform = File( - argstr="-xform %s", - exists=True, - desc="Input transform file for MRIFill") + argstr="-xform %s", exists=True, desc="Input transform file for MRIFill" + ) log_file = File(argstr="-a %s", desc="Output log file for MRIFill") @@ -2195,23 +2366,23 @@ class MRIsInflateInputSpec(FSTraitedSpec): mandatory=True, exists=True, copyfile=True, - desc="Input file for MRIsInflate") + desc="Input file for MRIsInflate", + ) out_file = File( argstr="%s", position=-1, exists=False, - name_source=['in_file'], + name_source=["in_file"], name_template="%s.inflated", hash_files=False, keep_extension=True, - desc="Output file for MRIsInflate") + desc="Output file for MRIsInflate", + ) # optional - out_sulc = File( - exists=False, xor=['no_save_sulc'], desc="Output sulc file") + out_sulc = File(exists=False, xor=["no_save_sulc"], desc="Output sulc file") no_save_sulc = traits.Bool( - argstr='-no-save-sulc', - xor=['out_sulc'], - desc="Do not save sulc file as output") + argstr="-no-save-sulc", xor=["out_sulc"], desc="Do not save sulc file as output" + ) class MRIsInflateOutputSpec(TraitedSpec): @@ -2233,7 +2404,7 @@ class MRIsInflate(FSCommand): 'mris_inflate -no-save-sulc lh.pial lh.inflated' """ - _cmd = 'mris_inflate' + _cmd = "mris_inflate" input_spec = MRIsInflateInputSpec output_spec = MRIsInflateOutputSpec @@ -2253,27 +2424,30 @@ class SphereInputSpec(FSTraitedSpecOpenMP): copyfile=True, mandatory=True, exists=True, - desc="Input file for Sphere") + desc="Input file for Sphere", + ) out_file = File( argstr="%s", position=-1, exists=False, - name_source=['in_file'], + name_source=["in_file"], hash_files=False, - name_template='%s.sphere', - desc="Output file for Sphere") + name_template="%s.sphere", + desc="Output file for Sphere", + ) # optional seed = traits.Int( - argstr="-seed %d", desc="Seed for setting random number generator") + argstr="-seed %d", desc="Seed for setting random number generator" + ) magic = traits.Bool( argstr="-q", - desc= - "No documentation. Direct questions to analysis-bugs@nmr.mgh.harvard.edu" + desc="No documentation. 
Direct questions to analysis-bugs@nmr.mgh.harvard.edu", ) in_smoothwm = File( exists=True, copyfile=True, - desc="Input surface required when -q flag is not selected") + desc="Input surface required when -q flag is not selected", + ) class SphereOutputSpec(TraitedSpec): @@ -2292,7 +2466,8 @@ class Sphere(FSCommandOpenMP): >>> sphere.cmdline 'mris_sphere lh.pial lh.sphere' """ - _cmd = 'mris_sphere' + + _cmd = "mris_sphere" input_spec = SphereInputSpec output_spec = SphereOutputSpec @@ -2304,45 +2479,44 @@ def _list_outputs(self): class FixTopologyInputSpec(FSTraitedSpec): in_orig = File( - exists=True, - mandatory=True, - desc="Undocumented input file .orig") + exists=True, mandatory=True, desc="Undocumented input file .orig" + ) in_inflated = File( exists=True, mandatory=True, - desc="Undocumented input file .inflated") - in_brain = File( - exists=True, mandatory=True, desc="Implicit input brain.mgz") + desc="Undocumented input file .inflated", + ) + in_brain = File(exists=True, mandatory=True, desc="Implicit input brain.mgz") in_wm = File(exists=True, mandatory=True, desc="Implicit input wm.mgz") hemisphere = traits.String( - position=-1, - argstr="%s", - mandatory=True, - desc="Hemisphere being processed") + position=-1, argstr="%s", mandatory=True, desc="Hemisphere being processed" + ) subject_id = traits.String( - 'subject_id', + "subject_id", position=-2, argstr="%s", mandatory=True, usedefault=True, - desc="Subject being processed") + desc="Subject being processed", + ) copy_inputs = traits.Bool( mandatory=True, - desc="If running as a node, set this to True " + - "otherwise, the topology fixing will be done " + "in place.") + desc="If running as a node, set this to True " + + "otherwise, the topology fixing will be done " + + "in place.", + ) # optional seed = traits.Int( - argstr="-seed %d", desc="Seed for setting random number generator") + argstr="-seed %d", desc="Seed for setting random number generator" + ) ga = traits.Bool( argstr="-ga", - desc= - "No documentation. Direct questions to analysis-bugs@nmr.mgh.harvard.edu" + desc="No documentation. Direct questions to analysis-bugs@nmr.mgh.harvard.edu", ) mgz = traits.Bool( argstr="-mgz", - desc= - "No documentation. Direct questions to analysis-bugs@nmr.mgh.harvard.edu" + desc="No documentation. 
Direct questions to analysis-bugs@nmr.mgh.harvard.edu", ) sphere = File(argstr="-sphere %s", desc="Sphere input file") @@ -2372,38 +2546,38 @@ class FixTopology(FSCommand): 'mris_fix_topology -ga -mgz -sphere qsphere.nofix 10335 lh' """ - _cmd = 'mris_fix_topology' + _cmd = "mris_fix_topology" input_spec = FixTopologyInputSpec output_spec = FixTopologyOutputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir hemi = self.inputs.hemisphere - copy2subjdir(self, self.inputs.sphere, folder='surf') + copy2subjdir(self, self.inputs.sphere, folder="surf") # the orig file is edited in place self.inputs.in_orig = copy2subjdir( self, self.inputs.in_orig, - folder='surf', - basename='{0}.orig'.format(hemi)) + folder="surf", + basename="{0}.orig".format(hemi), + ) copy2subjdir( self, self.inputs.in_inflated, - folder='surf', - basename='{0}.inflated'.format(hemi)) - copy2subjdir( - self, self.inputs.in_brain, folder='mri', basename='brain.mgz') - copy2subjdir( - self, self.inputs.in_wm, folder='mri', basename='wm.mgz') + folder="surf", + basename="{0}.inflated".format(hemi), + ) + copy2subjdir(self, self.inputs.in_brain, folder="mri", basename="brain.mgz") + copy2subjdir(self, self.inputs.in_wm, folder="mri", basename="wm.mgz") return super(FixTopology, self).run(**inputs) def _format_arg(self, name, spec, value): - if name == 'sphere': + if name == "sphere": # get the basename and take out the hemisphere - suffix = os.path.basename(value).split('.', 1)[1] + suffix = os.path.basename(value).split(".", 1)[1] return spec.argstr % suffix return super(FixTopology, self)._format_arg(name, spec, value) @@ -2419,7 +2593,8 @@ class EulerNumberInputSpec(FSTraitedSpec): position=-1, mandatory=True, exists=True, - desc="Input file for EulerNumber") + desc="Input file for EulerNumber", + ) class EulerNumberOutputSpec(TraitedSpec): @@ -2438,7 +2613,8 @@ class EulerNumber(FSCommand): >>> ft.cmdline 'mris_euler_number lh.pial' """ - _cmd = 'mris_euler_number' + + _cmd = "mris_euler_number" input_spec = EulerNumberInputSpec output_spec = EulerNumberOutputSpec @@ -2455,16 +2631,18 @@ class RemoveIntersectionInputSpec(FSTraitedSpec): mandatory=True, exists=True, copyfile=True, - desc="Input file for RemoveIntersection") + desc="Input file for RemoveIntersection", + ) out_file = File( argstr="%s", position=-1, exists=False, - name_source=['in_file'], - name_template='%s', + name_source=["in_file"], + name_template="%s", hash_files=False, keep_extension=True, - desc="Output file for RemoveIntersection") + desc="Output file for RemoveIntersection", + ) class RemoveIntersectionOutputSpec(TraitedSpec): @@ -2484,7 +2662,7 @@ class RemoveIntersection(FSCommand): 'mris_remove_intersection lh.pial lh.pial' """ - _cmd = 'mris_remove_intersection' + _cmd = "mris_remove_intersection" input_spec = RemoveIntersectionInputSpec output_spec = RemoveIntersectionOutputSpec @@ -2497,82 +2675,83 @@ def _list_outputs(self): class MakeSurfacesInputSpec(FSTraitedSpec): # required hemisphere = traits.Enum( - 'lh', - 'rh', + "lh", + "rh", position=-1, argstr="%s", mandatory=True, - desc="Hemisphere being processed") + desc="Hemisphere being processed", + ) subject_id = traits.String( - 'subject_id', + "subject_id", usedefault=True, position=-2, argstr="%s", mandatory=True, - desc="Subject being processed") + desc="Subject being 
processed", + ) # implicit in_orig = File( exists=True, mandatory=True, - argstr='-orig %s', - desc="Implicit input file .orig") - in_wm = File( - exists=True, mandatory=True, desc="Implicit input file wm.mgz") - in_filled = File( - exists=True, mandatory=True, desc="Implicit input file filled.mgz") + argstr="-orig %s", + desc="Implicit input file .orig", + ) + in_wm = File(exists=True, mandatory=True, desc="Implicit input file wm.mgz") + in_filled = File(exists=True, mandatory=True, desc="Implicit input file filled.mgz") # optional in_white = File(exists=True, desc="Implicit input that is sometimes used") in_label = File( exists=True, - xor=['noaparc'], - desc="Implicit input label/.aparc.annot") + xor=["noaparc"], + desc="Implicit input label/.aparc.annot", + ) orig_white = File( argstr="-orig_white %s", exists=True, - desc="Specify a white surface to start with") + desc="Specify a white surface to start with", + ) orig_pial = File( argstr="-orig_pial %s", exists=True, - requires=['in_label'], - desc="Specify a pial surface to start with") + requires=["in_label"], + desc="Specify a pial surface to start with", + ) fix_mtl = traits.Bool(argstr="-fix_mtl", desc="Undocumented flag") no_white = traits.Bool(argstr="-nowhite", desc="Undocumented flag") white_only = traits.Bool(argstr="-whiteonly", desc="Undocumented flage") - in_aseg = File( - argstr="-aseg %s", exists=True, desc="Input segmentation file") + in_aseg = File(argstr="-aseg %s", exists=True, desc="Input segmentation file") in_T1 = File(argstr="-T1 %s", exists=True, desc="Input brain or T1 file") mgz = traits.Bool( argstr="-mgz", - desc= - "No documentation. Direct questions to analysis-bugs@nmr.mgh.harvard.edu" + desc="No documentation. Direct questions to analysis-bugs@nmr.mgh.harvard.edu", ) noaparc = traits.Bool( argstr="-noaparc", - xor=['in_label'], - desc= - "No documentation. Direct questions to analysis-bugs@nmr.mgh.harvard.edu" + xor=["in_label"], + desc="No documentation. Direct questions to analysis-bugs@nmr.mgh.harvard.edu", ) maximum = traits.Float( - argstr="-max %.1f", - desc="No documentation (used for longitudinal processing)") + argstr="-max %.1f", desc="No documentation (used for longitudinal processing)" + ) longitudinal = traits.Bool( - argstr="-long", - desc="No documentation (used for longitudinal processing)") + argstr="-long", desc="No documentation (used for longitudinal processing)" + ) white = traits.String(argstr="-white %s", desc="White surface name") copy_inputs = traits.Bool( - desc="If running as a node, set this to True." + - "This will copy the input files to the node " + "directory.") + desc="If running as a node, set this to True." + + "This will copy the input files to the node " + + "directory." 
class MakeSurfacesOutputSpec(TraitedSpec): - out_white = File( - exists=False, desc="Output white matter hemisphere surface") + out_white = File(exists=False, desc="Output white matter hemisphere surface") out_curv = File(exists=False, desc="Output curv file for MakeSurfaces") out_area = File(exists=False, desc="Output area file for MakeSurfaces") out_cortex = File(exists=False, desc="Output cortex file for MakeSurfaces") out_pial = File(exists=False, desc="Output pial surface for MakeSurfaces") - out_thickness = File( - exists=False, desc="Output thickness file for MakeSurfaces") + out_thickness = File(exists=False, desc="Output thickness file for MakeSurfaces") class MakeSurfaces(FSCommand): @@ -2599,42 +2778,50 @@ class MakeSurfaces(FSCommand): 'mris_make_surfaces -T1 T1.mgz -orig pial -orig_pial pial 10335 lh' """ - _cmd = 'mris_make_surfaces' + _cmd = "mris_make_surfaces" input_spec = MakeSurfacesInputSpec output_spec = MakeSurfacesOutputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir + copy2subjdir(self, self.inputs.in_wm, folder="mri", basename="wm.mgz") copy2subjdir( - self, self.inputs.in_wm, folder='mri', basename='wm.mgz') + self, self.inputs.in_filled, folder="mri", basename="filled.mgz" + ) copy2subjdir( self, - self.inputs.in_filled, - folder='mri', - basename='filled.mgz') - copy2subjdir(self, self.inputs.in_white, 'surf', - '{0}.white'.format(self.inputs.hemisphere)) + self.inputs.in_white, + "surf", + "{0}.white".format(self.inputs.hemisphere), + ) for originalfile in [self.inputs.in_aseg, self.inputs.in_T1]: - copy2subjdir(self, originalfile, folder='mri') + copy2subjdir(self, originalfile, folder="mri") for originalfile in [ - self.inputs.orig_white, self.inputs.orig_pial, - self.inputs.in_orig + self.inputs.orig_white, + self.inputs.orig_pial, + self.inputs.in_orig, ]: - copy2subjdir(self, originalfile, folder='surf') + copy2subjdir(self, originalfile, folder="surf") if isdefined(self.inputs.in_label): - copy2subjdir(self, self.inputs.in_label, 'label', - '{0}.aparc.annot'.format(self.inputs.hemisphere)) + copy2subjdir( + self, + self.inputs.in_label, + "label", + "{0}.aparc.annot".format(self.inputs.hemisphere), + ) else: os.makedirs( - os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'label')) + os.path.join( + self.inputs.subjects_dir, self.inputs.subject_id, "label" + ) + ) return super(MakeSurfaces, self).run(**inputs) def _format_arg(self, name, spec, value): - if name in ['in_T1', 'in_aseg']: + if name in ["in_T1", "in_aseg"]: # These inputs do not take full paths as inputs or even basenames basename = os.path.basename(value) # when the -mgz flag is specified, it assumes the mgz extension @@ -2642,63 +2829,65 @@ def _format_arg(self, name, spec, value): prefix = os.path.splitext(basename)[0] else: prefix = basename - if prefix == 'aseg': + if prefix == "aseg": return # aseg is already the default return spec.argstr % prefix - elif name in ['orig_white', 'orig_pial']: + elif name in ["orig_white", "orig_pial"]: # these inputs do take full file paths or even basenames basename = os.path.basename(value) - suffix = basename.split('.')[1] + suffix = basename.split(".")[1] return spec.argstr % suffix
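# Added commentary on the in_orig branch below (an editorial note, not from
# the upstream tool documentation): the standard {lh,rh}.orig surfaces are
# located implicitly by mris_make_surfaces and omitted from the command
# line; any other orig surface is passed by its suffix only, e.g.
# in_orig="lh.pial" becomes "-orig pial" as in the class docstring:
#
#   os.path.basename("lh.pial").split(".")[1]  # -> "pial"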
value.endswith("lh.orig") or value.endswith("rh.orig"): # {lh,rh}.orig inputs are not sepcified on command line return else: # if the input orig file is different than lh.orig or rh.orig # these inputs do take full file paths or even basenames basename = os.path.basename(value) - suffix = basename.split('.')[1] + suffix = basename.split(".")[1] return spec.argstr % suffix return super(MakeSurfaces, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() # Outputs are saved in the surf directory - dest_dir = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'surf') + dest_dir = os.path.join( + self.inputs.subjects_dir, self.inputs.subject_id, "surf" + ) # labels are saved in the label directory - label_dir = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'label') + label_dir = os.path.join( + self.inputs.subjects_dir, self.inputs.subject_id, "label" + ) if not self.inputs.no_white: outputs["out_white"] = os.path.join( - dest_dir, - str(self.inputs.hemisphere) + '.white') + dest_dir, str(self.inputs.hemisphere) + ".white" + ) # The curv and area files must have the hemisphere names as a prefix outputs["out_curv"] = os.path.join( - dest_dir, - str(self.inputs.hemisphere) + '.curv') + dest_dir, str(self.inputs.hemisphere) + ".curv" + ) outputs["out_area"] = os.path.join( - dest_dir, - str(self.inputs.hemisphere) + '.area') + dest_dir, str(self.inputs.hemisphere) + ".area" + ) # Something determines when a pial surface and thickness file is generated # but documentation doesn't say what. # The orig_pial input is just a guess - if isdefined(self.inputs.orig_pial) or self.inputs.white == 'NOWRITE': + if isdefined(self.inputs.orig_pial) or self.inputs.white == "NOWRITE": outputs["out_curv"] = outputs["out_curv"] + ".pial" outputs["out_area"] = outputs["out_area"] + ".pial" outputs["out_pial"] = os.path.join( - dest_dir, - str(self.inputs.hemisphere) + '.pial') + dest_dir, str(self.inputs.hemisphere) + ".pial" + ) outputs["out_thickness"] = os.path.join( - dest_dir, - str(self.inputs.hemisphere) + '.thickness') + dest_dir, str(self.inputs.hemisphere) + ".thickness" + ) else: # when a pial surface is generated, the cortex label file is not # generated outputs["out_cortex"] = os.path.join( - label_dir, - str(self.inputs.hemisphere) + '.cortex.label') + label_dir, str(self.inputs.hemisphere) + ".cortex.label" + ) return outputs @@ -2709,26 +2898,25 @@ class CurvatureInputSpec(FSTraitedSpec): mandatory=True, exists=True, copyfile=True, - desc="Input file for Curvature") + desc="Input file for Curvature", + ) # optional - threshold = traits.Float( - argstr="-thresh %.3f", desc="Undocumented input threshold") + threshold = traits.Float(argstr="-thresh %.3f", desc="Undocumented input threshold") n = traits.Bool(argstr="-n", desc="Undocumented boolean flag") averages = traits.Int( argstr="-a %d", - desc= - "Perform this number iterative averages of curvature measure before saving" + desc="Perform this number iterative averages of curvature measure before saving", ) save = traits.Bool( argstr="-w", - desc= - "Save curvature files (will only generate screen output without this option)" + desc="Save curvature files (will only generate screen output without this option)", ) distances = traits.Tuple( traits.Int, traits.Int, argstr="-distances %d %d", - desc="Undocumented input integer distances") + desc="Undocumented input integer distances", + ) copy_input = traits.Bool(desc="Copy input file to current directory") @@ -2753,13 +2941,13 @@ 
class Curvature(FSCommand): 'mris_curvature -w lh.pial' """ - _cmd = 'mris_curvature' + _cmd = "mris_curvature" input_spec = CurvatureInputSpec output_spec = CurvatureOutputSpec def _format_arg(self, name, spec, value): if self.inputs.copy_input: - if name == 'in_file': + if name == "in_file": basename = os.path.basename(value) return spec.argstr % basename return super(Curvature, self)._format_arg(name, spec, value) @@ -2770,60 +2958,66 @@ def _list_outputs(self): in_file = os.path.basename(self.inputs.in_file) else: in_file = self.inputs.in_file - outputs["out_mean"] = os.path.abspath(in_file) + '.H' - outputs["out_gauss"] = os.path.abspath(in_file) + '.K' + outputs["out_mean"] = os.path.abspath(in_file) + ".H" + outputs["out_gauss"] = os.path.abspath(in_file) + ".K" return outputs class CurvatureStatsInputSpec(FSTraitedSpec): surface = File( - argstr="-F %s", - exists=True, - desc="Specify surface file for CurvatureStats") + argstr="-F %s", exists=True, desc="Specify surface file for CurvatureStats" + ) curvfile1 = File( argstr="%s", position=-2, mandatory=True, exists=True, - desc="Input file for CurvatureStats") + desc="Input file for CurvatureStats", + ) curvfile2 = File( argstr="%s", position=-1, mandatory=True, exists=True, - desc="Input file for CurvatureStats") + desc="Input file for CurvatureStats", + ) hemisphere = traits.Enum( - 'lh', - 'rh', + "lh", + "rh", position=-3, argstr="%s", mandatory=True, - desc="Hemisphere being processed") + desc="Hemisphere being processed", + ) subject_id = traits.String( - 'subject_id', + "subject_id", usedefault=True, position=-4, argstr="%s", mandatory=True, - desc="Subject being processed") + desc="Subject being processed", + ) out_file = File( argstr="-o %s", exists=False, - name_source=['hemisphere'], - name_template='%s.curv.stats', + name_source=["hemisphere"], + name_template="%s.curv.stats", hash_files=False, - desc="Output curvature stats file") + desc="Output curvature stats file", + ) # optional min_max = traits.Bool( - argstr="-m", - desc="Output min / max information for the processed curvature.") + argstr="-m", desc="Output min / max information for the processed curvature." + ) values = traits.Bool( - argstr="-G", desc="Triggers a series of derived curvature values") - write = traits.Bool( - argstr="--writeCurvatureFiles", desc="Write curvature files") + argstr="-G", desc="Triggers a series of derived curvature values" + ) + write = traits.Bool(argstr="--writeCurvatureFiles", desc="Write curvature files") copy_inputs = traits.Bool( - desc="If running as a node, set this to True." + - "This will copy the input files to the node " + "directory.") + desc="If running as a node, set this to True." + + "This will copy the input files to the node " + + "directory." 
+ ) class CurvatureStatsOutputSpec(TraitedSpec): @@ -2869,13 +3063,13 @@ class CurvatureStats(FSCommand): 'mris_curvature_stats -m -o lh.curv.stats -F pial -G --writeCurvatureFiles subject_id lh pial pial' """ - _cmd = 'mris_curvature_stats' + _cmd = "mris_curvature_stats" input_spec = CurvatureStatsInputSpec output_spec = CurvatureStatsOutputSpec def _format_arg(self, name, spec, value): - if name in ['surface', 'curvfile1', 'curvfile2']: - prefix = os.path.basename(value).split('.')[1] + if name in ["surface", "curvfile1", "curvfile2"]: + prefix = os.path.basename(value).split(".")[1] return spec.argstr % prefix return super(CurvatureStats, self)._format_arg(name, spec, value) @@ -2887,43 +3081,37 @@ def _list_outputs(self): def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir - copy2subjdir(self, self.inputs.surface, 'surf') - copy2subjdir(self, self.inputs.curvfile1, 'surf') - copy2subjdir(self, self.inputs.curvfile2, 'surf') + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir + copy2subjdir(self, self.inputs.surface, "surf") + copy2subjdir(self, self.inputs.curvfile1, "surf") + copy2subjdir(self, self.inputs.curvfile2, "surf") return super(CurvatureStats, self).run(**inputs) class JacobianInputSpec(FSTraitedSpec): # required in_origsurf = File( - argstr="%s", - position=-3, - mandatory=True, - exists=True, - desc="Original surface") + argstr="%s", position=-3, mandatory=True, exists=True, desc="Original surface" + ) in_mappedsurf = File( - argstr="%s", - position=-2, - mandatory=True, - exists=True, - desc="Mapped surface") + argstr="%s", position=-2, mandatory=True, exists=True, desc="Mapped surface" + ) # optional out_file = File( argstr="%s", exists=False, position=-1, - name_source=['in_origsurf'], + name_source=["in_origsurf"], hash_files=False, - name_template='%s.jacobian', + name_template="%s.jacobian", keep_extension=False, - desc="Output Jacobian of the surface mapping") + desc="Output Jacobian of the surface mapping", + ) class JacobianOutputSpec(TraitedSpec): - out_file = File( - exists=False, desc="Output Jacobian of the surface mapping") + out_file = File(exists=False, desc="Output Jacobian of the surface mapping") class Jacobian(FSCommand): @@ -2940,49 +3128,45 @@ class Jacobian(FSCommand): 'mris_jacobian lh.pial lh.pial lh.jacobian' """ - _cmd = 'mris_jacobian' + _cmd = "mris_jacobian" input_spec = JacobianInputSpec output_spec = JacobianOutputSpec def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class MRIsCalcInputSpec(FSTraitedSpec): # required in_file1 = File( - argstr="%s", - position=-3, - mandatory=True, - exists=True, - desc="Input file 1") + argstr="%s", position=-3, mandatory=True, exists=True, desc="Input file 1" + ) action = traits.String( argstr="%s", position=-2, mandatory=True, - desc="Action to perform on input file(s)") + desc="Action to perform on input file(s)", + ) out_file = File( - argstr="-o %s", mandatory=True, desc="Output file after calculation") + argstr="-o %s", mandatory=True, desc="Output file after calculation" + ) # optional in_file2 = File( argstr="%s", exists=True, position=-1, - xor=['in_float', 'in_int'], - desc="Input file 2") + xor=["in_float", "in_int"], + desc="Input file 2", + ) in_float = traits.Float( - argstr="%f", - position=-1, - 
xor=['in_file2', 'in_int'], - desc="Input float") + argstr="%f", position=-1, xor=["in_file2", "in_int"], desc="Input float" + ) in_int = traits.Int( - argstr="%d", - position=-1, - xor=['in_file2', 'in_float'], - desc="Input integer") + argstr="%d", position=-1, xor=["in_file2", "in_float"], desc="Input integer" + ) class MRIsCalcOutputSpec(TraitedSpec): @@ -3014,70 +3198,74 @@ class MRIsCalc(FSCommand): 'mris_calc -o lh.area.mid lh.area add lh.area.pial' """ - _cmd = 'mris_calc' + _cmd = "mris_calc" input_spec = MRIsCalcInputSpec output_spec = MRIsCalcOutputSpec def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class VolumeMaskInputSpec(FSTraitedSpec): left_whitelabel = traits.Int( - argstr="--label_left_white %d", - mandatory=True, - desc="Left white matter label") + argstr="--label_left_white %d", mandatory=True, desc="Left white matter label" + ) left_ribbonlabel = traits.Int( argstr="--label_left_ribbon %d", mandatory=True, - desc="Left cortical ribbon label") + desc="Left cortical ribbon label", + ) right_whitelabel = traits.Int( - argstr="--label_right_white %d", - mandatory=True, - desc="Right white matter label") + argstr="--label_right_white %d", mandatory=True, desc="Right white matter label" + ) right_ribbonlabel = traits.Int( argstr="--label_right_ribbon %d", mandatory=True, - desc="Right cortical ribbon label") - lh_pial = File( - mandatory=True, exists=True, desc="Implicit input left pial surface") + desc="Right cortical ribbon label", + ) + lh_pial = File(mandatory=True, exists=True, desc="Implicit input left pial surface") rh_pial = File( - mandatory=True, exists=True, desc="Implicit input right pial surface") + mandatory=True, exists=True, desc="Implicit input right pial surface" + ) lh_white = File( - mandatory=True, - exists=True, - desc="Implicit input left white matter surface") + mandatory=True, exists=True, desc="Implicit input left white matter surface" + ) rh_white = File( - mandatory=True, - exists=True, - desc="Implicit input right white matter surface") + mandatory=True, exists=True, desc="Implicit input right white matter surface" + ) aseg = File( exists=True, - xor=['in_aseg'], - desc="Implicit aseg.mgz segmentation. " + - "Specify a different aseg by using the 'in_aseg' input.") + xor=["in_aseg"], + desc="Implicit aseg.mgz segmentation. " + + "Specify a different aseg by using the 'in_aseg' input.", + ) subject_id = traits.String( - 'subject_id', + "subject_id", usedefault=True, position=-1, argstr="%s", mandatory=True, - desc="Subject being processed") + desc="Subject being processed", + ) # optional in_aseg = File( argstr="--aseg_name %s", exists=True, - xor=['aseg'], - desc="Input aseg file for VolumeMask") + xor=["aseg"], + desc="Input aseg file for VolumeMask", + ) save_ribbon = traits.Bool( argstr="--save_ribbon", - desc="option to save just the ribbon for the " + - "hemispheres in the format ?h.ribbon.mgz") + desc="option to save just the ribbon for the " + + "hemispheres in the format ?h.ribbon.mgz", + ) copy_inputs = traits.Bool( - desc="If running as a node, set this to True." + - "This will copy the implicit input files to the " + "node directory.") + desc="If running as a node, set this to True." + + "This will copy the implicit input files to the " + + "node directory." 
+ ) class VolumeMaskOutputSpec(TraitedSpec): @@ -3114,142 +3302,153 @@ class VolumeMask(FSCommand): 'mris_volmask --label_left_ribbon 3 --label_left_white 2 --label_right_ribbon 42 --label_right_white 41 --save_ribbon 10335' """ - _cmd = 'mris_volmask' + _cmd = "mris_volmask" input_spec = VolumeMaskInputSpec output_spec = VolumeMaskOutputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir - copy2subjdir(self, self.inputs.lh_pial, 'surf', 'lh.pial') - copy2subjdir(self, self.inputs.rh_pial, 'surf', 'rh.pial') - copy2subjdir(self, self.inputs.lh_white, 'surf', 'lh.white') - copy2subjdir(self, self.inputs.rh_white, 'surf', 'rh.white') - copy2subjdir(self, self.inputs.in_aseg, 'mri') - copy2subjdir(self, self.inputs.aseg, 'mri', 'aseg.mgz') + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir + copy2subjdir(self, self.inputs.lh_pial, "surf", "lh.pial") + copy2subjdir(self, self.inputs.rh_pial, "surf", "rh.pial") + copy2subjdir(self, self.inputs.lh_white, "surf", "lh.white") + copy2subjdir(self, self.inputs.rh_white, "surf", "rh.white") + copy2subjdir(self, self.inputs.in_aseg, "mri") + copy2subjdir(self, self.inputs.aseg, "mri", "aseg.mgz") return super(VolumeMask, self).run(**inputs) def _format_arg(self, name, spec, value): - if name == 'in_aseg': - return spec.argstr % os.path.basename(value).rstrip('.mgz') + if name == "in_aseg": + return spec.argstr % os.path.basename(value).rstrip(".mgz") return super(VolumeMask, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() - out_dir = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'mri') - outputs["out_ribbon"] = os.path.join(out_dir, 'ribbon.mgz') + out_dir = os.path.join(self.inputs.subjects_dir, self.inputs.subject_id, "mri") + outputs["out_ribbon"] = os.path.join(out_dir, "ribbon.mgz") if self.inputs.save_ribbon: - outputs["rh_ribbon"] = os.path.join(out_dir, 'rh.ribbon.mgz') - outputs["lh_ribbon"] = os.path.join(out_dir, 'lh.ribbon.mgz') + outputs["rh_ribbon"] = os.path.join(out_dir, "rh.ribbon.mgz") + outputs["lh_ribbon"] = os.path.join(out_dir, "lh.ribbon.mgz") return outputs class ParcellationStatsInputSpec(FSTraitedSpec): # required subject_id = traits.String( - 'subject_id', + "subject_id", usedefault=True, position=-3, argstr="%s", mandatory=True, - desc="Subject being processed") + desc="Subject being processed", + ) hemisphere = traits.Enum( - 'lh', - 'rh', + "lh", + "rh", position=-2, argstr="%s", mandatory=True, - desc="Hemisphere being processed") + desc="Hemisphere being processed", + ) # implicit wm = File( - mandatory=True, - exists=True, - desc="Input file must be <subject_id>/mri/wm.mgz") + mandatory=True, exists=True, desc="Input file must be <subject_id>/mri/wm.mgz" + ) lh_white = File( mandatory=True, exists=True, - desc="Input file must be <subject_id>/surf/lh.white") + desc="Input file must be <subject_id>/surf/lh.white", + ) rh_white = File( mandatory=True, exists=True, - desc="Input file must be <subject_id>/surf/rh.white") + desc="Input file must be <subject_id>/surf/rh.white", + ) lh_pial = File( - mandatory=True, - exists=True, - desc="Input file must be <subject_id>/surf/lh.pial") + mandatory=True, exists=True, desc="Input file must be <subject_id>/surf/lh.pial" + ) rh_pial = File( - mandatory=True, - exists=True, - desc="Input file must be <subject_id>/surf/rh.pial") + mandatory=True, exists=True, desc="Input file must be <subject_id>/surf/rh.pial" + )
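# The implicit File traits in this spec carry no argstr: mris_anatomical_stats
# finds them at their conventional paths under $SUBJECTS_DIR, so run() stages
# them with copy2subjdir when copy_inputs is set. A rough sketch of what that
# helper does, inferred from its call sites in this file (not the actual
# implementation, which lives elsewhere in this package):
#
#   import os
#   import shutil
#
#   def copy2subjdir(cls, in_file, folder=None, basename=None):
#       # Copy in_file into <subjects_dir>/<subject_id>/<folder>/, creating
#       # directories as needed; undefined inputs pass through unchanged.
#       if not isdefined(in_file):
#           return in_file
#       dest = os.path.join(cls.inputs.subjects_dir, cls.inputs.subject_id)
#       if folder is not None:
#           dest = os.path.join(dest, folder)
#       if not os.path.isdir(dest):
#           os.makedirs(dest)
#       out_file = os.path.join(dest, basename or os.path.basename(in_file))
#       if not os.path.exists(out_file):
#           shutil.copy(in_file, out_file)
#       return out_file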
transform = File( mandatory=True, exists=True, - desc="Input file must be <subject_id>/mri/transforms/talairach.xfm") + desc="Input file must be <subject_id>/mri/transforms/talairach.xfm", + ) thickness = File( mandatory=True, exists=True, - desc="Input file must be <subject_id>/surf/?h.thickness") + desc="Input file must be <subject_id>/surf/?h.thickness", + ) brainmask = File( mandatory=True, exists=True, - desc="Input file must be <subject_id>/mri/brainmask.mgz") + desc="Input file must be <subject_id>/mri/brainmask.mgz", + ) aseg = File( mandatory=True, exists=True, - desc="Input file must be <subject_id>/mri/aseg.presurf.mgz") + desc="Input file must be <subject_id>/mri/aseg.presurf.mgz", + ) ribbon = File( mandatory=True, exists=True, - desc="Input file must be <subject_id>/mri/ribbon.mgz") - cortex_label = File( - exists=True, desc="implicit input file {hemi}.cortex.label") + desc="Input file must be <subject_id>/mri/ribbon.mgz", + ) + cortex_label = File(exists=True, desc="implicit input file {hemi}.cortex.label") # optional surface = traits.String( - position=-1, argstr="%s", desc="Input surface (e.g. 'white')") + position=-1, argstr="%s", desc="Input surface (e.g. 'white')" + ) mgz = traits.Bool(argstr="-mgz", desc="Look for mgz files") - in_cortex = File( - argstr="-cortex %s", exists=True, desc="Input cortex label") + in_cortex = File(argstr="-cortex %s", exists=True, desc="Input cortex label") in_annotation = File( argstr="-a %s", exists=True, - xor=['in_label'], - desc= - "compute properties for each label in the annotation file separately") + xor=["in_label"], + desc="compute properties for each label in the annotation file separately", + ) in_label = File( argstr="-l %s", exists=True, - xor=['in_annotatoin', 'out_color'], - desc="limit calculations to specified label") + xor=["in_annotation", "out_color"], + desc="limit calculations to specified label", + ) tabular_output = traits.Bool(argstr="-b", desc="Tabular output") out_table = File( argstr="-f %s", exists=False, genfile=True, - requires=['tabular_output'], - desc="Table output to tablefile") + requires=["tabular_output"], + desc="Table output to tablefile", + ) out_color = File( argstr="-c %s", exists=False, genfile=True, - xor=['in_label'], - desc="Output annotation files's colortable to text file") + xor=["in_label"], + desc="Output annotation file's colortable to text file", + ) copy_inputs = traits.Bool( - desc="If running as a node, set this to True." + - "This will copy the input files to the node " + "directory.") + desc="If running as a node, set this to True." + + "This will copy the input files to the node " + + "directory." + )
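# Added note on the trait metadata above: `xor` names sibling inputs that may
# not be set together (in_label excludes in_annotation and out_color), while
# `requires` names inputs that must accompany this one (out_table requires
# tabular_output). Nipype validates both against the spec's trait names at
# runtime, so the strings must match the trait definitions exactly.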
th3 = traits.Bool( argstr="-th3", requires=["cortex_label"], - desc="turns on new vertex-wise volume calc for mris_anat_stats") + desc="turns on new vertex-wise volume calc for mris_anat_stats", + ) class ParcellationStatsOutputSpec(TraitedSpec): out_table = File(exists=False, desc="Table output to tablefile") out_color = File( - exists=False, desc="Output annotation files's colortable to text file") + exists=False, desc="Output annotation file's colortable to text file" + ) class ParcellationStats(FSCommand): @@ -3280,35 +3479,47 @@ class ParcellationStats(FSCommand): 'mris_anatomical_stats -c test.ctab -f lh.test.stats 10335 lh white' """ - _cmd = 'mris_anatomical_stats' + _cmd = "mris_anatomical_stats" input_spec = ParcellationStatsInputSpec output_spec = ParcellationStatsOutputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir - copy2subjdir(self, self.inputs.lh_white, 'surf', 'lh.white') - copy2subjdir(self, self.inputs.lh_pial, 'surf', 'lh.pial') - copy2subjdir(self, self.inputs.rh_white, 'surf', 'rh.white') - copy2subjdir(self, self.inputs.rh_pial, 'surf', 'rh.pial') - copy2subjdir(self, self.inputs.wm, 'mri', 'wm.mgz') - copy2subjdir(self, self.inputs.transform, - os.path.join('mri', 'transforms'), 'talairach.xfm') - copy2subjdir(self, self.inputs.brainmask, 'mri', 'brainmask.mgz') - copy2subjdir(self, self.inputs.aseg, 'mri', 'aseg.presurf.mgz') - copy2subjdir(self, self.inputs.ribbon, 'mri', 'ribbon.mgz') - copy2subjdir(self, self.inputs.thickness, 'surf', - '{0}.thickness'.format(self.inputs.hemisphere)) + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir + copy2subjdir(self, self.inputs.lh_white, "surf", "lh.white") + copy2subjdir(self, self.inputs.lh_pial, "surf", "lh.pial") + copy2subjdir(self, self.inputs.rh_white, "surf", "rh.white") + copy2subjdir(self, self.inputs.rh_pial, "surf", "rh.pial") + copy2subjdir(self, self.inputs.wm, "mri", "wm.mgz") + copy2subjdir( + self, + self.inputs.transform, + os.path.join("mri", "transforms"), + "talairach.xfm", + ) + copy2subjdir(self, self.inputs.brainmask, "mri", "brainmask.mgz") + copy2subjdir(self, self.inputs.aseg, "mri", "aseg.presurf.mgz") + copy2subjdir(self, self.inputs.ribbon, "mri", "ribbon.mgz") + copy2subjdir( + self, + self.inputs.thickness, + "surf", + "{0}.thickness".format(self.inputs.hemisphere), + ) if isdefined(self.inputs.cortex_label): - copy2subjdir(self, self.inputs.cortex_label, 'label', - '{0}.cortex.label'.format(self.inputs.hemisphere)) + copy2subjdir( + self, + self.inputs.cortex_label, + "label", + "{0}.cortex.label".format(self.inputs.hemisphere), + ) createoutputdirs(self._list_outputs()) return super(ParcellationStats, self).run(**inputs) def _gen_filename(self, name): - if name in ['out_table', 'out_color']: + if name in ["out_table", "out_color"]: return self._list_outputs()[name] return None @@ -3318,103 +3529,110 @@ def _list_outputs(self): outputs["out_table"] = os.path.abspath(self.inputs.out_table) else: # subject stats directory - stats_dir = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'stats') + stats_dir = os.path.join( + self.inputs.subjects_dir, self.inputs.subject_id, "stats" + ) if isdefined(self.inputs.in_annotation): # if out_table is not defined just tag .stats on the end # instead of .annot - if self.inputs.surface == 'pial': - basename = os.path.basename( - self.inputs.in_annotation).replace( - 
'.annot', '.pial.stats') + if self.inputs.surface == "pial": + basename = os.path.basename(self.inputs.in_annotation).replace( + ".annot", ".pial.stats" + ) else: - basename = os.path.basename( - self.inputs.in_annotation).replace('.annot', '.stats') + basename = os.path.basename(self.inputs.in_annotation).replace( + ".annot", ".stats" + ) elif isdefined(self.inputs.in_label): # if out_table is not defined just tag .stats on the end # instead of .label - if self.inputs.surface == 'pial': + if self.inputs.surface == "pial": basename = os.path.basename(self.inputs.in_label).replace( - '.label', '.pial.stats') + ".label", ".pial.stats" + ) else: basename = os.path.basename(self.inputs.in_label).replace( - '.label', '.stats') + ".label", ".stats" + ) else: - basename = str(self.inputs.hemisphere) + '.aparc.annot.stats' + basename = str(self.inputs.hemisphere) + ".aparc.annot.stats" outputs["out_table"] = os.path.join(stats_dir, basename) if isdefined(self.inputs.out_color): outputs["out_color"] = os.path.abspath(self.inputs.out_color) else: # subject label directory - out_dir = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id, 'label') + out_dir = os.path.join( + self.inputs.subjects_dir, self.inputs.subject_id, "label" + ) if isdefined(self.inputs.in_annotation): # find the annotation name (if it exists) basename = os.path.basename(self.inputs.in_annotation) - for item in ['lh.', 'rh.', 'aparc.', 'annot']: - basename = basename.replace(item, '') + for item in ["lh.", "rh.", "aparc.", "annot"]: + basename = basename.replace(item, "") annot = basename # if the out_color table is not defined, one with the annotation # name will be created - if 'BA' in annot: - outputs["out_color"] = os.path.join( - out_dir, annot + 'ctab') + if "BA" in annot: + outputs["out_color"] = os.path.join(out_dir, annot + "ctab") else: outputs["out_color"] = os.path.join( - out_dir, 'aparc.annot.' + annot + 'ctab') + out_dir, "aparc.annot." 
+ annot + "ctab" + ) else: - outputs["out_color"] = os.path.join(out_dir, - 'aparc.annot.ctab') + outputs["out_color"] = os.path.join(out_dir, "aparc.annot.ctab") return outputs class ContrastInputSpec(FSTraitedSpec): # required subject_id = traits.String( - 'subject_id', + "subject_id", argstr="--s %s", usedefault=True, mandatory=True, - desc="Subject being processed") + desc="Subject being processed", + ) hemisphere = traits.Enum( - 'lh', - 'rh', + "lh", + "rh", argstr="--%s-only", mandatory=True, - desc="Hemisphere being processed") + desc="Hemisphere being processed", + ) # implicit thickness = File( mandatory=True, exists=True, - desc="Input file must be /surf/?h.thickness") + desc="Input file must be /surf/?h.thickness", + ) white = File( mandatory=True, exists=True, - desc="Input file must be /surf/.white") + desc="Input file must be /surf/.white", + ) annotation = File( mandatory=True, exists=True, - desc= - "Input annotation file must be /label/.aparc.annot" + desc="Input annotation file must be /label/.aparc.annot", ) cortex = File( mandatory=True, exists=True, - desc= - "Input cortex label must be /label/.cortex.label" + desc="Input cortex label must be /label/.cortex.label", ) - orig = File( - exists=True, mandatory=True, desc="Implicit input file mri/orig.mgz") + orig = File(exists=True, mandatory=True, desc="Implicit input file mri/orig.mgz") rawavg = File( - exists=True, mandatory=True, desc="Implicit input file mri/rawavg.mgz") + exists=True, mandatory=True, desc="Implicit input file mri/rawavg.mgz" + ) copy_inputs = traits.Bool( - desc="If running as a node, set this to True." + - "This will copy the input files to the node " + "directory.") + desc="If running as a node, set this to True." + + "This will copy the input files to the node " + + "directory." 
+ ) class ContrastOutputSpec(TraitedSpec): - out_contrast = File( - exists=False, desc="Output contrast file from Contrast") + out_contrast = File(exists=False, desc="Output contrast file from Contrast") out_stats = File(exists=False, desc="Output stats file from Contrast") out_log = File(exists=True, desc="Output log from Contrast") @@ -3439,42 +3657,42 @@ class Contrast(FSCommand): 'pctsurfcon --lh-only --s 10335' """ - _cmd = 'pctsurfcon' + _cmd = "pctsurfcon" input_spec = ContrastInputSpec output_spec = ContrastOutputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir hemi = self.inputs.hemisphere - copy2subjdir(self, self.inputs.annotation, 'label', - '{0}.aparc.annot'.format(hemi)) - copy2subjdir(self, self.inputs.cortex, 'label', - '{0}.cortex.label'.format(hemi)) - copy2subjdir(self, self.inputs.white, 'surf', - '{0}.white'.format(hemi)) - copy2subjdir(self, self.inputs.thickness, 'surf', - '{0}.thickness'.format(hemi)) - copy2subjdir(self, self.inputs.orig, 'mri', 'orig.mgz') - copy2subjdir(self, self.inputs.rawavg, 'mri', 'rawavg.mgz') + copy2subjdir( + self, self.inputs.annotation, "label", "{0}.aparc.annot".format(hemi) + ) + copy2subjdir( + self, self.inputs.cortex, "label", "{0}.cortex.label".format(hemi) + ) + copy2subjdir(self, self.inputs.white, "surf", "{0}.white".format(hemi)) + copy2subjdir( + self, self.inputs.thickness, "surf", "{0}.thickness".format(hemi) + ) + copy2subjdir(self, self.inputs.orig, "mri", "orig.mgz") + copy2subjdir(self, self.inputs.rawavg, "mri", "rawavg.mgz") # need to create output directories createoutputdirs(self._list_outputs()) return super(Contrast, self).run(**inputs) def _list_outputs(self): outputs = self._outputs().get() - subject_dir = os.path.join(self.inputs.subjects_dir, - self.inputs.subject_id) + subject_dir = os.path.join(self.inputs.subjects_dir, self.inputs.subject_id) outputs["out_contrast"] = os.path.join( - subject_dir, 'surf', - str(self.inputs.hemisphere) + '.w-g.pct.mgh') + subject_dir, "surf", str(self.inputs.hemisphere) + ".w-g.pct.mgh" + ) outputs["out_stats"] = os.path.join( - subject_dir, 'stats', - str(self.inputs.hemisphere) + '.w-g.pct.stats') - outputs["out_log"] = os.path.join(subject_dir, 'scripts', - 'pctsurfcon.log') + subject_dir, "stats", str(self.inputs.hemisphere) + ".w-g.pct.stats" + ) + outputs["out_log"] = os.path.join(subject_dir, "scripts", "pctsurfcon.log") return outputs @@ -3484,34 +3702,35 @@ class RelabelHypointensitiesInputSpec(FSTraitedSpec): mandatory=True, exists=True, copyfile=True, - desc="Implicit input file must be lh.white") + desc="Implicit input file must be lh.white", + ) rh_white = File( mandatory=True, exists=True, copyfile=True, - desc="Implicit input file must be rh.white") + desc="Implicit input file must be rh.white", + ) aseg = File( - argstr="%s", - position=-3, - mandatory=True, - exists=True, - desc="Input aseg file") + argstr="%s", position=-3, mandatory=True, exists=True, desc="Input aseg file" + ) surf_directory = Directory( - '.', + ".", argstr="%s", position=-2, exists=True, usedefault=True, - desc="Directory containing lh.white and rh.white") + desc="Directory containing lh.white and rh.white", + ) out_file = File( argstr="%s", position=-1, exists=False, - name_source=['aseg'], - name_template='%s.hypos.mgz', + name_source=["aseg"], + name_template="%s.hypos.mgz", 
hash_files=False, keep_extension=False, - desc="Output aseg file") + desc="Output aseg file", + ) class RelabelHypointensitiesOutputSpec(TraitedSpec): @@ -3534,7 +3753,7 @@ class RelabelHypointensities(FSCommand): 'mri_relabel_hypointensities aseg.mgz . aseg.hypos.mgz' """ - _cmd = 'mri_relabel_hypointensities' + _cmd = "mri_relabel_hypointensities" input_spec = RelabelHypointensitiesInputSpec output_spec = RelabelHypointensitiesOutputSpec @@ -3547,57 +3766,64 @@ def _list_outputs(self): class Aparc2AsegInputSpec(FSTraitedSpec): # required subject_id = traits.String( - 'subject_id', + "subject_id", argstr="--s %s", usedefault=True, mandatory=True, - desc="Subject being processed") + desc="Subject being processed", + ) out_file = File( argstr="--o %s", exists=False, mandatory=True, - desc="Full path of file to save the output segmentation in") + desc="Full path of file to save the output segmentation in", + ) # implicit lh_white = File( mandatory=True, exists=True, - desc="Input file must be <subject_id>/surf/lh.white") + desc="Input file must be <subject_id>/surf/lh.white", + ) rh_white = File( mandatory=True, exists=True, - desc="Input file must be <subject_id>/surf/rh.white") + desc="Input file must be <subject_id>/surf/rh.white", + ) lh_pial = File( - mandatory=True, - exists=True, - desc="Input file must be <subject_id>/surf/lh.pial") + mandatory=True, exists=True, desc="Input file must be <subject_id>/surf/lh.pial" + ) rh_pial = File( - mandatory=True, - exists=True, - desc="Input file must be <subject_id>/surf/rh.pial") + mandatory=True, exists=True, desc="Input file must be <subject_id>/surf/rh.pial" + ) lh_ribbon = File( mandatory=True, exists=True, - desc="Input file must be <subject_id>/mri/lh.ribbon.mgz") + desc="Input file must be <subject_id>/mri/lh.ribbon.mgz", + ) rh_ribbon = File( mandatory=True, exists=True, - desc="Input file must be <subject_id>/mri/rh.ribbon.mgz") + desc="Input file must be <subject_id>/mri/rh.ribbon.mgz", + ) ribbon = File( mandatory=True, exists=True, - desc="Input file must be <subject_id>/mri/ribbon.mgz") + desc="Input file must be <subject_id>/mri/ribbon.mgz", + ) lh_annotation = File( mandatory=True, exists=True, - desc="Input file must be <subject_id>/label/lh.aparc.annot") + desc="Input file must be <subject_id>/label/lh.aparc.annot", + ) rh_annotation = File( mandatory=True, exists=True, - desc="Input file must be <subject_id>/label/rh.aparc.annot") + desc="Input file must be <subject_id>/label/rh.aparc.annot", + ) # optional filled = File( - exists=True, - desc="Implicit input filled file. Only required with FS v5.3.") + exists=True, desc="Implicit input filled file. Only required with FS v5.3." + ) aseg = File(argstr="--aseg %s", exists=True, desc="Input aseg file") volmask = traits.Bool(argstr="--volmask", desc="Volume mask flag") ctxseg = File(argstr="--ctxseg %s", exists=True, desc="") @@ -3607,16 +3833,18 @@ class Aparc2AsegInputSpec(FSTraitedSpec): For each voxel labeled as white matter in the aseg, re-assign its label to be that of the closest cortical point if its distance is less than dmaxctx - """) - hypo_wm = traits.Bool( - argstr="--hypo-as-wm", desc="Label hypointensities as WM") + """, + ) + hypo_wm = traits.Bool(argstr="--hypo-as-wm", desc="Label hypointensities as WM") rip_unknown = traits.Bool( - argstr="--rip-unknown", - desc="Do not label WM based on 'unknown' corical label") + argstr="--rip-unknown", desc="Do not label WM based on 'unknown' cortical label" + ) a2009s = traits.Bool(argstr="--a2009s", desc="Using the a2009s atlas") copy_inputs = traits.Bool( - desc="If running as a node, set this to True." + - "This will copy the input files to the node " + "directory.") + desc="If running as a node, set this to True."
+ + "This will copy the input files to the node " + + "directory." + ) class Aparc2AsegOutputSpec(TraitedSpec): @@ -3660,35 +3888,35 @@ class Aparc2Aseg(FSCommand): 'mri_aparc2aseg --labelwm --o aparc+aseg.mgz --rip-unknown --s subject_id' """ - _cmd = 'mri_aparc2aseg' + _cmd = "mri_aparc2aseg" input_spec = Aparc2AsegInputSpec output_spec = Aparc2AsegOutputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() - if 'subjects_dir' in inputs: - inputs['subjects_dir'] = self.inputs.subjects_dir - copy2subjdir(self, self.inputs.lh_white, 'surf', 'lh.white') - copy2subjdir(self, self.inputs.lh_pial, 'surf', 'lh.pial') - copy2subjdir(self, self.inputs.rh_white, 'surf', 'rh.white') - copy2subjdir(self, self.inputs.rh_pial, 'surf', 'rh.pial') - copy2subjdir(self, self.inputs.lh_ribbon, 'mri', 'lh.ribbon.mgz') - copy2subjdir(self, self.inputs.rh_ribbon, 'mri', 'rh.ribbon.mgz') - copy2subjdir(self, self.inputs.ribbon, 'mri', 'ribbon.mgz') - copy2subjdir(self, self.inputs.aseg, 'mri') - copy2subjdir(self, self.inputs.filled, 'mri', 'filled.mgz') - copy2subjdir(self, self.inputs.lh_annotation, 'label') - copy2subjdir(self, self.inputs.rh_annotation, 'label') + if "subjects_dir" in inputs: + inputs["subjects_dir"] = self.inputs.subjects_dir + copy2subjdir(self, self.inputs.lh_white, "surf", "lh.white") + copy2subjdir(self, self.inputs.lh_pial, "surf", "lh.pial") + copy2subjdir(self, self.inputs.rh_white, "surf", "rh.white") + copy2subjdir(self, self.inputs.rh_pial, "surf", "rh.pial") + copy2subjdir(self, self.inputs.lh_ribbon, "mri", "lh.ribbon.mgz") + copy2subjdir(self, self.inputs.rh_ribbon, "mri", "rh.ribbon.mgz") + copy2subjdir(self, self.inputs.ribbon, "mri", "ribbon.mgz") + copy2subjdir(self, self.inputs.aseg, "mri") + copy2subjdir(self, self.inputs.filled, "mri", "filled.mgz") + copy2subjdir(self, self.inputs.lh_annotation, "label") + copy2subjdir(self, self.inputs.rh_annotation, "label") return super(Aparc2Aseg, self).run(**inputs) def _format_arg(self, name, spec, value): - if name == 'aseg': + if name == "aseg": # aseg does not take a full filename - basename = os.path.basename(value).replace('.mgz', '') + basename = os.path.basename(value).replace(".mgz", "") return spec.argstr % basename - elif name == 'out_file': + elif name == "out_file": return spec.argstr % os.path.abspath(value) return super(Aparc2Aseg, self)._format_arg(name, spec, value) @@ -3702,10 +3930,8 @@ def _list_outputs(self): class Apas2AsegInputSpec(FSTraitedSpec): # required in_file = File( - argstr="--i %s", - mandatory=True, - exists=True, - desc="Input aparc+aseg.mgz") + argstr="--i %s", mandatory=True, exists=True, desc="Input aparc+aseg.mgz" + ) out_file = File(argstr="--o %s", mandatory=True, desc="Output aseg file") @@ -3730,7 +3956,7 @@ class Apas2Aseg(FSCommand): 'apas2aseg --i aseg.mgz --o output.mgz' """ - _cmd = 'apas2aseg' + _cmd = "apas2aseg" input_spec = Apas2AsegInputSpec output_spec = Apas2AsegOutputSpec @@ -3746,52 +3972,66 @@ class MRIsExpandInputSpec(FSTraitedSpec): in_file = File( exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-3, copyfile=False, - desc='Surface to expand') + desc="Surface to expand", + ) distance = traits.Float( mandatory=True, - argstr='%g', + argstr="%g", position=-2, - desc='Distance in mm or fraction of cortical thickness') + desc="Distance in mm or fraction of cortical thickness", + ) out_name = traits.Str( - 'expanded', - argstr='%s', + "expanded", + argstr="%s", position=-1, usedefault=True, - desc=('Output surface 
file\n' 'If no path, uses directory of `in_file`\n' 'If no path AND missing "lh." or "rh.", derive from `in_file`')) + desc=( + "Output surface file\n" + "If no path, uses directory of `in_file`\n" + 'If no path AND missing "lh." or "rh.", derive from `in_file`' + ), + ) thickness = traits.Bool( - argstr='-thickness', - desc='Expand by fraction of cortical thickness, not mm') + argstr="-thickness", desc="Expand by fraction of cortical thickness, not mm" + ) thickness_name = traits.Str( argstr="-thickness_name %s", copyfile=False, - desc=('Name of thickness file (implicit: "thickness")\n' - 'If no path, uses directory of `in_file`\n' - 'If no path AND missing "lh." or "rh.", derive from `in_file`')) + desc=( + 'Name of thickness file (implicit: "thickness")\n' + "If no path, uses directory of `in_file`\n" + 'If no path AND missing "lh." or "rh.", derive from `in_file`' + ), + ) pial = traits.Str( - argstr='-pial %s', + argstr="-pial %s", copyfile=False, - desc=('Name of pial file (implicit: "pial")\n' - 'If no path, uses directory of `in_file`\n' - 'If no path AND missing "lh." or "rh.", derive from `in_file`')) + desc=( + 'Name of pial file (implicit: "pial")\n' + "If no path, uses directory of `in_file`\n" + 'If no path AND missing "lh." or "rh.", derive from `in_file`' + ), + ) sphere = traits.Str( - 'sphere', + "sphere", copyfile=False, usedefault=True, - desc='WARNING: Do not change this trait') - spring = traits.Float(argstr='-S %g', desc="Spring term (implicit: 0.05)") - dt = traits.Float(argstr='-T %g', desc='dt (implicit: 0.25)') + desc="WARNING: Do not change this trait", + ) + spring = traits.Float(argstr="-S %g", desc="Spring term (implicit: 0.05)") + dt = traits.Float(argstr="-T %g", desc="dt (implicit: 0.25)") write_iterations = traits.Int( - argstr='-W %d', desc='Write snapshots of expansion every N iterations') + argstr="-W %d", desc="Write snapshots of expansion every N iterations" + ) smooth_averages = traits.Int( - argstr='-A %d', - desc='Smooth surface with N iterations after expansion') + argstr="-A %d", desc="Smooth surface with N iterations after expansion" + ) nsurfaces = traits.Int( - argstr='-N %d', desc='Number of surfacces to write during expansion') + argstr="-N %d", desc="Number of surfaces to write during expansion" + ) # # Requires dev version - Re-add when min_ver/max_ver support this # # https://github.com/freesurfer/freesurfer/blob/9730cb9/mris_expand/mris_expand.c # navgs = traits.Tuple( @@ -3806,7 +4046,7 @@ class MRIsExpandInputSpec(FSTraitedSpec): class MRIsExpandOutputSpec(TraitedSpec): - out_file = File(desc='Output surface file') + out_file = File(desc="Output surface file") class MRIsExpand(FSSurfaceCommand): @@ -3825,14 +4065,16 @@ class MRIsExpand(FSSurfaceCommand): >>> mris_expand.cmdline 'mris_expand -thickness lh.white 0.5 graymid' """ - _cmd = 'mris_expand' + + _cmd = "mris_expand" input_spec = MRIsExpandInputSpec output_spec = MRIsExpandOutputSpec def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = self._associated_file(self.inputs.in_file, - self.inputs.out_name) + outputs["out_file"] = self._associated_file( + self.inputs.in_file, self.inputs.out_name + ) return outputs def normalize_filenames(self): @@ -3845,100 +4087,103 @@ def normalize_filenames(self): pial = self.inputs.pial if not isdefined(pial): - pial = 'pial' + pial = "pial" self.inputs.pial = self._associated_file(in_file, pial) if isdefined(self.inputs.thickness) and self.inputs.thickness: thickness_name = self.inputs.thickness_name if not 
isdefined(thickness_name): - thickness_name = 'thickness' - self.inputs.thickness_name = self._associated_file( - in_file, thickness_name) + thickness_name = "thickness" + self.inputs.thickness_name = self._associated_file(in_file, thickness_name) self.inputs.sphere = self._associated_file(in_file, self.inputs.sphere) class LTAConvertInputSpec(CommandLineInputSpec): # Inputs - _in_xor = ('in_lta', 'in_fsl', 'in_mni', 'in_reg', 'in_niftyreg', 'in_itk') + _in_xor = ("in_lta", "in_fsl", "in_mni", "in_reg", "in_niftyreg", "in_itk") in_lta = traits.Either( File(exists=True), - 'identity.nofile', - argstr='--inlta %s', + "identity.nofile", + argstr="--inlta %s", mandatory=True, xor=_in_xor, - desc='input transform of LTA type') + desc="input transform of LTA type", + ) in_fsl = File( exists=True, - argstr='--infsl %s', + argstr="--infsl %s", mandatory=True, xor=_in_xor, - desc='input transform of FSL type') + desc="input transform of FSL type", + ) in_mni = File( exists=True, - argstr='--inmni %s', + argstr="--inmni %s", mandatory=True, xor=_in_xor, - desc='input transform of MNI/XFM type') + desc="input transform of MNI/XFM type", + ) in_reg = File( exists=True, - argstr='--inreg %s', + argstr="--inreg %s", mandatory=True, xor=_in_xor, - desc='input transform of TK REG type (deprecated format)') + desc="input transform of TK REG type (deprecated format)", + ) in_niftyreg = File( exists=True, - argstr='--inniftyreg %s', + argstr="--inniftyreg %s", mandatory=True, xor=_in_xor, - desc='input transform of Nifty Reg type (inverse RAS2RAS)') + desc="input transform of Nifty Reg type (inverse RAS2RAS)", + ) in_itk = File( exists=True, - argstr='--initk %s', + argstr="--initk %s", mandatory=True, xor=_in_xor, - desc='input transform of ITK type') + desc="input transform of ITK type", + ) # Outputs out_lta = traits.Either( traits.Bool, File, - argstr='--outlta %s', - desc='output linear transform (LTA Freesurfer format)') + argstr="--outlta %s", + desc="output linear transform (LTA Freesurfer format)", + ) out_fsl = traits.Either( - traits.Bool, - File, - argstr='--outfsl %s', - desc='output transform in FSL format') + traits.Bool, File, argstr="--outfsl %s", desc="output transform in FSL format" + ) out_mni = traits.Either( traits.Bool, File, - argstr='--outmni %s', - desc='output transform in MNI/XFM format') + argstr="--outmni %s", + desc="output transform in MNI/XFM format", + ) out_reg = traits.Either( traits.Bool, File, - argstr='--outreg %s', - desc='output transform in reg dat format') + argstr="--outreg %s", + desc="output transform in reg dat format", + ) out_itk = traits.Either( - traits.Bool, - File, - argstr='--outitk %s', - desc='output transform in ITK format') + traits.Bool, File, argstr="--outitk %s", desc="output transform in ITK format" + ) # Optional flags - invert = traits.Bool(argstr='--invert') - ltavox2vox = traits.Bool(argstr='--ltavox2vox', requires=['out_lta']) - source_file = File(exists=True, argstr='--src %s') - target_file = File(exists=True, argstr='--trg %s') - target_conform = traits.Bool(argstr='--trgconform') + invert = traits.Bool(argstr="--invert") + ltavox2vox = traits.Bool(argstr="--ltavox2vox", requires=["out_lta"]) + source_file = File(exists=True, argstr="--src %s") + target_file = File(exists=True, argstr="--trg %s") + target_conform = traits.Bool(argstr="--trgconform") class LTAConvertOutputSpec(TraitedSpec): - out_lta = File( - exists=True, desc='output linear transform (LTA Freesurfer format)') - out_fsl = File(exists=True, desc='output transform in FSL 
format') - out_mni = File(exists=True, desc='output transform in MNI/XFM format') - out_reg = File(exists=True, desc='output transform in reg dat format') - out_itk = File(exists=True, desc='output transform in ITK format') + out_lta = File(exists=True, desc="output linear transform (LTA Freesurfer format)") + out_fsl = File(exists=True, desc="output transform in FSL format") + out_mni = File(exists=True, desc="output transform in MNI/XFM format") + out_reg = File(exists=True, desc="output transform in reg dat format") + out_itk = File(exists=True, desc="output transform in ITK format") class LTAConvert(CommandLine): @@ -3949,20 +4194,25 @@ class LTAConvert(CommandLine): For complete details, see the `lta_convert documentation. `_ """ + input_spec = LTAConvertInputSpec output_spec = LTAConvertOutputSpec - _cmd = 'lta_convert' + _cmd = "lta_convert" def _format_arg(self, name, spec, value): - if name.startswith('out_') and value is True: + if name.startswith("out_") and value is True: value = self._list_outputs()[name] return super(LTAConvert, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() - for name, default in (('out_lta', 'out.lta'), ('out_fsl', 'out.mat'), - ('out_mni', 'out.xfm'), ('out_reg', 'out.dat'), - ('out_itk', 'out.txt')): + for name, default in ( + ("out_lta", "out.lta"), + ("out_fsl", "out.mat"), + ("out_mni", "out.xfm"), + ("out_reg", "out.dat"), + ("out_itk", "out.txt"), + ): attr = getattr(self.inputs, name) if attr: fname = default if attr is True else attr diff --git a/nipype/interfaces/fsl/__init__.py b/nipype/interfaces/fsl/__init__.py index c6de303307..dd7b3d76d7 100644 --- a/nipype/interfaces/fsl/__init__.py +++ b/nipype/interfaces/fsl/__init__.py @@ -7,30 +7,121 @@ Top-level namespace for fsl. 
""" -from .base import (FSLCommand, Info, check_fsl, no_fsl, no_fsl_course_data) -from .preprocess import (FAST, FLIRT, ApplyXFM, BET, MCFLIRT, FNIRT, ApplyWarp, - SliceTimer, SUSAN, PRELUDE, FUGUE, FIRST) -from .model import (Level1Design, FEAT, FEATModel, FILMGLS, FEATRegister, - FLAMEO, ContrastMgr, MultipleRegressDesign, L2Model, SMM, - MELODIC, SmoothEstimate, Cluster, Randomise, GLM) +from .base import FSLCommand, Info, check_fsl, no_fsl, no_fsl_course_data +from .preprocess import ( + FAST, + FLIRT, + ApplyXFM, + BET, + MCFLIRT, + FNIRT, + ApplyWarp, + SliceTimer, + SUSAN, + PRELUDE, + FUGUE, + FIRST, +) +from .model import ( + Level1Design, + FEAT, + FEATModel, + FILMGLS, + FEATRegister, + FLAMEO, + ContrastMgr, + MultipleRegressDesign, + L2Model, + SMM, + MELODIC, + SmoothEstimate, + Cluster, + Randomise, + GLM, +) from .utils import ( - AvScale, Smooth, Slice, Merge, ExtractROI, Split, ImageMaths, ImageMeants, - ImageStats, FilterRegressor, Overlay, Slicer, PlotTimeSeries, - PlotMotionParams, ConvertXFM, SwapDimensions, PowerSpectrum, Reorient2Std, - Complex, InvWarp, WarpUtils, ConvertWarp, WarpPoints, WarpPointsToStd, - WarpPointsFromStd, RobustFOV, CopyGeom, MotionOutliers) + AvScale, + Smooth, + Slice, + Merge, + ExtractROI, + Split, + ImageMaths, + ImageMeants, + ImageStats, + FilterRegressor, + Overlay, + Slicer, + PlotTimeSeries, + PlotMotionParams, + ConvertXFM, + SwapDimensions, + PowerSpectrum, + Reorient2Std, + Complex, + InvWarp, + WarpUtils, + ConvertWarp, + WarpPoints, + WarpPointsToStd, + WarpPointsFromStd, + RobustFOV, + CopyGeom, + MotionOutliers, +) -from .epi import (PrepareFieldmap, TOPUP, ApplyTOPUP, Eddy, EPIDeWarp, SigLoss, - EddyCorrect, EpiReg, EddyQuad) -from .dti import (BEDPOSTX, XFibres, DTIFit, ProbTrackX, ProbTrackX2, VecReg, - ProjThresh, FindTheBiggest, DistanceMap, TractSkeleton, - MakeDyadicVectors, BEDPOSTX5, XFibres5) -from .maths import (ChangeDataType, Threshold, MeanImage, ApplyMask, - IsotropicSmooth, TemporalFilter, DilateImage, ErodeImage, - SpatialFilter, UnaryMaths, BinaryMaths, MultiImageMaths, - MaxnImage, MinImage, MedianImage, PercentileImage, - AR1Image) +from .epi import ( + PrepareFieldmap, + TOPUP, + ApplyTOPUP, + Eddy, + EPIDeWarp, + SigLoss, + EddyCorrect, + EpiReg, + EddyQuad, +) +from .dti import ( + BEDPOSTX, + XFibres, + DTIFit, + ProbTrackX, + ProbTrackX2, + VecReg, + ProjThresh, + FindTheBiggest, + DistanceMap, + TractSkeleton, + MakeDyadicVectors, + BEDPOSTX5, + XFibres5, +) +from .maths import ( + ChangeDataType, + Threshold, + MeanImage, + ApplyMask, + IsotropicSmooth, + TemporalFilter, + DilateImage, + ErodeImage, + SpatialFilter, + UnaryMaths, + BinaryMaths, + MultiImageMaths, + MaxnImage, + MinImage, + MedianImage, + PercentileImage, + AR1Image, +) from .possum import B0Calc -from .fix import (AccuracyTester, Classifier, Cleaner, FeatureExtractor, - Training, TrainingSetCreator) +from .fix import ( + AccuracyTester, + Classifier, + Cleaner, + FeatureExtractor, + Training, + TrainingSetCreator, +) from .aroma import ICA_AROMA diff --git a/nipype/interfaces/fsl/aroma.py b/nipype/interfaces/fsl/aroma.py index ed0b85df90..c40a285989 100644 --- a/nipype/interfaces/fsl/aroma.py +++ b/nipype/interfaces/fsl/aroma.py @@ -5,8 +5,15 @@ `ICA-AROMA.py`_ command line tool. 
""" -from ..base import (TraitedSpec, CommandLineInputSpec, CommandLine, File, - Directory, traits, isdefined) +from ..base import ( + TraitedSpec, + CommandLineInputSpec, + CommandLine, + File, + Directory, + traits, + isdefined, +) import os @@ -14,83 +21,93 @@ class ICA_AROMAInputSpec(CommandLineInputSpec): feat_dir = Directory( exists=True, mandatory=True, - argstr='-feat %s', - xor=['in_file', 'mat_file', 'fnirt_warp_file', 'motion_parameters'], - desc='If a feat directory exists and temporal filtering ' - 'has not been run yet, ICA_AROMA can use the files in ' - 'this directory.') + argstr="-feat %s", + xor=["in_file", "mat_file", "fnirt_warp_file", "motion_parameters"], + desc="If a feat directory exists and temporal filtering " + "has not been run yet, ICA_AROMA can use the files in " + "this directory.", + ) in_file = File( exists=True, mandatory=True, - argstr='-i %s', - xor=['feat_dir'], - desc='volume to be denoised') + argstr="-i %s", + xor=["feat_dir"], + desc="volume to be denoised", + ) out_dir = Directory( - 'out', usedefault=True, mandatory=True, - argstr='-o %s', desc='output directory') + "out", usedefault=True, mandatory=True, argstr="-o %s", desc="output directory" + ) mask = File( - exists=True, - argstr='-m %s', - xor=['feat_dir'], - desc='path/name volume mask') + exists=True, argstr="-m %s", xor=["feat_dir"], desc="path/name volume mask" + ) dim = traits.Int( - argstr='-dim %d', - desc='Dimensionality reduction when running ' - 'MELODIC (defualt is automatic estimation)') + argstr="-dim %d", + desc="Dimensionality reduction when running " + "MELODIC (defualt is automatic estimation)", + ) TR = traits.Float( - argstr='-tr %.3f', - desc='TR in seconds. If this is not specified ' - 'the TR will be extracted from the ' - 'header of the fMRI nifti file.') + argstr="-tr %.3f", + desc="TR in seconds. If this is not specified " + "the TR will be extracted from the " + "header of the fMRI nifti file.", + ) melodic_dir = Directory( exists=True, - argstr='-meldir %s', - desc='path to MELODIC directory if MELODIC has already been run') + argstr="-meldir %s", + desc="path to MELODIC directory if MELODIC has already been run", + ) mat_file = File( exists=True, - argstr='-affmat %s', - xor=['feat_dir'], - desc='path/name of the mat-file describing the ' - 'affine registration (e.g. FSL FLIRT) of the ' - 'functional data to structural space (.mat file)') + argstr="-affmat %s", + xor=["feat_dir"], + desc="path/name of the mat-file describing the " + "affine registration (e.g. FSL FLIRT) of the " + "functional data to structural space (.mat file)", + ) fnirt_warp_file = File( exists=True, - argstr='-warp %s', - xor=['feat_dir'], - desc='File name of the warp-file describing ' - 'the non-linear registration (e.g. FSL FNIRT) ' - 'of the structural data to MNI152 space (.nii.gz)') + argstr="-warp %s", + xor=["feat_dir"], + desc="File name of the warp-file describing " + "the non-linear registration (e.g. FSL FNIRT) " + "of the structural data to MNI152 space (.nii.gz)", + ) motion_parameters = File( exists=True, mandatory=True, - argstr='-mc %s', - xor=['feat_dir'], - desc='motion parameters file') + argstr="-mc %s", + xor=["feat_dir"], + desc="motion parameters file", + ) denoise_type = traits.Enum( - 'nonaggr', - 'aggr', - 'both', - 'no', + "nonaggr", + "aggr", + "both", + "no", usedefault=True, mandatory=True, - argstr='-den %s', - desc='Type of denoising strategy:\n' - '-no: only classification, no denoising\n' - '-nonaggr (default): non-aggresssive denoising, i.e. 
partial component regression\n' - '-aggr: aggressive denoising, i.e. full component regression\n' - '-both: both aggressive and non-aggressive denoising (two outputs)') + argstr="-den %s", + desc="Type of denoising strategy:\n" + "-no: only classification, no denoising\n" + "-nonaggr (default): non-aggresssive denoising, i.e. partial component regression\n" + "-aggr: aggressive denoising, i.e. full component regression\n" + "-both: both aggressive and non-aggressive denoising (two outputs)", + ) class ICA_AROMAOutputSpec(TraitedSpec): aggr_denoised_file = File( - exists=True, desc='if generated: aggressively denoised volume') + exists=True, desc="if generated: aggressively denoised volume" + ) nonaggr_denoised_file = File( - exists=True, desc='if generated: non aggressively denoised volume') + exists=True, desc="if generated: non aggressively denoised volume" + ) out_dir = Directory( exists=True, - desc='directory contains (in addition to the denoised files): ' - 'melodic.ica + classified_motion_components + ' - 'classification_overview + feature_scores + melodic_ic_mni)') + desc="directory contains (in addition to the denoised files): " + "melodic.ica + classified_motion_components + " + "classification_overview + feature_scores + melodic_ic_mni)", + ) class ICA_AROMA(CommandLine): @@ -121,24 +138,27 @@ class ICA_AROMA(CommandLine): >>> AROMA_obj.cmdline # doctest: +ELLIPSIS 'ICA_AROMA.py -den both -warp warpfield.nii -i functional.nii -m mask.nii.gz -affmat func_to_struct.mat -mc fsl_mcflirt_movpar.txt -o .../ICA_testout' """ - _cmd = 'ICA_AROMA.py' + + _cmd = "ICA_AROMA.py" input_spec = ICA_AROMAInputSpec output_spec = ICA_AROMAOutputSpec def _format_arg(self, name, trait_spec, value): - if name == 'out_dir': + if name == "out_dir": return trait_spec.argstr % os.path.abspath(value) return super(ICA_AROMA, self)._format_arg(name, trait_spec, value) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_dir'] = os.path.abspath(self.inputs.out_dir) - out_dir = outputs['out_dir'] + outputs["out_dir"] = os.path.abspath(self.inputs.out_dir) + out_dir = outputs["out_dir"] - if self.inputs.denoise_type in ('aggr', 'both'): - outputs['aggr_denoised_file'] = os.path.join( - out_dir, 'denoised_func_data_aggr.nii.gz') - if self.inputs.denoise_type in ('nonaggr', 'both'): - outputs['nonaggr_denoised_file'] = os.path.join( - out_dir, 'denoised_func_data_nonaggr.nii.gz') + if self.inputs.denoise_type in ("aggr", "both"): + outputs["aggr_denoised_file"] = os.path.join( + out_dir, "denoised_func_data_aggr.nii.gz" + ) + if self.inputs.denoise_type in ("nonaggr", "both"): + outputs["nonaggr_denoised_file"] = os.path.join( + out_dir, "denoised_func_data_nonaggr.nii.gz" + ) return outputs diff --git a/nipype/interfaces/fsl/base.py b/nipype/interfaces/fsl/base.py index 07ddc4d146..43ad7b9f9c 100644 --- a/nipype/interfaces/fsl/base.py +++ b/nipype/interfaces/fsl/base.py @@ -31,11 +31,10 @@ from ... 
import logging from ...utils.filemanip import fname_presuffix -from ..base import (traits, isdefined, CommandLine, CommandLineInputSpec, - PackageInfo) +from ..base import traits, isdefined, CommandLine, CommandLineInputSpec, PackageInfo from ...external.due import BibTeX -IFLOGGER = logging.getLogger('nipype.interface') +IFLOGGER = logging.getLogger("nipype.interface") class Info(PackageInfo): @@ -56,14 +55,14 @@ class Info(PackageInfo): """ ftypes = { - 'NIFTI': '.nii', - 'NIFTI_PAIR': '.img', - 'NIFTI_GZ': '.nii.gz', - 'NIFTI_PAIR_GZ': '.img.gz' + "NIFTI": ".nii", + "NIFTI_PAIR": ".img", + "NIFTI_GZ": ".nii.gz", + "NIFTI_PAIR_GZ": ".img.gz", } - if os.getenv('FSLDIR'): - version_file = os.path.join(os.getenv('FSLDIR'), 'etc', 'fslversion') + if os.getenv("FSLDIR"): + version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") @staticmethod def parse_version(raw_info): @@ -87,7 +86,7 @@ def output_type_to_ext(cls, output_type): try: return cls.ftypes[output_type] except KeyError: - msg = 'Invalid FSLOUTPUTTYPE: ', output_type + msg = "Invalid FSLOUTPUTTYPE: ", output_type raise KeyError(msg) @classmethod @@ -103,28 +102,30 @@ def output_type(cls): Represents the current environment setting of FSLOUTPUTTYPE """ try: - return os.environ['FSLOUTPUTTYPE'] + return os.environ["FSLOUTPUTTYPE"] except KeyError: - IFLOGGER.warning('FSLOUTPUTTYPE environment variable is not set. ' - 'Setting FSLOUTPUTTYPE=NIFTI') - return 'NIFTI' + IFLOGGER.warning( + "FSLOUTPUTTYPE environment variable is not set. " + "Setting FSLOUTPUTTYPE=NIFTI" + ) + return "NIFTI" @staticmethod def standard_image(img_name=None): - '''Grab an image from the standard location. + """Grab an image from the standard location. Returns a list of standard images if called without arguments. - Could be made more fancy to allow for more relocatability''' + Could be made more fancy to allow for more relocatability""" try: - fsldir = os.environ['FSLDIR'] + fsldir = os.environ["FSLDIR"] except KeyError: - raise Exception('FSL environment variables not set') - stdpath = os.path.join(fsldir, 'data', 'standard') + raise Exception("FSL environment variables not set") + stdpath = os.path.join(fsldir, "data", "standard") if img_name is None: return [ - filename.replace(stdpath + '/', '') - for filename in glob(os.path.join(stdpath, '*nii*')) + filename.replace(stdpath + "/", "") + for filename in glob(os.path.join(stdpath, "*nii*")) ] return os.path.join(stdpath, img_name) @@ -140,8 +141,8 @@ class FSLCommandInputSpec(CommandLineInputSpec): ------- fsl.ExtractRoi(tmin=42, tsize=1, output_type='NIFTI') """ - output_type = traits.Enum( - 'NIFTI', list(Info.ftypes.keys()), desc='FSL output type') + + output_type = traits.Enum("NIFTI", list(Info.ftypes.keys()), desc="FSL output type") class FSLCommand(CommandLine): @@ -152,23 +153,26 @@ class FSLCommand(CommandLine): input_spec = FSLCommandInputSpec _output_type = None - references_ = [{ - 'entry': - BibTeX('@article{JenkinsonBeckmannBehrensWoolrichSmith2012,' - 'author={M. Jenkinson, C.F. Beckmann, T.E. Behrens, ' - 'M.W. Woolrich, and S.M. Smith},' - 'title={FSL},' - 'journal={NeuroImage},' - 'volume={62},' - 'pages={782-790},' - 'year={2012},' - '}'), - 'tags': ['implementation'], - }] + references_ = [ + { + "entry": BibTeX( + "@article{JenkinsonBeckmannBehrensWoolrichSmith2012," + "author={M. Jenkinson, C.F. Beckmann, T.E. Behrens, " + "M.W. Woolrich, and S.M. 
Smith}," + "title={FSL}," + "journal={NeuroImage}," + "volume={62}," + "pages={782-790}," + "year={2012}," + "}" + ), + "tags": ["implementation"], + } + ] def __init__(self, **inputs): super(FSLCommand, self).__init__(**inputs) - self.inputs.on_trait_change(self._output_update, 'output_type') + self.inputs.on_trait_change(self._output_update, "output_type") if self._output_type is None: self._output_type = Info.output_type() @@ -180,7 +184,7 @@ def __init__(self, **inputs): def _output_update(self): self._output_type = self.inputs.output_type - self.inputs.environ.update({'FSLOUTPUTTYPE': self.inputs.output_type}) + self.inputs.environ.update({"FSLOUTPUTTYPE": self.inputs.output_type}) @classmethod def set_default_output_type(cls, output_type): @@ -195,18 +199,13 @@ def set_default_output_type(cls, output_type): if output_type in Info.ftypes: cls._output_type = output_type else: - raise AttributeError('Invalid FSL output_type: %s' % output_type) + raise AttributeError("Invalid FSL output_type: %s" % output_type) @property def version(self): return Info.version() - def _gen_fname(self, - basename, - cwd=None, - suffix=None, - change_ext=True, - ext=None): + def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True, ext=None): """Generate a filename based on the given parameters. The filename will take the form: cwd/basename. @@ -232,9 +231,9 @@ def _gen_fname(self, """ - if basename == '': - msg = 'Unable to generate filename for command %s. ' % self.cmd - msg += 'basename is not set!' + if basename == "": + msg = "Unable to generate filename for command %s. " % self.cmd + msg += "basename is not set!" raise ValueError(msg) if cwd is None: cwd = os.getcwd() @@ -242,13 +241,12 @@ def _gen_fname(self, ext = Info.output_type_to_ext(self.inputs.output_type) if change_ext: if suffix: - suffix = ''.join((suffix, ext)) + suffix = "".join((suffix, ext)) else: suffix = ext if suffix is None: - suffix = '' - fname = fname_presuffix( - basename, suffix=suffix, use_ext=False, newpath=cwd) + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) return fname def _overload_extension(self, value, name=None): @@ -276,5 +274,7 @@ def no_fsl(): def no_fsl_course_data(): """check if fsl_course data is present""" - return not ('FSL_COURSE_DATA' in os.environ and os.path.isdir( - os.path.abspath(os.environ['FSL_COURSE_DATA']))) + return not ( + "FSL_COURSE_DATA" in os.environ + and os.path.isdir(os.path.abspath(os.environ["FSL_COURSE_DATA"])) + ) diff --git a/nipype/interfaces/fsl/dti.py b/nipype/interfaces/fsl/dti.py index 86ee527d5a..90f05e3bab 100644 --- a/nipype/interfaces/fsl/dti.py +++ b/nipype/interfaces/fsl/dti.py @@ -9,80 +9,83 @@ import warnings from ...utils.filemanip import fname_presuffix, split_filename, copyfile -from ..base import (TraitedSpec, isdefined, File, Directory, InputMultiPath, - OutputMultiPath, traits) -from .base import (FSLCommand, FSLCommandInputSpec, Info) +from ..base import ( + TraitedSpec, + isdefined, + File, + Directory, + InputMultiPath, + OutputMultiPath, + traits, +) +from .base import FSLCommand, FSLCommandInputSpec, Info class DTIFitInputSpec(FSLCommandInputSpec): dwi = File( exists=True, - desc='diffusion weighted image data file', - argstr='-k %s', + desc="diffusion weighted image data file", + argstr="-k %s", position=0, - mandatory=True) + mandatory=True, + ) base_name = traits.Str( "dtifit_", - desc=('base_name that all output files ' - 'will start with'), - argstr='-o %s', + desc=("base_name that all output files " 
"will start with"), + argstr="-o %s", position=1, - usedefault=True) + usedefault=True, + ) mask = File( exists=True, - desc='bet binary mask file', - argstr='-m %s', + desc="bet binary mask file", + argstr="-m %s", position=2, - mandatory=True) + mandatory=True, + ) bvecs = File( - exists=True, - desc='b vectors file', - argstr='-r %s', - position=3, - mandatory=True) + exists=True, desc="b vectors file", argstr="-r %s", position=3, mandatory=True + ) bvals = File( - exists=True, - desc='b values file', - argstr='-b %s', - position=4, - mandatory=True) - min_z = traits.Int(argstr='-z %d', desc='min z') - max_z = traits.Int(argstr='-Z %d', desc='max z') - min_y = traits.Int(argstr='-y %d', desc='min y') - max_y = traits.Int(argstr='-Y %d', desc='max y') - min_x = traits.Int(argstr='-x %d', desc='min x') - max_x = traits.Int(argstr='-X %d', desc='max x') + exists=True, desc="b values file", argstr="-b %s", position=4, mandatory=True + ) + min_z = traits.Int(argstr="-z %d", desc="min z") + max_z = traits.Int(argstr="-Z %d", desc="max z") + min_y = traits.Int(argstr="-y %d", desc="min y") + max_y = traits.Int(argstr="-Y %d", desc="max y") + min_x = traits.Int(argstr="-x %d", desc="min x") + max_x = traits.Int(argstr="-X %d", desc="max x") save_tensor = traits.Bool( - desc='save the elements of the tensor', argstr='--save_tensor') - sse = traits.Bool(desc='output sum of squared errors', argstr='--sse') - cni = File( - exists=True, desc='input counfound regressors', argstr='--cni=%s') + desc="save the elements of the tensor", argstr="--save_tensor" + ) + sse = traits.Bool(desc="output sum of squared errors", argstr="--sse") + cni = File(exists=True, desc="input counfound regressors", argstr="--cni=%s") little_bit = traits.Bool( - desc='only process small area of brain', argstr='--littlebit') + desc="only process small area of brain", argstr="--littlebit" + ) gradnonlin = File( - exists=True, argstr='--gradnonlin=%s', desc='gradient non linearities') + exists=True, argstr="--gradnonlin=%s", desc="gradient non linearities" + ) class DTIFitOutputSpec(TraitedSpec): - V1 = File(exists=True, desc='path/name of file with the 1st eigenvector') - V2 = File(exists=True, desc='path/name of file with the 2nd eigenvector') - V3 = File(exists=True, desc='path/name of file with the 3rd eigenvector') - L1 = File(exists=True, desc='path/name of file with the 1st eigenvalue') - L2 = File(exists=True, desc='path/name of file with the 2nd eigenvalue') - L3 = File(exists=True, desc='path/name of file with the 3rd eigenvalue') - MD = File(exists=True, desc='path/name of file with the mean diffusivity') - FA = File( - exists=True, desc='path/name of file with the fractional anisotropy') - MO = File( - exists=True, desc='path/name of file with the mode of anisotropy') + V1 = File(exists=True, desc="path/name of file with the 1st eigenvector") + V2 = File(exists=True, desc="path/name of file with the 2nd eigenvector") + V3 = File(exists=True, desc="path/name of file with the 3rd eigenvector") + L1 = File(exists=True, desc="path/name of file with the 1st eigenvalue") + L2 = File(exists=True, desc="path/name of file with the 2nd eigenvalue") + L3 = File(exists=True, desc="path/name of file with the 3rd eigenvalue") + MD = File(exists=True, desc="path/name of file with the mean diffusivity") + FA = File(exists=True, desc="path/name of file with the fractional anisotropy") + MO = File(exists=True, desc="path/name of file with the mode of anisotropy") S0 = File( exists=True, - desc=('path/name of file with the raw T2 signal 
with no ' - 'diffusion weighting')) - tensor = File( - exists=True, desc='path/name of file with the 4D tensor volume') - sse = File( - exists=True, desc='path/name of file with the summed squared error') + desc=( + "path/name of file with the raw T2 signal with no " "diffusion weighting" + ), + ) + tensor = File(exists=True, desc="path/name of file with the 4D tensor volume") + sse = File(exists=True, desc="path/name of file with the summed squared error") class DTIFit(FSLCommand): @@ -104,15 +107,14 @@ class DTIFit(FSLCommand): """ - _cmd = 'dtifit' + _cmd = "dtifit" input_spec = DTIFitInputSpec output_spec = DTIFitOutputSpec def _list_outputs(self): - keys_to_ignore = {'outputtype', 'environ', 'args'} + keys_to_ignore = {"outputtype", "environ", "args"} # Optional output: Map output name to input flag - opt_output = {'tensor': self.inputs.save_tensor, - 'sse': self.inputs.sse} + opt_output = {"tensor": self.inputs.save_tensor, "sse": self.inputs.sse} # Ignore optional output, whose corresponding input-flag is not defined # or set to False for output, input_flag in opt_output.items(): @@ -123,155 +125,166 @@ def _list_outputs(self): outputs = self.output_spec().get() for k in set(outputs.keys()) - keys_to_ignore: - outputs[k] = self._gen_fname(self.inputs.base_name, suffix='_' + k) + outputs[k] = self._gen_fname(self.inputs.base_name, suffix="_" + k) return outputs class FSLXCommandInputSpec(FSLCommandInputSpec): dwi = File( exists=True, - argstr='--data=%s', + argstr="--data=%s", mandatory=True, - desc='diffusion weighted image data file') + desc="diffusion weighted image data file", + ) mask = File( exists=True, - argstr='--mask=%s', + argstr="--mask=%s", mandatory=True, - desc='brain binary mask file (i.e. from BET)') + desc="brain binary mask file (i.e. 
from BET)", + ) bvecs = File( - exists=True, - argstr='--bvecs=%s', - mandatory=True, - desc='b vectors file') - bvals = File( - exists=True, argstr='--bvals=%s', mandatory=True, desc='b values file') + exists=True, argstr="--bvecs=%s", mandatory=True, desc="b vectors file" + ) + bvals = File(exists=True, argstr="--bvals=%s", mandatory=True, desc="b values file") - logdir = Directory('.', argstr='--logdir=%s', usedefault=True) + logdir = Directory(".", argstr="--logdir=%s", usedefault=True) n_fibres = traits.Range( usedefault=True, low=1, value=2, - argstr='--nfibres=%d', - desc=('Maximum number of fibres to fit in each voxel'), - mandatory=True) + argstr="--nfibres=%d", + desc=("Maximum number of fibres to fit in each voxel"), + mandatory=True, + ) model = traits.Enum( 1, 2, 3, - argstr='--model=%d', - desc=('use monoexponential (1, default, required for ' - 'single-shell) or multiexponential (2, multi-' - 'shell) model')) - fudge = traits.Int(argstr='--fudge=%d', desc='ARD fudge factor') + argstr="--model=%d", + desc=( + "use monoexponential (1, default, required for " + "single-shell) or multiexponential (2, multi-" + "shell) model" + ), + ) + fudge = traits.Int(argstr="--fudge=%d", desc="ARD fudge factor") n_jumps = traits.Int( - 5000, usedefault=True, - argstr='--njumps=%d', desc='Num of jumps to be made by MCMC') + 5000, + usedefault=True, + argstr="--njumps=%d", + desc="Num of jumps to be made by MCMC", + ) burn_in = traits.Range( low=0, value=0, usedefault=True, - argstr='--burnin=%d', - desc=('Total num of jumps at start of MCMC to be ' - 'discarded')) + argstr="--burnin=%d", + desc=("Total num of jumps at start of MCMC to be " "discarded"), + ) burn_in_no_ard = traits.Range( low=0, value=0, usedefault=True, - argstr='--burnin_noard=%d', - desc=('num of burnin jumps before the ard is' - ' imposed')) + argstr="--burnin_noard=%d", + desc=("num of burnin jumps before the ard is" " imposed"), + ) sample_every = traits.Range( low=0, value=1, usedefault=True, - argstr='--sampleevery=%d', - desc='Num of jumps for each sample (MCMC)') + argstr="--sampleevery=%d", + desc="Num of jumps for each sample (MCMC)", + ) update_proposal_every = traits.Range( low=1, value=40, usedefault=True, - argstr='--updateproposalevery=%d', - desc=('Num of jumps for each update ' - 'to the proposal density std ' - '(MCMC)')) + argstr="--updateproposalevery=%d", + desc=("Num of jumps for each update " "to the proposal density std " "(MCMC)"), + ) seed = traits.Int( - argstr='--seed=%d', desc='seed for pseudo random number generator') + argstr="--seed=%d", desc="seed for pseudo random number generator" + ) - _xor_inputs1 = ('no_ard', 'all_ard') + _xor_inputs1 = ("no_ard", "all_ard") no_ard = traits.Bool( - argstr='--noard', xor=_xor_inputs1, desc='Turn ARD off on all fibres') + argstr="--noard", xor=_xor_inputs1, desc="Turn ARD off on all fibres" + ) all_ard = traits.Bool( - argstr='--allard', xor=_xor_inputs1, desc='Turn ARD on on all fibres') + argstr="--allard", xor=_xor_inputs1, desc="Turn ARD on on all fibres" + ) - _xor_inputs2 = ('no_spat', 'non_linear', 'cnlinear') + _xor_inputs2 = ("no_spat", "non_linear", "cnlinear") no_spat = traits.Bool( - argstr='--nospat', + argstr="--nospat", xor=_xor_inputs2, - desc='Initialise with tensor, not spatially') + desc="Initialise with tensor, not spatially", + ) non_linear = traits.Bool( - argstr='--nonlinear', - xor=_xor_inputs2, - desc='Initialise with nonlinear fitting') + argstr="--nonlinear", xor=_xor_inputs2, desc="Initialise with nonlinear fitting" + ) cnlinear 
= traits.Bool( - argstr='--cnonlinear', + argstr="--cnonlinear", xor=_xor_inputs2, - desc=('Initialise with constrained nonlinear ' - 'fitting')) - rician = traits.Bool(argstr='--rician', desc=('use Rician noise modeling')) + desc=("Initialise with constrained nonlinear " "fitting"), + ) + rician = traits.Bool(argstr="--rician", desc=("use Rician noise modeling")) - _xor_inputs3 = ['f0_noard', 'f0_ard'] + _xor_inputs3 = ["f0_noard", "f0_ard"] f0_noard = traits.Bool( - argstr='--f0', + argstr="--f0", xor=_xor_inputs3, - desc=('Noise floor model: add to the model an ' - 'unattenuated signal compartment f0')) + desc=( + "Noise floor model: add to the model an " + "unattenuated signal compartment f0" + ), + ) f0_ard = traits.Bool( - argstr='--f0 --ardf0', - xor=_xor_inputs3 + ['all_ard'], - desc=('Noise floor model: add to the model an ' - 'unattenuated signal compartment f0')) + argstr="--f0 --ardf0", + xor=_xor_inputs3 + ["all_ard"], + desc=( + "Noise floor model: add to the model an " + "unattenuated signal compartment f0" + ), + ) force_dir = traits.Bool( True, - argstr='--forcedir', + argstr="--forcedir", usedefault=True, - desc=('use the actual directory name given ' - '(do not add + to make a new directory)')) + desc=( + "use the actual directory name given " + "(do not add + to make a new directory)" + ), + ) class FSLXCommandOutputSpec(TraitedSpec): dyads = OutputMultiPath( - File(exists=True), - desc=('Mean of PDD distribution' - ' in vector form.')) + File(exists=True), desc=("Mean of PDD distribution" " in vector form.") + ) fsamples = OutputMultiPath( - File(exists=True), - desc=('Samples from the ' - 'distribution on f ' - 'anisotropy')) - mean_dsamples = File( - exists=True, desc='Mean of distribution on diffusivity d') + File(exists=True), desc=("Samples from the " "distribution on f " "anisotropy") + ) + mean_dsamples = File(exists=True, desc="Mean of distribution on diffusivity d") mean_fsamples = OutputMultiPath( - File(exists=True), desc=('Mean of distribution on f ' - 'anisotropy')) + File(exists=True), desc=("Mean of distribution on f " "anisotropy") + ) mean_S0samples = File( - exists=True, - desc=('Mean of distribution on T2w' - 'baseline signal intensity S0')) + exists=True, desc=("Mean of distribution on T2w" "baseline signal intensity S0") + ) mean_tausamples = File( exists=True, - desc=('Mean of distribution on ' - 'tau samples (only with rician ' - 'noise)')) - phsamples = OutputMultiPath( - File(exists=True), desc=('phi samples, per fiber')) - thsamples = OutputMultiPath( - File(exists=True), desc=('theta samples, per fiber')) + desc=("Mean of distribution on " "tau samples (only with rician " "noise)"), + ) + phsamples = OutputMultiPath(File(exists=True), desc=("phi samples, per fiber")) + thsamples = OutputMultiPath(File(exists=True), desc=("theta samples, per fiber")) class FSLXCommand(FSLCommand): """ Base support for ``xfibres`` and ``bedpostx`` """ + input_spec = FSLXCommandInputSpec output_spec = FSLXCommandOutputSpec @@ -289,123 +302,120 @@ def _list_outputs(self, out_dir=None): if isdefined(self.inputs.logdir): out_dir = os.path.abspath(self.inputs.logdir) else: - out_dir = os.path.abspath('logdir') + out_dir = os.path.abspath("logdir") - multi_out = [ - 'dyads', 'fsamples', 'mean_fsamples', 'phsamples', 'thsamples' - ] - single_out = ['mean_dsamples', 'mean_S0samples'] + multi_out = ["dyads", "fsamples", "mean_fsamples", "phsamples", "thsamples"] + single_out = ["mean_dsamples", "mean_S0samples"] for k in single_out: outputs[k] = self._gen_fname(k, 
cwd=out_dir) if isdefined(self.inputs.rician) and self.inputs.rician: - outputs['mean_tausamples'] = self._gen_fname( - 'mean_tausamples', cwd=out_dir) + outputs["mean_tausamples"] = self._gen_fname("mean_tausamples", cwd=out_dir) for k in multi_out: outputs[k] = [] for i in range(1, n_fibres + 1): - outputs['fsamples'].append( - self._gen_fname('f%dsamples' % i, cwd=out_dir)) - outputs['mean_fsamples'].append( - self._gen_fname('mean_f%dsamples' % i, cwd=out_dir)) + outputs["fsamples"].append(self._gen_fname("f%dsamples" % i, cwd=out_dir)) + outputs["mean_fsamples"].append( + self._gen_fname("mean_f%dsamples" % i, cwd=out_dir) + ) for i in range(1, n_fibres + 1): - outputs['dyads'].append( - self._gen_fname('dyads%d' % i, cwd=out_dir)) - outputs['phsamples'].append( - self._gen_fname('ph%dsamples' % i, cwd=out_dir)) - outputs['thsamples'].append( - self._gen_fname('th%dsamples' % i, cwd=out_dir)) + outputs["dyads"].append(self._gen_fname("dyads%d" % i, cwd=out_dir)) + outputs["phsamples"].append(self._gen_fname("ph%dsamples" % i, cwd=out_dir)) + outputs["thsamples"].append(self._gen_fname("th%dsamples" % i, cwd=out_dir)) return outputs class BEDPOSTX5InputSpec(FSLXCommandInputSpec): - dwi = File( - exists=True, desc='diffusion weighted image data file', mandatory=True) - mask = File(exists=True, desc='bet binary mask file', mandatory=True) - bvecs = File(exists=True, desc='b vectors file', mandatory=True) - bvals = File(exists=True, desc='b values file', mandatory=True) - logdir = Directory(argstr='--logdir=%s') + dwi = File(exists=True, desc="diffusion weighted image data file", mandatory=True) + mask = File(exists=True, desc="bet binary mask file", mandatory=True) + bvecs = File(exists=True, desc="b vectors file", mandatory=True) + bvals = File(exists=True, desc="b values file", mandatory=True) + logdir = Directory(argstr="--logdir=%s") n_fibres = traits.Range( usedefault=True, low=1, value=2, - argstr='-n %d', - desc=('Maximum number of fibres to fit in each voxel'), - mandatory=True) + argstr="-n %d", + desc=("Maximum number of fibres to fit in each voxel"), + mandatory=True, + ) model = traits.Enum( 1, 2, 3, - argstr='-model %d', - desc=('use monoexponential (1, default, required for ' - 'single-shell) or multiexponential (2, multi-' - 'shell) model')) - fudge = traits.Int(argstr='-w %d', desc='ARD fudge factor') + argstr="-model %d", + desc=( + "use monoexponential (1, default, required for " + "single-shell) or multiexponential (2, multi-" + "shell) model" + ), + ) + fudge = traits.Int(argstr="-w %d", desc="ARD fudge factor") n_jumps = traits.Int( - 5000, usedefault=True, - argstr='-j %d', desc='Num of jumps to be made by MCMC') + 5000, usedefault=True, argstr="-j %d", desc="Num of jumps to be made by MCMC" + ) burn_in = traits.Range( low=0, value=0, usedefault=True, - argstr='-b %d', - desc=('Total num of jumps at start of MCMC to be ' - 'discarded')) + argstr="-b %d", + desc=("Total num of jumps at start of MCMC to be " "discarded"), + ) sample_every = traits.Range( low=0, value=1, usedefault=True, - argstr='-s %d', - desc='Num of jumps for each sample (MCMC)') + argstr="-s %d", + desc="Num of jumps for each sample (MCMC)", + ) out_dir = Directory( - 'bedpostx', + "bedpostx", mandatory=True, - desc='output directory', + desc="output directory", usedefault=True, position=1, - argstr='%s') + argstr="%s", + ) gradnonlin = traits.Bool( - False, - argstr='-g', - desc=('consider gradient nonlinearities, ' - 'default off')) - grad_dev = File( - exists=True, desc='grad_dev file, if 
gradnonlin, -g is True') - use_gpu = traits.Bool(False, desc='Use the GPU version of bedpostx') + False, argstr="-g", desc=("consider gradient nonlinearities, " "default off") + ) + grad_dev = File(exists=True, desc="grad_dev file, if gradnonlin, -g is True") + use_gpu = traits.Bool(False, desc="Use the GPU version of bedpostx") class BEDPOSTX5OutputSpec(TraitedSpec): - mean_dsamples = File( - exists=True, desc='Mean of distribution on diffusivity d') + mean_dsamples = File(exists=True, desc="Mean of distribution on diffusivity d") mean_fsamples = OutputMultiPath( - File(exists=True), desc=('Mean of distribution on f ' - 'anisotropy')) + File(exists=True), desc=("Mean of distribution on f " "anisotropy") + ) mean_S0samples = File( - exists=True, - desc=('Mean of distribution on T2w' - 'baseline signal intensity S0')) + exists=True, desc=("Mean of distribution on T2w" "baseline signal intensity S0") + ) mean_phsamples = OutputMultiPath( - File(exists=True), desc='Mean of distribution on phi') + File(exists=True), desc="Mean of distribution on phi" + ) mean_thsamples = OutputMultiPath( - File(exists=True), desc='Mean of distribution on theta') + File(exists=True), desc="Mean of distribution on theta" + ) merged_thsamples = OutputMultiPath( - File(exists=True), desc=('Samples from the distribution ' - 'on theta')) + File(exists=True), desc=("Samples from the distribution " "on theta") + ) merged_phsamples = OutputMultiPath( - File(exists=True), desc=('Samples from the distribution ' - 'on phi')) + File(exists=True), desc=("Samples from the distribution " "on phi") + ) merged_fsamples = OutputMultiPath( File(exists=True), - desc=('Samples from the distribution on ' - 'anisotropic volume fraction')) + desc=("Samples from the distribution on " "anisotropic volume fraction"), + ) dyads = OutputMultiPath( - File(exists=True), desc='Mean of PDD distribution in vector form.') - dyads_dispersion = OutputMultiPath(File(exists=True), desc=('Dispersion')) + File(exists=True), desc="Mean of PDD distribution in vector form." 
+ ) + dyads_dispersion = OutputMultiPath(File(exists=True), desc=("Dispersion")) class BEDPOSTX5(FSLXCommand): @@ -435,7 +445,7 @@ class BEDPOSTX5(FSLXCommand): """ - _cmd = 'bedpostx' + _cmd = "bedpostx" _default_cmd = _cmd input_spec = BEDPOSTX5InputSpec output_spec = BEDPOSTX5OutputSpec @@ -443,11 +453,11 @@ class BEDPOSTX5(FSLXCommand): def __init__(self, **inputs): super(BEDPOSTX5, self).__init__(**inputs) - self.inputs.on_trait_change(self._cuda_update, 'use_gpu') + self.inputs.on_trait_change(self._cuda_update, "use_gpu") def _cuda_update(self): if isdefined(self.inputs.use_gpu) and self.inputs.use_gpu: - self._cmd = 'bedpostx_gpu' + self._cmd = "bedpostx_gpu" else: self._cmd = self._default_cmd @@ -457,20 +467,18 @@ def _run_interface(self, runtime): if not os.path.exists(subjectdir): os.makedirs(subjectdir) _, _, ext = split_filename(self.inputs.mask) - copyfile(self.inputs.mask, - os.path.join(subjectdir, 'nodif_brain_mask' + ext)) + copyfile(self.inputs.mask, os.path.join(subjectdir, "nodif_brain_mask" + ext)) _, _, ext = split_filename(self.inputs.dwi) - copyfile(self.inputs.dwi, os.path.join(subjectdir, 'data' + ext)) - copyfile(self.inputs.bvals, os.path.join(subjectdir, 'bvals')) - copyfile(self.inputs.bvecs, os.path.join(subjectdir, 'bvecs')) + copyfile(self.inputs.dwi, os.path.join(subjectdir, "data" + ext)) + copyfile(self.inputs.bvals, os.path.join(subjectdir, "bvals")) + copyfile(self.inputs.bvecs, os.path.join(subjectdir, "bvecs")) if isdefined(self.inputs.grad_dev): _, _, ext = split_filename(self.inputs.grad_dev) - copyfile(self.inputs.grad_dev, - os.path.join(subjectdir, 'grad_dev' + ext)) + copyfile(self.inputs.grad_dev, os.path.join(subjectdir, "grad_dev" + ext)) retval = super(BEDPOSTX5, self)._run_interface(runtime) - self._out_dir = subjectdir + '.bedpostX' + self._out_dir = subjectdir + ".bedpostX" return retval def _list_outputs(self): @@ -478,12 +486,17 @@ def _list_outputs(self): n_fibres = self.inputs.n_fibres multi_out = [ - 'merged_thsamples', 'merged_fsamples', 'merged_phsamples', - 'mean_phsamples', 'mean_thsamples', 'mean_fsamples', - 'dyads_dispersion', 'dyads' + "merged_thsamples", + "merged_fsamples", + "merged_phsamples", + "mean_phsamples", + "mean_thsamples", + "mean_fsamples", + "dyads_dispersion", + "dyads", ] - single_out = ['mean_dsamples', 'mean_S0samples'] + single_out = ["mean_dsamples", "mean_S0samples"] for k in single_out: outputs[k] = self._gen_fname(k, cwd=self._out_dir) @@ -492,30 +505,37 @@ def _list_outputs(self): outputs[k] = [] for i in range(1, n_fibres + 1): - outputs['merged_thsamples'].append( - self._gen_fname('merged_th%dsamples' % i, cwd=self._out_dir)) - outputs['merged_fsamples'].append( - self._gen_fname('merged_f%dsamples' % i, cwd=self._out_dir)) - outputs['merged_phsamples'].append( - self._gen_fname('merged_ph%dsamples' % i, cwd=self._out_dir)) - outputs['mean_thsamples'].append( - self._gen_fname('mean_th%dsamples' % i, cwd=self._out_dir)) - outputs['mean_phsamples'].append( - self._gen_fname('mean_ph%dsamples' % i, cwd=self._out_dir)) - outputs['mean_fsamples'].append( - self._gen_fname('mean_f%dsamples' % i, cwd=self._out_dir)) - outputs['dyads'].append( - self._gen_fname('dyads%d' % i, cwd=self._out_dir)) - outputs['dyads_dispersion'].append( - self._gen_fname('dyads%d_dispersion' % i, cwd=self._out_dir)) + outputs["merged_thsamples"].append( + self._gen_fname("merged_th%dsamples" % i, cwd=self._out_dir) + ) + outputs["merged_fsamples"].append( + self._gen_fname("merged_f%dsamples" % i, cwd=self._out_dir) + ) + 
outputs["merged_phsamples"].append( + self._gen_fname("merged_ph%dsamples" % i, cwd=self._out_dir) + ) + outputs["mean_thsamples"].append( + self._gen_fname("mean_th%dsamples" % i, cwd=self._out_dir) + ) + outputs["mean_phsamples"].append( + self._gen_fname("mean_ph%dsamples" % i, cwd=self._out_dir) + ) + outputs["mean_fsamples"].append( + self._gen_fname("mean_f%dsamples" % i, cwd=self._out_dir) + ) + outputs["dyads"].append(self._gen_fname("dyads%d" % i, cwd=self._out_dir)) + outputs["dyads_dispersion"].append( + self._gen_fname("dyads%d_dispersion" % i, cwd=self._out_dir) + ) return outputs class XFibres5InputSpec(FSLXCommandInputSpec): gradnonlin = File( exists=True, - argstr='--gradnonlin=%s', - desc='gradient file corresponding to slice') + argstr="--gradnonlin=%s", + desc="gradient file corresponding to slice", + ) class XFibres5(FSLXCommand): @@ -523,7 +543,8 @@ class XFibres5(FSLXCommand): Perform model parameters estimation for local (voxelwise) diffusion parameters """ - _cmd = 'xfibres' + + _cmd = "xfibres" input_spec = XFibres5InputSpec output_spec = FSLXCommandOutputSpec @@ -538,143 +559,176 @@ class ProbTrackXBaseInputSpec(FSLCommandInputSpec): fsamples = InputMultiPath(File(exists=True), mandatory=True) samples_base_name = traits.Str( "merged", - desc=('the rootname/base_name for samples ' - 'files'), - argstr='--samples=%s', - usedefault=True) + desc=("the rootname/base_name for samples " "files"), + argstr="--samples=%s", + usedefault=True, + ) mask = File( exists=True, - desc='bet binary mask file in diffusion space', - argstr='-m %s', - mandatory=True) + desc="bet binary mask file in diffusion space", + argstr="-m %s", + mandatory=True, + ) seed = traits.Either( File(exists=True), traits.List(File(exists=True)), traits.List(traits.List(traits.Int(), minlen=3, maxlen=3)), - desc=('seed volume(s), or voxel(s) or freesurfer ' - 'label file'), - argstr='--seed=%s', - mandatory=True) + desc=("seed volume(s), or voxel(s) or freesurfer " "label file"), + argstr="--seed=%s", + mandatory=True, + ) target_masks = InputMultiPath( File(exits=True), - desc=('list of target masks - required for ' - 'seeds_to_targets classification'), - argstr='--targetmasks=%s') + desc=("list of target masks - required for " "seeds_to_targets classification"), + argstr="--targetmasks=%s", + ) waypoints = File( exists=True, - desc=('waypoint mask or ascii list of waypoint masks - ' - 'only keep paths going through ALL the masks'), - argstr='--waypoints=%s') + desc=( + "waypoint mask or ascii list of waypoint masks - " + "only keep paths going through ALL the masks" + ), + argstr="--waypoints=%s", + ) network = traits.Bool( - desc=('activate network mode - only keep paths ' - 'going through at least one seed mask ' - '(required if multiple seed masks)'), - argstr='--network') + desc=( + "activate network mode - only keep paths " + "going through at least one seed mask " + "(required if multiple seed masks)" + ), + argstr="--network", + ) seed_ref = File( exists=True, - desc=('reference vol to define seed space in simple mode ' - '- diffusion space assumed if absent'), - argstr='--seedref=%s') + desc=( + "reference vol to define seed space in simple mode " + "- diffusion space assumed if absent" + ), + argstr="--seedref=%s", + ) out_dir = Directory( exists=True, - argstr='--dir=%s', - desc='directory to put the final volumes in', - genfile=True) + argstr="--dir=%s", + desc="directory to put the final volumes in", + genfile=True, + ) force_dir = traits.Bool( True, - desc=('use the actual directory name 
given - i.e. ' - 'do not add + to make a new directory'), - argstr='--forcedir', - usedefault=True) + desc=( + "use the actual directory name given - i.e. " + "do not add + to make a new directory" + ), + argstr="--forcedir", + usedefault=True, + ) opd = traits.Bool( - True, - desc='outputs path distributions', - argstr='--opd', - usedefault=True) + True, desc="outputs path distributions", argstr="--opd", usedefault=True + ) correct_path_distribution = traits.Bool( - desc=('correct path distribution ' - 'for the length of the ' - 'pathways'), - argstr='--pd') - os2t = traits.Bool(desc='Outputs seeds to targets', argstr='--os2t') + desc=("correct path distribution " "for the length of the " "pathways"), + argstr="--pd", + ) + os2t = traits.Bool(desc="Outputs seeds to targets", argstr="--os2t") # paths_file = File('nipype_fdtpaths', usedefault=True, argstr='--out=%s', # desc='produces an output file (default is fdt_paths)') avoid_mp = File( exists=True, - desc=('reject pathways passing through locations given by ' - 'this mask'), - argstr='--avoid=%s') + desc=("reject pathways passing through locations given by " "this mask"), + argstr="--avoid=%s", + ) stop_mask = File( exists=True, - argstr='--stop=%s', - desc='stop tracking at locations given by this mask file') + argstr="--stop=%s", + desc="stop tracking at locations given by this mask file", + ) xfm = File( exists=True, - argstr='--xfm=%s', - desc=('transformation matrix taking seed space to DTI space ' - '(either FLIRT matrix or FNIRT warp_field) - default is ' - 'identity')) + argstr="--xfm=%s", + desc=( + "transformation matrix taking seed space to DTI space " + "(either FLIRT matrix or FNIRT warp_field) - default is " + "identity" + ), + ) inv_xfm = File( - argstr='--invxfm=%s', - desc=('transformation matrix taking DTI space to seed ' - 'space (compulsory when using a warp_field for ' - 'seeds_to_dti)')) + argstr="--invxfm=%s", + desc=( + "transformation matrix taking DTI space to seed " + "space (compulsory when using a warp_field for " + "seeds_to_dti)" + ), + ) n_samples = traits.Int( 5000, - argstr='--nsamples=%d', - desc='number of samples - default=5000', - usedefault=True) + argstr="--nsamples=%d", + desc="number of samples - default=5000", + usedefault=True, + ) n_steps = traits.Int( - argstr='--nsteps=%d', desc='number of steps per sample - default=2000') + argstr="--nsteps=%d", desc="number of steps per sample - default=2000" + ) dist_thresh = traits.Float( - argstr='--distthresh=%.3f', - desc=('discards samples shorter than this ' - 'threshold (in mm - default=0)')) + argstr="--distthresh=%.3f", + desc=("discards samples shorter than this " "threshold (in mm - default=0)"), + ) c_thresh = traits.Float( - argstr='--cthr=%.3f', desc='curvature threshold - default=0.2') + argstr="--cthr=%.3f", desc="curvature threshold - default=0.2" + ) sample_random_points = traits.Bool( - argstr='--sampvox', - desc=('sample random points within ' - 'seed voxels')) + argstr="--sampvox", desc=("sample random points within " "seed voxels") + ) step_length = traits.Float( - argstr='--steplength=%.3f', desc='step_length in mm - default=0.5') + argstr="--steplength=%.3f", desc="step_length in mm - default=0.5" + ) loop_check = traits.Bool( - argstr='--loopcheck', - desc=('perform loop_checks on paths - slower, ' - 'but allows lower curvature threshold')) + argstr="--loopcheck", + desc=( + "perform loop_checks on paths - slower, " + "but allows lower curvature threshold" + ), + ) use_anisotropy = traits.Bool( - argstr='--usef', desc='use 
anisotropy to constrain tracking')
+        argstr="--usef", desc="use anisotropy to constrain tracking"
+    )
     rand_fib = traits.Enum(
         0,
         1,
         2,
         3,
-        argstr='--randfib=%d',
-        desc=('options: 0 - default, 1 - to randomly '
-              'sample initial fibres (with f > fibthresh), '
-              '2 - to sample in proportion fibres (with '
-              'f>fibthresh) to f, 3 - to sample ALL '
-              'populations at random (even if '
-              'f<fibthresh)'))
+        argstr="--randfib=%d",
+        desc=(
+            "options: 0 - default, 1 - to randomly "
+            "sample initial fibres (with f > fibthresh), "
+            "2 - to sample in proportion fibres (with "
+            "f>fibthresh) to f, 3 - to sample ALL "
+            "populations at random (even if "
+            "f<fibthresh)"
+        ),
+    )
    >>> pbx2.cmdline
    'probtrackx2 --forcedir -m nodif_brain_mask.nii.gz --nsamples=3 --nsteps=10 --opd --dir=. --samples=merged --seed=seed_source.nii.gz'
    """
-    _cmd = 'probtrackx2'
+
+    _cmd = "probtrackx2"
     input_spec = ProbTrackX2InputSpec
     output_spec = ProbTrackX2OutputSpec
@@ -980,87 +1076,101 @@ def _list_outputs(self):
         else:
             out_dir = self.inputs.out_dir

-        outputs['way_total'] = os.path.abspath(
-            os.path.join(out_dir, 'waytotal'))
+        outputs["way_total"] = os.path.abspath(os.path.join(out_dir, "waytotal"))

         if isdefined(self.inputs.omatrix1):
-            outputs['network_matrix'] = os.path.abspath(
-                os.path.join(out_dir, 'matrix_seeds_to_all_targets'))
-            outputs['matrix1_dot'] = os.path.abspath(
-                os.path.join(out_dir, 'fdt_matrix1.dot'))
+            outputs["network_matrix"] = os.path.abspath(
+                os.path.join(out_dir, "matrix_seeds_to_all_targets")
+            )
+            outputs["matrix1_dot"] = os.path.abspath(
+                os.path.join(out_dir, "fdt_matrix1.dot")
+            )

         if isdefined(self.inputs.omatrix2):
-            outputs['lookup_tractspace'] = os.path.abspath(
-                os.path.join(out_dir, 'lookup_tractspace_fdt_matrix2.nii.gz'))
-            outputs['matrix2_dot'] = os.path.abspath(
-                os.path.join(out_dir, 'fdt_matrix2.dot'))
+            outputs["lookup_tractspace"] = os.path.abspath(
+                os.path.join(out_dir, "lookup_tractspace_fdt_matrix2.nii.gz")
+            )
+            outputs["matrix2_dot"] = os.path.abspath(
+                os.path.join(out_dir, "fdt_matrix2.dot")
+            )

         if isdefined(self.inputs.omatrix3):
-            outputs['matrix3_dot'] = os.path.abspath(
-                os.path.join(out_dir, 'fdt_matrix3.dot'))
+            outputs["matrix3_dot"] = os.path.abspath(
+                os.path.join(out_dir, "fdt_matrix3.dot")
+            )

         return outputs


 class VecRegInputSpec(FSLCommandInputSpec):
     in_file = File(
         exists=True,
-        argstr='-i %s',
-        desc='filename for input vector or tensor field',
-        mandatory=True)
+        argstr="-i %s",
+        desc="filename for input vector or tensor field",
+        mandatory=True,
+    )
     out_file = File(
-        argstr='-o %s',
-        desc=('filename for output registered vector or tensor '
-              'field'),
+        argstr="-o %s",
+        desc=("filename for output registered vector or tensor " "field"),
         genfile=True,
-        hash_files=False)
+        hash_files=False,
+    )
     ref_vol = File(
         exists=True,
-        argstr='-r %s',
-        desc='filename for reference (target) volume',
-        mandatory=True)
+        argstr="-r %s",
+        desc="filename for reference (target) volume",
+        mandatory=True,
+    )
     affine_mat = File(
-        exists=True,
-        argstr='-t %s',
-        desc='filename for affine transformation matrix')
+        exists=True, argstr="-t %s", desc="filename for affine transformation matrix"
+    )
     warp_field = File(
         exists=True,
-        argstr='-w %s',
-        desc=('filename for 4D warp field for nonlinear '
-              'registration'))
+        argstr="-w %s",
+        desc=("filename for 4D warp field for nonlinear " "registration"),
+    )
     rotation_mat = File(
         exists=True,
-        argstr='--rotmat=%s',
-        desc=('filename for secondary affine matrix if set, '
-              'this will be used for the rotation of the '
-              'vector/tensor field'))
+        argstr="--rotmat=%s",
+        desc=(
+            "filename for secondary affine matrix if set, "
+            "this will be used for the
rotation of the " + "vector/tensor field" + ), + ) rotation_warp = File( exists=True, - argstr='--rotwarp=%s', - desc=('filename for secondary warp field if set, ' - 'this will be used for the rotation of the ' - 'vector/tensor field')) + argstr="--rotwarp=%s", + desc=( + "filename for secondary warp field if set, " + "this will be used for the rotation of the " + "vector/tensor field" + ), + ) interpolation = traits.Enum( "nearestneighbour", "trilinear", "sinc", "spline", - argstr='--interp=%s', - desc=('interpolation method : ' - 'nearestneighbour, trilinear (default), ' - 'sinc or spline')) - mask = File(exists=True, argstr='-m %s', desc='brain mask in input space') + argstr="--interp=%s", + desc=( + "interpolation method : " + "nearestneighbour, trilinear (default), " + "sinc or spline" + ), + ) + mask = File(exists=True, argstr="-m %s", desc="brain mask in input space") ref_mask = File( exists=True, - argstr='--refmask=%s', - desc=('brain mask in output space (useful for speed up of ' - 'nonlinear reg)')) + argstr="--refmask=%s", + desc=("brain mask in output space (useful for speed up of " "nonlinear reg)"), + ) class VecRegOutputSpec(TraitedSpec): out_file = File( exists=True, - desc=('path/name of filename for the registered vector or ' - 'tensor field')) + desc=("path/name of filename for the registered vector or " "tensor field"), + ) class VecReg(FSLCommand): @@ -1081,7 +1191,7 @@ class VecReg(FSLCommand): """ - _cmd = 'vecreg' + _cmd = "vecreg" input_spec = VecRegInputSpec output_spec = VecRegOutputSpec @@ -1089,22 +1199,23 @@ def _run_interface(self, runtime): if not isdefined(self.inputs.out_file): pth, base_name = os.path.split(self.inputs.in_file) self.inputs.out_file = self._gen_fname( - base_name, cwd=os.path.abspath(pth), suffix='_vreg') + base_name, cwd=os.path.abspath(pth), suffix="_vreg" + ) return super(VecReg, self)._run_interface(runtime) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_file - if (not isdefined(outputs['out_file']) - and isdefined(self.inputs.in_file)): + outputs["out_file"] = self.inputs.out_file + if not isdefined(outputs["out_file"]) and isdefined(self.inputs.in_file): pth, base_name = os.path.split(self.inputs.in_file) - outputs['out_file'] = self._gen_fname( - base_name, cwd=os.path.abspath(pth), suffix='_vreg') - outputs['out_file'] = os.path.abspath(outputs['out_file']) + outputs["out_file"] = self._gen_fname( + base_name, cwd=os.path.abspath(pth), suffix="_vreg" + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._list_outputs()[name] else: return None @@ -1113,23 +1224,26 @@ def _gen_filename(self, name): class ProjThreshInputSpec(FSLCommandInputSpec): in_files = traits.List( File(exists=True), - argstr='%s', - desc='a list of input volumes', + argstr="%s", + desc="a list of input volumes", mandatory=True, - position=0) + position=0, + ) threshold = traits.Int( - argstr='%d', - desc=('threshold indicating minimum number of seed ' - 'voxels entering this mask region'), + argstr="%d", + desc=( + "threshold indicating minimum number of seed " + "voxels entering this mask region" + ), mandatory=True, - position=1) + position=1, + ) class ProjThreshOuputSpec(TraitedSpec): out_files = traits.List( - File(exists=True), - desc=('path/name of output volume after ' - 'thresholding')) + File(exists=True), desc=("path/name of output volume after " "thresholding") + ) class 
ProjThresh(FSLCommand):
@@ -1148,44 +1262,46 @@ class ProjThresh(FSLCommand):

     """

-    _cmd = 'proj_thresh'
+    _cmd = "proj_thresh"
     input_spec = ProjThreshInputSpec
     output_spec = ProjThreshOuputSpec

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['out_files'] = []
+        outputs["out_files"] = []
         for name in self.inputs.in_files:
             cwd, base_name = os.path.split(name)
-            outputs['out_files'].append(
+            outputs["out_files"].append(
                 self._gen_fname(
                     base_name,
                     cwd=cwd,
-                    suffix='_proj_seg_thr_{}'.format(self.inputs.threshold)))
+                    suffix="_proj_seg_thr_{}".format(self.inputs.threshold),
+                )
+            )
         return outputs


 class FindTheBiggestInputSpec(FSLCommandInputSpec):
     in_files = traits.List(
         File(exists=True),
-        argstr='%s',
-        desc=('a list of input volumes or a '
-              'singleMatrixFile'),
+        argstr="%s",
+        desc=("a list of input volumes or a " "singleMatrixFile"),
         position=0,
-        mandatory=True)
+        mandatory=True,
+    )
     out_file = File(
-        argstr='%s',
-        desc='file with the resulting segmentation',
+        argstr="%s",
+        desc="file with the resulting segmentation",
         position=2,
         genfile=True,
-        hash_files=False)
+        hash_files=False,
+    )


 class FindTheBiggestOutputSpec(TraitedSpec):
     out_file = File(
-        exists=True,
-        argstr='%s',
-        desc='output file indexed in order of input files')
+        exists=True, argstr="%s", desc="output file indexed in order of input files"
+    )


 class FindTheBiggest(FSLCommand):
@@ -1205,27 +1321,26 @@ class FindTheBiggest(FSLCommand):
     'find_the_biggest seeds_to_M1.nii seeds_to_M2.nii biggestSegmentation'

     """
-    _cmd = 'find_the_biggest'
+
+    _cmd = "find_the_biggest"
     input_spec = FindTheBiggestInputSpec
     output_spec = FindTheBiggestOutputSpec

     def _run_interface(self, runtime):
         if not isdefined(self.inputs.out_file):
-            self.inputs.out_file = self._gen_fname(
-                'biggestSegmentation', suffix='')
+            self.inputs.out_file = self._gen_fname("biggestSegmentation", suffix="")
         return super(FindTheBiggest, self)._run_interface(runtime)

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['out_file'] = self.inputs.out_file
-        if not isdefined(outputs['out_file']):
-            outputs['out_file'] = self._gen_fname(
-                'biggestSegmentation', suffix='')
-        outputs['out_file'] = os.path.abspath(outputs['out_file'])
+        outputs["out_file"] = self.inputs.out_file
+        if not isdefined(outputs["out_file"]):
+            outputs["out_file"] = self._gen_fname("biggestSegmentation", suffix="")
+        outputs["out_file"] = os.path.abspath(outputs["out_file"])
         return outputs

     def _gen_filename(self, name):
-        if name == 'out_file':
+        if name == "out_file":
             return self._list_outputs()[name]
         else:
             return None
@@ -1237,35 +1352,36 @@ class TractSkeletonInputSpec(FSLCommandInputSpec):
     in_file = File(
         exists=True,
         mandatory=True,
         argstr="-i %s",
-        desc="input image (typcially mean FA volume)")
+        desc="input image (typically mean FA volume)",
+    )
     _proj_inputs = ["threshold", "distance_map", "data_file"]
     project_data = traits.Bool(
         argstr="-p %.3f %s %s %s %s",
         requires=_proj_inputs,
-        desc="project data onto skeleton")
+        desc="project data onto skeleton",
+    )
     threshold = traits.Float(desc="skeleton threshold value")
     distance_map = File(exists=True, desc="distance map image")
     search_mask_file = File(
         exists=True,
         xor=["use_cingulum_mask"],
-        desc="mask in which to use alternate search rule")
+        desc="mask in which to use alternate search rule",
+    )
     use_cingulum_mask = traits.Bool(
         True,
         usedefault=True,
         xor=["search_mask_file"],
-        desc=("perform alternate search using "
-              "built-in cingulum mask"))
-    data_file = File(
-        exists=True, desc="4D data to project onto
skeleton (usually FA)") + desc=("perform alternate search using " "built-in cingulum mask"), + ) + data_file = File(exists=True, desc="4D data to project onto skeleton (usually FA)") alt_data_file = File( - exists=True, - argstr="-a %s", - desc="4D non-FA data to project onto skeleton") - alt_skeleton = File( - exists=True, argstr="-s %s", desc="alternate skeleton to use") + exists=True, argstr="-a %s", desc="4D non-FA data to project onto skeleton" + ) + alt_skeleton = File(exists=True, argstr="-s %s", desc="alternate skeleton to use") projected_data = File(desc="input data projected onto skeleton") skeleton_file = traits.Either( - traits.Bool, File, argstr="-o %s", desc="write out skeleton image") + traits.Bool, File, argstr="-o %s", desc="write out skeleton image" + ) class TractSkeletonOutputSpec(TraitedSpec): @@ -1315,8 +1431,13 @@ def _format_arg(self, name, spec, value): proj_file = self._list_outputs()["projected_data"] else: proj_file = _si.projected_data - return spec.argstr % (_si.threshold, _si.distance_map, - mask_file, _si.data_file, proj_file) + return spec.argstr % ( + _si.threshold, + _si.distance_map, + mask_file, + _si.data_file, + proj_file, + ) elif name == "skeleton_file": if isinstance(value, bool): return spec.argstr % self._list_outputs()["skeleton_file"] @@ -1335,18 +1456,14 @@ def _list_outputs(self): if isdefined(_si.alt_data_file): stem = _si.alt_data_file outputs["projected_data"] = fname_presuffix( - stem, - suffix="_skeletonised", - newpath=os.getcwd(), - use_ext=True) + stem, suffix="_skeletonised", newpath=os.getcwd(), use_ext=True + ) if isdefined(_si.skeleton_file) and _si.skeleton_file: outputs["skeleton_file"] = _si.skeleton_file if isinstance(_si.skeleton_file, bool): outputs["skeleton_file"] = fname_presuffix( - _si.in_file, - suffix="_skeleton", - newpath=os.getcwd(), - use_ext=True) + _si.in_file, suffix="_skeleton", newpath=os.getcwd(), use_ext=True + ) return outputs @@ -1356,29 +1473,27 @@ class DistanceMapInputSpec(FSLCommandInputSpec): exists=True, mandatory=True, argstr="--in=%s", - desc="image to calculate distance values for") + desc="image to calculate distance values for", + ) mask_file = File( - exists=True, - argstr="--mask=%s", - desc="binary mask to contrain calculations") + exists=True, argstr="--mask=%s", desc="binary mask to contrain calculations" + ) invert_input = traits.Bool(argstr="--invert", desc="invert input image") local_max_file = traits.Either( traits.Bool, File, argstr="--localmax=%s", desc="write an image of the local maxima", - hash_files=False) + hash_files=False, + ) distance_map = File( - genfile=True, - argstr="--out=%s", - desc="distance map to write", - hash_files=False) + genfile=True, argstr="--out=%s", desc="distance map to write", hash_files=False + ) class DistanceMapOutputSpec(TraitedSpec): - distance_map = File( - exists=True, desc="value is distance to nearest nonzero voxels") + distance_map = File(exists=True, desc="value is distance to nearest nonzero voxels") local_max_file = File(desc="image of local maxima") @@ -1412,21 +1527,16 @@ def _list_outputs(self): outputs["distance_map"] = _si.distance_map if not isdefined(_si.distance_map): outputs["distance_map"] = fname_presuffix( - _si.in_file, - suffix="_dstmap", - use_ext=True, - newpath=os.getcwd()) + _si.in_file, suffix="_dstmap", use_ext=True, newpath=os.getcwd() + ) outputs["distance_map"] = os.path.abspath(outputs["distance_map"]) if isdefined(_si.local_max_file): outputs["local_max_file"] = _si.local_max_file if isinstance(_si.local_max_file, 
bool): outputs["local_max_file"] = fname_presuffix( - _si.in_file, - suffix="_lclmax", - use_ext=True, - newpath=os.getcwd()) - outputs["local_max_file"] = os.path.abspath( - outputs["local_max_file"]) + _si.in_file, suffix="_lclmax", use_ext=True, newpath=os.getcwd() + ) + outputs["local_max_file"] = os.path.abspath(outputs["local_max_file"]) return outputs def _gen_filename(self, name): @@ -1439,13 +1549,15 @@ class MakeDyadicVectorsInputSpec(FSLCommandInputSpec): theta_vol = File(exists=True, mandatory=True, position=0, argstr="%s") phi_vol = File(exists=True, mandatory=True, position=1, argstr="%s") mask = File(exists=True, position=2, argstr="%s") - output = File( - "dyads", position=3, usedefault=True, argstr="%s", hash_files=False) + output = File("dyads", position=3, usedefault=True, argstr="%s", hash_files=False) perc = traits.Float( - desc=("the {perc}% angle of the output cone of " - "uncertainty (output will be in degrees)"), + desc=( + "the {perc}% angle of the output cone of " + "uncertainty (output will be in degrees)" + ), position=4, - argstr="%f") + argstr="%f", + ) class MakeDyadicVectorsOutputSpec(TraitedSpec): @@ -1465,6 +1577,7 @@ def _list_outputs(self): outputs = self.output_spec().get() outputs["dyads"] = self._gen_fname(self.inputs.output) outputs["dispersion"] = self._gen_fname( - self.inputs.output, suffix="_dispersion") + self.inputs.output, suffix="_dispersion" + ) return outputs diff --git a/nipype/interfaces/fsl/epi.py b/nipype/interfaces/fsl/epi.py index a3dceb1902..e7f3ff4318 100644 --- a/nipype/interfaces/fsl/epi.py +++ b/nipype/interfaces/fsl/epi.py @@ -13,52 +13,56 @@ from ...utils.filemanip import split_filename from ...utils import NUMPY_MMAP -from ..base import (traits, TraitedSpec, InputMultiPath, File, isdefined) +from ..base import traits, TraitedSpec, InputMultiPath, File, isdefined from .base import FSLCommand, FSLCommandInputSpec, Info class PrepareFieldmapInputSpec(FSLCommandInputSpec): scanner = traits.String( - 'SIEMENS', - argstr='%s', - position=1, - desc='must be SIEMENS', - usedefault=True) + "SIEMENS", argstr="%s", position=1, desc="must be SIEMENS", usedefault=True + ) in_phase = File( exists=True, - argstr='%s', + argstr="%s", position=2, mandatory=True, - desc=('Phase difference map, in SIEMENS format range from ' - '0-4096 or 0-8192)')) + desc=( + "Phase difference map, in SIEMENS format range from " "0-4096 or 0-8192)" + ), + ) in_magnitude = File( exists=True, - argstr='%s', + argstr="%s", position=3, mandatory=True, - desc='Magnitude difference map, brain extracted') + desc="Magnitude difference map, brain extracted", + ) delta_TE = traits.Float( 2.46, usedefault=True, mandatory=True, - argstr='%f', + argstr="%f", position=-2, - desc=('echo time difference of the ' - 'fieldmap sequence in ms. (usually 2.46ms in' - ' Siemens)')) + desc=( + "echo time difference of the " + "fieldmap sequence in ms. 
(usually 2.46ms in" + " Siemens)" + ), + ) nocheck = traits.Bool( False, position=-1, - argstr='--nocheck', + argstr="--nocheck", usedefault=True, - desc=('do not perform sanity checks for image ' - 'size/range/dimensions')) + desc=("do not perform sanity checks for image " "size/range/dimensions"), + ) out_fieldmap = File( - argstr='%s', position=4, desc='output name for prepared fieldmap') + argstr="%s", position=4, desc="output name for prepared fieldmap" + ) class PrepareFieldmapOutputSpec(TraitedSpec): - out_fieldmap = File(exists=True, desc='output name for prepared fieldmap') + out_fieldmap = File(exists=True, desc="output name for prepared fieldmap") class PrepareFieldmap(FSLCommand): @@ -85,7 +89,8 @@ class PrepareFieldmap(FSLCommand): """ - _cmd = 'fsl_prepare_fieldmap' + + _cmd = "fsl_prepare_fieldmap" input_spec = PrepareFieldmapInputSpec output_spec = PrepareFieldmapOutputSpec @@ -95,16 +100,17 @@ def _parse_inputs(self, skip=None): if not isdefined(self.inputs.out_fieldmap): self.inputs.out_fieldmap = self._gen_fname( - self.inputs.in_phase, suffix='_fslprepared') + self.inputs.in_phase, suffix="_fslprepared" + ) if not isdefined(self.inputs.nocheck) or not self.inputs.nocheck: - skip += ['nocheck'] + skip += ["nocheck"] return super(PrepareFieldmap, self)._parse_inputs(skip=skip) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_fieldmap'] = self.inputs.out_fieldmap + outputs["out_fieldmap"] = self.inputs.out_fieldmap return outputs def _run_interface(self, runtime): @@ -124,171 +130,192 @@ class TOPUPInputSpec(FSLCommandInputSpec): in_file = File( exists=True, mandatory=True, - desc='name of 4D file with images', - argstr='--imain=%s') + desc="name of 4D file with images", + argstr="--imain=%s", + ) encoding_file = File( exists=True, mandatory=True, - xor=['encoding_direction'], - desc='name of text file with PE directions/times', - argstr='--datain=%s') + xor=["encoding_direction"], + desc="name of text file with PE directions/times", + argstr="--datain=%s", + ) encoding_direction = traits.List( - traits.Enum('y', 'x', 'z', 'x-', 'y-', 'z-'), + traits.Enum("y", "x", "z", "x-", "y-", "z-"), mandatory=True, - xor=['encoding_file'], - requires=['readout_times'], - argstr='--datain=%s', - desc=('encoding direction for automatic ' - 'generation of encoding_file')) + xor=["encoding_file"], + requires=["readout_times"], + argstr="--datain=%s", + desc=("encoding direction for automatic " "generation of encoding_file"), + ) readout_times = InputMultiPath( traits.Float, - requires=['encoding_direction'], - xor=['encoding_file'], + requires=["encoding_direction"], + xor=["encoding_file"], mandatory=True, - desc=('readout times (dwell times by # ' - 'phase-encode steps minus 1)')) + desc=("readout times (dwell times by # " "phase-encode steps minus 1)"), + ) out_base = File( - desc=('base-name of output files (spline ' - 'coefficients (Hz) and movement parameters)'), - name_source=['in_file'], - name_template='%s_base', - argstr='--out=%s', - hash_files=False) + desc=( + "base-name of output files (spline " + "coefficients (Hz) and movement parameters)" + ), + name_source=["in_file"], + name_template="%s_base", + argstr="--out=%s", + hash_files=False, + ) out_field = File( - argstr='--fout=%s', + argstr="--fout=%s", hash_files=False, - name_source=['in_file'], - name_template='%s_field', - desc='name of image file with field (Hz)') + name_source=["in_file"], + name_template="%s_field", + desc="name of image file with field (Hz)", + ) out_warp_prefix = 
traits.Str( "warpfield", - argstr='--dfout=%s', + argstr="--dfout=%s", hash_files=False, - desc='prefix for the warpfield images (in mm)', - usedefault=True) + desc="prefix for the warpfield images (in mm)", + usedefault=True, + ) out_mat_prefix = traits.Str( "xfm", - argstr='--rbmout=%s', + argstr="--rbmout=%s", hash_files=False, - desc='prefix for the realignment matrices', - usedefault=True) + desc="prefix for the realignment matrices", + usedefault=True, + ) out_jac_prefix = traits.Str( "jac", - argstr='--jacout=%s', + argstr="--jacout=%s", hash_files=False, - desc='prefix for the warpfield images', - usedefault=True) + desc="prefix for the warpfield images", + usedefault=True, + ) out_corrected = File( - argstr='--iout=%s', + argstr="--iout=%s", hash_files=False, - name_source=['in_file'], - name_template='%s_corrected', - desc='name of 4D image file with unwarped images') + name_source=["in_file"], + name_template="%s_corrected", + desc="name of 4D image file with unwarped images", + ) out_logfile = File( - argstr='--logout=%s', - desc='name of log-file', - name_source=['in_file'], - name_template='%s_topup.log', + argstr="--logout=%s", + desc="name of log-file", + name_source=["in_file"], + name_template="%s_topup.log", keep_extension=True, - hash_files=False) + hash_files=False, + ) # TODO: the following traits admit values separated by commas, one value # per registration level inside topup. warp_res = traits.Float( - argstr='--warpres=%f', - desc=('(approximate) resolution (in mm) of warp ' - 'basis for the different sub-sampling levels')) - subsamp = traits.Int(argstr='--subsamp=%d', - desc='sub-sampling scheme') + argstr="--warpres=%f", + desc=( + "(approximate) resolution (in mm) of warp " + "basis for the different sub-sampling levels" + ), + ) + subsamp = traits.Int(argstr="--subsamp=%d", desc="sub-sampling scheme") fwhm = traits.Float( - argstr='--fwhm=%f', - desc='FWHM (in mm) of gaussian smoothing kernel') + argstr="--fwhm=%f", desc="FWHM (in mm) of gaussian smoothing kernel" + ) config = traits.String( - 'b02b0.cnf', - argstr='--config=%s', + "b02b0.cnf", + argstr="--config=%s", usedefault=True, - desc=('Name of config file specifying command line ' - 'arguments')) - max_iter = traits.Int( - argstr='--miter=%d', - desc='max # of non-linear iterations') + desc=("Name of config file specifying command line " "arguments"), + ) + max_iter = traits.Int(argstr="--miter=%d", desc="max # of non-linear iterations") reg_lambda = traits.Float( - argstr='--lambda=%0.f', - desc=('Weight of regularisation, default ' - 'depending on --ssqlambda and --regmod switches.')) + argstr="--lambda=%0.f", + desc=( + "Weight of regularisation, default " + "depending on --ssqlambda and --regmod switches." + ), + ) ssqlambda = traits.Enum( 1, 0, - argstr='--ssqlambda=%d', - desc=('Weight lambda by the current value of the ' - 'ssd. If used (=1), the effective weight of ' - 'regularisation term becomes higher for the ' - 'initial iterations, therefore initial steps' - ' are a little smoother than they would ' - 'without weighting. This reduces the ' - 'risk of finding a local minimum.')) + argstr="--ssqlambda=%d", + desc=( + "Weight lambda by the current value of the " + "ssd. If used (=1), the effective weight of " + "regularisation term becomes higher for the " + "initial iterations, therefore initial steps" + " are a little smoother than they would " + "without weighting. This reduces the " + "risk of finding a local minimum." 
+ ), + ) regmod = traits.Enum( - 'bending_energy', - 'membrane_energy', - argstr='--regmod=%s', - desc=('Regularisation term implementation. Defaults ' - 'to bending_energy. Note that the two functions' - ' have vastly different scales. The membrane ' - 'energy is based on the first derivatives and ' - 'the bending energy on the second derivatives. ' - 'The second derivatives will typically be much ' - 'smaller than the first derivatives, so input ' - 'lambda will have to be larger for ' - 'bending_energy to yield approximately the same' - ' level of regularisation.')) - estmov = traits.Enum( - 1, 0, argstr='--estmov=%d', desc='estimate movements if set') + "bending_energy", + "membrane_energy", + argstr="--regmod=%s", + desc=( + "Regularisation term implementation. Defaults " + "to bending_energy. Note that the two functions" + " have vastly different scales. The membrane " + "energy is based on the first derivatives and " + "the bending energy on the second derivatives. " + "The second derivatives will typically be much " + "smaller than the first derivatives, so input " + "lambda will have to be larger for " + "bending_energy to yield approximately the same" + " level of regularisation." + ), + ) + estmov = traits.Enum(1, 0, argstr="--estmov=%d", desc="estimate movements if set") minmet = traits.Enum( 0, 1, - argstr='--minmet=%d', - desc=('Minimisation method 0=Levenberg-Marquardt, ' - '1=Scaled Conjugate Gradient')) + argstr="--minmet=%d", + desc=( + "Minimisation method 0=Levenberg-Marquardt, " "1=Scaled Conjugate Gradient" + ), + ) splineorder = traits.Int( - argstr='--splineorder=%d', - desc=('order of spline, 2->Qadratic spline, ' - '3->Cubic spline')) + argstr="--splineorder=%d", + desc=("order of spline, 2->Quadratic spline, " "3->Cubic spline"), + ) numprec = traits.Enum( - 'double', - 'float', - argstr='--numprec=%s', - desc=('Precision for representing Hessian, double ' - 'or float.')) + "double", + "float", + argstr="--numprec=%s", + desc=("Precision for representing Hessian, double " "or float."), + ) interp = traits.Enum( - 'spline', - 'linear', - argstr='--interp=%s', - desc='Image interpolation model, linear or spline.') + "spline", + "linear", + argstr="--interp=%s", + desc="Image interpolation model, linear or spline.", + ) scale = traits.Enum( 0, 1, - argstr='--scale=%d', - desc=('If set (=1), the images are individually scaled' - ' to a common mean')) + argstr="--scale=%d", + desc=("If set (=1), the images are individually scaled" " to a common mean"), + ) regrid = traits.Enum( 1, 0, - argstr='--regrid=%d', - desc=('If set (=1), the calculations are done in a ' - 'different grid')) + argstr="--regrid=%d", + desc=("If set (=1), the calculations are done in a " "different grid"), + ) class TOPUPOutputSpec(TraitedSpec): - out_fieldcoef = File( - exists=True, desc='file containing the field coefficients') - out_movpar = File(exists=True, desc='movpar.txt output file') - out_enc_file = File(desc='encoding directions file output for applytopup') - out_field = File(desc='name of image file with field (Hz)') - out_warps = traits.List(File(exists=True), desc='warpfield images') - out_jacs = traits.List(File(exists=True), desc='Jacobian images') - out_mats = traits.List(File(exists=True), desc='realignment matrices') - out_corrected = File(desc='name of 4D image file with unwarped images') - out_logfile = File(desc='name of log-file') + out_fieldcoef = File(exists=True, desc="file containing the field coefficients") + out_movpar = File(exists=True, desc="movpar.txt output file")
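# A minimal sketch (assumed inputs, mirroring the encoding-file logic in
# _generate_encfile further down): the file behind out_enc_file gets one
# "x y z readout_time" row per volume, and a trailing "-" on a direction
# flips its sign. directions/readouts below are hypothetical values.
#
#     directions = ["y", "y-"]    # hypothetical encoding_direction
#     readouts = [0.05, 0.05]     # hypothetical readout_times
#     for encdir, dur in zip(directions, readouts):
#         sign = -1.0 if encdir.endswith("-") else 1.0
#         row = [sign * float(ax == encdir[0]) for ax in "xyz"] + [dur]
#         # row -> [0.0, 1.0, 0.0, 0.05], then [0.0, -1.0, 0.0, 0.05]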
+ out_enc_file = File(desc="encoding directions file output for applytopup") + out_field = File(desc="name of image file with field (Hz)") + out_warps = traits.List(File(exists=True), desc="warpfield images") + out_jacs = traits.List(File(exists=True), desc="Jacobian images") + out_mats = traits.List(File(exists=True), desc="realignment matrices") + out_corrected = File(desc="name of 4D image file with unwarped images") + out_logfile = File(desc="name of log-file") class TOPUP(FSLCommand): @@ -317,59 +344,62 @@ class TOPUP(FSLCommand): >>> res = topup.run() # doctest: +SKIP """ - _cmd = 'topup' + + _cmd = "topup" input_spec = TOPUPInputSpec output_spec = TOPUPOutputSpec def _format_arg(self, name, trait_spec, value): - if name == 'encoding_direction': + if name == "encoding_direction": return trait_spec.argstr % self._generate_encfile() - if name == 'out_base': + if name == "out_base": path, name, ext = split_filename(value) - if path != '': + if path != "": if not os.path.exists(path): - raise ValueError('out_base path must exist if provided') + raise ValueError("out_base path must exist if provided") return super(TOPUP, self)._format_arg(name, trait_spec, value) def _list_outputs(self): outputs = super(TOPUP, self)._list_outputs() - del outputs['out_base'] + del outputs["out_base"] base_path = None if isdefined(self.inputs.out_base): base_path, base, _ = split_filename(self.inputs.out_base) - if base_path == '': + if base_path == "": base_path = None else: - base = split_filename(self.inputs.in_file)[1] + '_base' - outputs['out_fieldcoef'] = self._gen_fname( - base, suffix='_fieldcoef', cwd=base_path) - outputs['out_movpar'] = self._gen_fname( - base, suffix='_movpar', ext='.txt', cwd=base_path) + base = split_filename(self.inputs.in_file)[1] + "_base" + outputs["out_fieldcoef"] = self._gen_fname( + base, suffix="_fieldcoef", cwd=base_path + ) + outputs["out_movpar"] = self._gen_fname( + base, suffix="_movpar", ext=".txt", cwd=base_path + ) n_vols = nb.load(self.inputs.in_file).shape[-1] ext = Info.output_type_to_ext(self.inputs.output_type) - fmt = os.path.abspath('{prefix}_{i:02d}{ext}').format - outputs['out_warps'] = [ + fmt = os.path.abspath("{prefix}_{i:02d}{ext}").format + outputs["out_warps"] = [ fmt(prefix=self.inputs.out_warp_prefix, i=i, ext=ext) for i in range(1, n_vols + 1) ] - outputs['out_jacs'] = [ + outputs["out_jacs"] = [ fmt(prefix=self.inputs.out_jac_prefix, i=i, ext=ext) for i in range(1, n_vols + 1) ] - outputs['out_mats'] = [ + outputs["out_mats"] = [ fmt(prefix=self.inputs.out_mat_prefix, i=i, ext=".mat") for i in range(1, n_vols + 1) ] if isdefined(self.inputs.encoding_direction): - outputs['out_enc_file'] = self._get_encfilename() + outputs["out_enc_file"] = self._get_encfilename() return outputs def _get_encfilename(self): out_file = os.path.join( - os.getcwd(), - ('%s_encfile.txt' % split_filename(self.inputs.in_file)[1])) + os.getcwd(), ("%s_encfile.txt" % split_filename(self.inputs.in_file)[1]) + ) return out_file def _generate_encfile(self): @@ -379,25 +409,28 @@ def _generate_encfile(self): durations = self.inputs.readout_times if len(self.inputs.encoding_direction) != len(durations): if len(self.inputs.readout_times) != 1: - raise ValueError(('Readout time must be a float or match the' - 'length of encoding directions')) + raise ValueError( + ( + "Readout time must be a float or match the " + "length of encoding directions" + ) + ) durations = durations * len(self.inputs.encoding_direction) lines = [] for idx, encdir in
enumerate(self.inputs.encoding_direction): direction = 1.0 - if encdir.endswith('-'): + if encdir.endswith("-"): direction = -1.0 line = [ - float(val[0] == encdir[0]) * direction - for val in ['x', 'y', 'z'] + float(val[0] == encdir[0]) * direction for val in ["x", "y", "z"] ] + [durations[idx]] lines.append(line) - np.savetxt(out_file, np.array(lines), fmt=b'%d %d %d %.8f') + np.savetxt(out_file, np.array(lines), fmt=b"%d %d %d %.8f") return out_file def _overload_extension(self, value, name=None): - if name == 'out_base': + if name == "out_base": return value return super(TOPUP, self)._overload_extension(value, name) @@ -406,61 +439,65 @@ class ApplyTOPUPInputSpec(FSLCommandInputSpec): in_files = InputMultiPath( File(exists=True), mandatory=True, - desc='name of file with images', - argstr='--imain=%s', - sep=',') + desc="name of file with images", + argstr="--imain=%s", + sep=",", + ) encoding_file = File( exists=True, mandatory=True, - desc='name of text file with PE directions/times', - argstr='--datain=%s') + desc="name of text file with PE directions/times", + argstr="--datain=%s", + ) in_index = traits.List( traits.Int, - argstr='--inindex=%s', - sep=',', - desc='comma separated list of indices corresponding to --datain') + argstr="--inindex=%s", + sep=",", + desc="comma separated list of indices corresponding to --datain", + ) in_topup_fieldcoef = File( exists=True, argstr="--topup=%s", copyfile=False, - requires=['in_topup_movpar'], - desc=('topup file containing the field ' - 'coefficients')) + requires=["in_topup_movpar"], + desc=("topup file containing the field " "coefficients"), + ) in_topup_movpar = File( exists=True, - requires=['in_topup_fieldcoef'], + requires=["in_topup_fieldcoef"], copyfile=False, - desc='topup movpar.txt file') + desc="topup movpar.txt file", + ) out_corrected = File( - desc='output (warped) image', - name_source=['in_files'], - name_template='%s_corrected', - argstr='--out=%s') + desc="output (warped) image", + name_source=["in_files"], + name_template="%s_corrected", + argstr="--out=%s", + ) method = traits.Enum( - 'jac', - 'lsr', - argstr='--method=%s', - desc=('use jacobian modulation (jac) or least-squares' - ' resampling (lsr)')) + "jac", + "lsr", + argstr="--method=%s", + desc=("use jacobian modulation (jac) or least-squares" " resampling (lsr)"), + ) interp = traits.Enum( - 'trilinear', - 'spline', - argstr='--interp=%s', - desc='interpolation method') + "trilinear", "spline", argstr="--interp=%s", desc="interpolation method" + ) datatype = traits.Enum( - 'char', - 'short', - 'int', - 'float', - 'double', - argstr='-d=%s', - desc='force output data type') + "char", + "short", + "int", + "float", + "double", + argstr="-d=%s", + desc="force output data type", + ) class ApplyTOPUPOutputSpec(TraitedSpec): out_corrected = File( - exists=True, desc=('name of 4D image file with ' - 'unwarped images')) + exists=True, desc=("name of 4D image file with " "unwarped images") + ) class ApplyTOPUP(FSLCommand): @@ -489,7 +526,8 @@ class ApplyTOPUP(FSLCommand): >>> res = applytopup.run() # doctest: +SKIP """ - _cmd = 'applytopup' + + _cmd = "applytopup" input_spec = ApplyTOPUPInputSpec output_spec = ApplyTOPUPOutputSpec @@ -500,15 +538,13 @@ def _parse_inputs(self, skip=None): # If not defined, assume index are the first N entries in the # parameters file, for N input images. 
if not isdefined(self.inputs.in_index): - self.inputs.in_index = list( - range(1, - len(self.inputs.in_files) + 1)) + self.inputs.in_index = list(range(1, len(self.inputs.in_files) + 1)) return super(ApplyTOPUP, self)._parse_inputs(skip=skip) def _format_arg(self, name, spec, value): - if name == 'in_topup_fieldcoef': - return spec.argstr % value.split('_fieldcoef')[0] + if name == "in_topup_fieldcoef": + return spec.argstr % value.split("_fieldcoef")[0] return super(ApplyTOPUP, self)._format_arg(name, spec, value) @@ -516,179 +552,196 @@ class EddyInputSpec(FSLCommandInputSpec): in_file = File( exists=True, mandatory=True, - argstr='--imain=%s', - desc=('File containing all the images to estimate ' - 'distortions for')) + argstr="--imain=%s", + desc=("File containing all the images to estimate " "distortions for"), + ) in_mask = File( - exists=True, - mandatory=True, - argstr='--mask=%s', - desc='Mask to indicate brain') + exists=True, mandatory=True, argstr="--mask=%s", desc="Mask to indicate brain" + ) in_index = File( exists=True, mandatory=True, - argstr='--index=%s', - desc=('File containing indices for all volumes in --imain ' - 'into --acqp and --topup')) + argstr="--index=%s", + desc=( + "File containing indices for all volumes in --imain " + "into --acqp and --topup" + ), + ) in_acqp = File( exists=True, mandatory=True, - argstr='--acqp=%s', - desc='File containing acquisition parameters') + argstr="--acqp=%s", + desc="File containing acquisition parameters", + ) in_bvec = File( exists=True, mandatory=True, - argstr='--bvecs=%s', - desc=('File containing the b-vectors for all volumes in ' - '--imain')) + argstr="--bvecs=%s", + desc=("File containing the b-vectors for all volumes in " "--imain"), + ) in_bval = File( exists=True, mandatory=True, - argstr='--bvals=%s', - desc=('File containing the b-values for all volumes in ' - '--imain')) + argstr="--bvals=%s", + desc=("File containing the b-values for all volumes in " "--imain"), + ) out_base = traits.Str( - 'eddy_corrected', - argstr='--out=%s', + "eddy_corrected", + argstr="--out=%s", usedefault=True, - desc=('basename for output (warped) image')) + desc=("basename for output (warped) image"), + ) session = File( exists=True, - argstr='--session=%s', - desc=('File containing session indices for all volumes in ' - '--imain')) + argstr="--session=%s", + desc=("File containing session indices for all volumes in " "--imain"), + ) in_topup_fieldcoef = File( exists=True, argstr="--topup=%s", - requires=['in_topup_movpar'], - desc=('topup file containing the field ' - 'coefficients')) + requires=["in_topup_movpar"], + desc=("topup file containing the field " "coefficients"), + ) in_topup_movpar = File( - exists=True, - requires=['in_topup_fieldcoef'], - desc='topup movpar.txt file') + exists=True, requires=["in_topup_fieldcoef"], desc="topup movpar.txt file" + ) flm = traits.Enum( - 'linear', - 'quadratic', - 'cubic', - argstr='--flm=%s', - desc='First level EC model') + "linear", "quadratic", "cubic", argstr="--flm=%s", desc="First level EC model" + ) slm = traits.Enum( - 'none', - 'linear', - 'quadratic', - argstr='--slm=%s', - desc='Second level EC model') + "none", "linear", "quadratic", argstr="--slm=%s", desc="Second level EC model" + ) fep = traits.Bool( - False, argstr='--fep', desc='Fill empty planes in x- or y-directions') + False, argstr="--fep", desc="Fill empty planes in x- or y-directions" + ) interp = traits.Enum( - 'spline', - 'trilinear', - argstr='--interp=%s', - desc='Interpolation model for estimation step') + 
"spline", + "trilinear", + argstr="--interp=%s", + desc="Interpolation model for estimation step", + ) nvoxhp = traits.Int( - 1000, usedefault=True, - argstr='--nvoxhp=%s', - desc=('# of voxels used to estimate the ' - 'hyperparameters')) + 1000, + usedefault=True, + argstr="--nvoxhp=%s", + desc=("# of voxels used to estimate the " "hyperparameters"), + ) fudge_factor = traits.Float( - 10.0, usedefault=True, - argstr='--ff=%s', - desc=('Fudge factor for hyperparameter ' - 'error variance')) + 10.0, + usedefault=True, + argstr="--ff=%s", + desc=("Fudge factor for hyperparameter " "error variance"), + ) dont_sep_offs_move = traits.Bool( False, - argstr='--dont_sep_offs_move', - desc=('Do NOT attempt to separate ' - 'field offset from subject ' - 'movement')) + argstr="--dont_sep_offs_move", + desc=("Do NOT attempt to separate " "field offset from subject " "movement"), + ) dont_peas = traits.Bool( False, - argstr='--dont_peas', - desc="Do NOT perform a post-eddy alignment of " - "shells") + argstr="--dont_peas", + desc="Do NOT perform a post-eddy alignment of " "shells", + ) fwhm = traits.Float( - desc=('FWHM for conditioning filter when estimating ' - 'the parameters'), - argstr='--fwhm=%s') + desc=("FWHM for conditioning filter when estimating " "the parameters"), + argstr="--fwhm=%s", + ) - niter = traits.Int(5, usedefault=True, - argstr='--niter=%s', desc='Number of iterations') + niter = traits.Int( + 5, usedefault=True, argstr="--niter=%s", desc="Number of iterations" + ) method = traits.Enum( - 'jac', - 'lsr', - argstr='--resamp=%s', - desc=('Final resampling method (jacobian/least ' - 'squares)')) + "jac", + "lsr", + argstr="--resamp=%s", + desc=("Final resampling method (jacobian/least " "squares)"), + ) repol = traits.Bool( - False, argstr='--repol', desc='Detect and replace outlier slices') + False, argstr="--repol", desc="Detect and replace outlier slices" + ) num_threads = traits.Int( - 1, - usedefault=True, - nohash=True, - desc="Number of openmp threads to use") + 1, usedefault=True, nohash=True, desc="Number of openmp threads to use" + ) is_shelled = traits.Bool( False, - argstr='--data_is_shelled', + argstr="--data_is_shelled", desc="Override internal check to ensure that " "date are acquired on a set of b-value " - "shells") + "shells", + ) field = traits.Str( - argstr='--field=%s', + argstr="--field=%s", desc="NonTOPUP fieldmap scaled in Hz - filename has " "to be provided without an extension. 
TOPUP is " - "strongly recommended") + "strongly recommended", + ) field_mat = File( exists=True, - argstr='--field_mat=%s', + argstr="--field_mat=%s", desc="Matrix that specifies the relative locations of " "the field specified by --field and first volume " - "in file --imain") + "in file --imain", + ) use_cuda = traits.Bool(False, desc="Run eddy using cuda gpu") cnr_maps = traits.Bool( - False, desc='Output CNR-Maps', argstr='--cnr_maps', min_ver='5.0.10') + False, desc="Output CNR-Maps", argstr="--cnr_maps", min_ver="5.0.10" + ) residuals = traits.Bool( - False, desc='Output Residuals', argstr='--residuals', min_ver='5.0.10') + False, desc="Output Residuals", argstr="--residuals", min_ver="5.0.10" + ) class EddyOutputSpec(TraitedSpec): out_corrected = File( - exists=True, desc='4D image file containing all the corrected volumes') + exists=True, desc="4D image file containing all the corrected volumes" + ) out_parameter = File( exists=True, - desc=('text file with parameters definining the field and' - 'movement for each scan')) + desc=( + "text file with parameters definining the field and" + "movement for each scan" + ), + ) out_rotated_bvecs = File( - exists=True, desc='File containing rotated b-values for all volumes') + exists=True, desc="File containing rotated b-values for all volumes" + ) out_movement_rms = File( - exists=True, desc='Summary of the "total movement" in each volume') + exists=True, desc='Summary of the "total movement" in each volume' + ) out_restricted_movement_rms = File( exists=True, - desc=('Summary of the "total movement" in each volume ' - 'disregarding translation in the PE direction')) + desc=( + 'Summary of the "total movement" in each volume ' + "disregarding translation in the PE direction" + ), + ) out_shell_alignment_parameters = File( exists=True, - desc=('File containing rigid body movement parameters ' - 'between the different shells as estimated by a ' - 'post-hoc mutual information based registration')) + desc=( + "File containing rigid body movement parameters " + "between the different shells as estimated by a " + "post-hoc mutual information based registration" + ), + ) out_outlier_report = File( exists=True, - desc=('Text-file with a plain language report on what ' - 'outlier slices eddy has found')) - out_cnr_maps = File( - exists=True, desc='path/name of file with the cnr_maps') - out_residuals = File( - exists=True, desc='path/name of file with the residuals') + desc=( + "Text-file with a plain language report on what " + "outlier slices eddy has found" + ), + ) + out_cnr_maps = File(exists=True, desc="path/name of file with the cnr_maps") + out_residuals = File(exists=True, desc="path/name of file with the residuals") class Eddy(FSLCommand): @@ -723,7 +776,8 @@ class Eddy(FSLCommand): >>> res = eddy.run() # doctest: +SKIP """ - _cmd = 'eddy_openmp' + + _cmd = "eddy_openmp" input_spec = EddyInputSpec output_spec = EddyOutputSpec @@ -731,34 +785,38 @@ class Eddy(FSLCommand): def __init__(self, **inputs): super(Eddy, self).__init__(**inputs) - self.inputs.on_trait_change(self._num_threads_update, 'num_threads') + self.inputs.on_trait_change(self._num_threads_update, "num_threads") if not isdefined(self.inputs.num_threads): self.inputs.num_threads = self._num_threads else: self._num_threads_update() - self.inputs.on_trait_change(self._use_cuda, 'use_cuda') + self.inputs.on_trait_change(self._use_cuda, "use_cuda") if isdefined(self.inputs.use_cuda): self._use_cuda() def _num_threads_update(self): self._num_threads = self.inputs.num_threads if 
not isdefined(self.inputs.num_threads): - if 'OMP_NUM_THREADS' in self.inputs.environ: - del self.inputs.environ['OMP_NUM_THREADS'] + if "OMP_NUM_THREADS" in self.inputs.environ: + del self.inputs.environ["OMP_NUM_THREADS"] else: - self.inputs.environ['OMP_NUM_THREADS'] = str( - self.inputs.num_threads) + self.inputs.environ["OMP_NUM_THREADS"] = str(self.inputs.num_threads) def _use_cuda(self): - self._cmd = 'eddy_cuda' if self.inputs.use_cuda else 'eddy_openmp' + self._cmd = "eddy_cuda" if self.inputs.use_cuda else "eddy_openmp" def _run_interface(self, runtime): # If 'eddy_openmp' is missing, use 'eddy' - FSLDIR = os.getenv('FSLDIR', '') + FSLDIR = os.getenv("FSLDIR", "") cmd = self._cmd - if all((FSLDIR != '', cmd == 'eddy_openmp', - not os.path.exists(os.path.join(FSLDIR, 'bin', cmd)))): - self._cmd = 'eddy' + if all( + ( + FSLDIR != "", + cmd == "eddy_openmp", + not os.path.exists(os.path.join(FSLDIR, "bin", cmd)), + ) + ): + self._cmd = "eddy" runtime = super(Eddy, self)._run_interface(runtime) # Restore command to avoid side-effects @@ -766,72 +824,77 @@ def _run_interface(self, runtime): return runtime def _format_arg(self, name, spec, value): - if name == 'in_topup_fieldcoef': - return spec.argstr % value.split('_fieldcoef')[0] - if name == 'out_base': + if name == "in_topup_fieldcoef": + return spec.argstr % value.split("_fieldcoef")[0] + if name == "out_base": return spec.argstr % os.path.abspath(value) return super(Eddy, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_corrected'] = os.path.abspath( - '%s.nii.gz' % self.inputs.out_base) - outputs['out_parameter'] = os.path.abspath( - '%s.eddy_parameters' % self.inputs.out_base) + outputs["out_corrected"] = os.path.abspath("%s.nii.gz" % self.inputs.out_base) + outputs["out_parameter"] = os.path.abspath( + "%s.eddy_parameters" % self.inputs.out_base + ) # File generation might depend on the version of EDDY out_rotated_bvecs = os.path.abspath( - '%s.eddy_rotated_bvecs' % self.inputs.out_base) + "%s.eddy_rotated_bvecs" % self.inputs.out_base + ) out_movement_rms = os.path.abspath( - '%s.eddy_movement_rms' % self.inputs.out_base) + "%s.eddy_movement_rms" % self.inputs.out_base + ) out_restricted_movement_rms = os.path.abspath( - '%s.eddy_restricted_movement_rms' % self.inputs.out_base) + "%s.eddy_restricted_movement_rms" % self.inputs.out_base + ) out_shell_alignment_parameters = os.path.abspath( - '%s.eddy_post_eddy_shell_alignment_parameters' % - self.inputs.out_base) + "%s.eddy_post_eddy_shell_alignment_parameters" % self.inputs.out_base + ) out_outlier_report = os.path.abspath( - '%s.eddy_outlier_report' % self.inputs.out_base) + "%s.eddy_outlier_report" % self.inputs.out_base + ) if isdefined(self.inputs.cnr_maps) and self.inputs.cnr_maps: out_cnr_maps = os.path.abspath( - '%s.eddy_cnr_maps.nii.gz' % self.inputs.out_base) + "%s.eddy_cnr_maps.nii.gz" % self.inputs.out_base + ) if os.path.exists(out_cnr_maps): - outputs['out_cnr_maps'] = out_cnr_maps + outputs["out_cnr_maps"] = out_cnr_maps if isdefined(self.inputs.residuals) and self.inputs.residuals: out_residuals = os.path.abspath( - '%s.eddy_residuals.nii.gz' % self.inputs.out_base) + "%s.eddy_residuals.nii.gz" % self.inputs.out_base + ) if os.path.exists(out_residuals): - outputs['out_residuals'] = out_residuals + outputs["out_residuals"] = out_residuals if os.path.exists(out_rotated_bvecs): - outputs['out_rotated_bvecs'] = out_rotated_bvecs + outputs["out_rotated_bvecs"] = out_rotated_bvecs if 
os.path.exists(out_movement_rms): - outputs['out_movement_rms'] = out_movement_rms + outputs["out_movement_rms"] = out_movement_rms if os.path.exists(out_restricted_movement_rms): - outputs['out_restricted_movement_rms'] = \ - out_restricted_movement_rms + outputs["out_restricted_movement_rms"] = out_restricted_movement_rms if os.path.exists(out_shell_alignment_parameters): - outputs['out_shell_alignment_parameters'] = \ - out_shell_alignment_parameters + outputs["out_shell_alignment_parameters"] = out_shell_alignment_parameters if os.path.exists(out_outlier_report): - outputs['out_outlier_report'] = out_outlier_report + outputs["out_outlier_report"] = out_outlier_report return outputs class SigLossInputSpec(FSLCommandInputSpec): - in_file = File( - mandatory=True, exists=True, argstr='-i %s', desc='b0 fieldmap file') + in_file = File(mandatory=True, exists=True, argstr="-i %s", desc="b0 fieldmap file") out_file = File( - argstr='-s %s', desc='output signal loss estimate file', genfile=True) + argstr="-s %s", desc="output signal loss estimate file", genfile=True + ) - mask_file = File(exists=True, argstr='-m %s', desc='brain mask file') - echo_time = traits.Float(argstr='--te=%f', desc='echo time in seconds') + mask_file = File(exists=True, argstr="-m %s", desc="brain mask file") + echo_time = traits.Float(argstr="--te=%f", desc="echo time in seconds") slice_direction = traits.Enum( - 'x', 'y', 'z', argstr='-d %s', desc='slicing direction') + "x", "y", "z", argstr="-d %s", desc="slicing direction" + ) class SigLossOuputSpec(TraitedSpec): - out_file = File(exists=True, desc='signal loss estimate file') + out_file = File(exists=True, desc="signal loss estimate file") class SigLoss(FSLCommand): @@ -852,118 +915,118 @@ class SigLoss(FSLCommand): """ + input_spec = SigLossInputSpec output_spec = SigLossOuputSpec - _cmd = 'sigloss' + _cmd = "sigloss" def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_file - if ((not isdefined(outputs['out_file'])) - and (isdefined(self.inputs.in_file))): - outputs['out_file'] = self._gen_fname( - self.inputs.in_file, suffix='_sigloss') + outputs["out_file"] = self.inputs.out_file + if (not isdefined(outputs["out_file"])) and (isdefined(self.inputs.in_file)): + outputs["out_file"] = self._gen_fname( + self.inputs.in_file, suffix="_sigloss" + ) return outputs def _gen_filename(self, name): - if name == 'out_file': - return self._list_outputs()['out_file'] + if name == "out_file": + return self._list_outputs()["out_file"] return None class EpiRegInputSpec(FSLCommandInputSpec): epi = File( - exists=True, - argstr='--epi=%s', - mandatory=True, - position=-4, - desc='EPI image') + exists=True, argstr="--epi=%s", mandatory=True, position=-4, desc="EPI image" + ) t1_head = File( exists=True, - argstr='--t1=%s', + argstr="--t1=%s", mandatory=True, position=-3, - desc='wholehead T1 image') + desc="wholehead T1 image", + ) t1_brain = File( exists=True, - argstr='--t1brain=%s', + argstr="--t1brain=%s", mandatory=True, position=-2, - desc='brain extracted T1 image') + desc="brain extracted T1 image", + ) out_base = traits.String( "epi2struct", - desc='output base name', - argstr='--out=%s', + desc="output base name", + argstr="--out=%s", position=-1, - usedefault=True) - fmap = File( - exists=True, argstr='--fmap=%s', desc='fieldmap image (in rad/s)') + usedefault=True, + ) + fmap = File(exists=True, argstr="--fmap=%s", desc="fieldmap image (in rad/s)") fmapmag = File( - exists=True, - argstr='--fmapmag=%s', - desc='fieldmap 
magnitude image - wholehead') + exists=True, argstr="--fmapmag=%s", desc="fieldmap magnitude image - wholehead" + ) fmapmagbrain = File( exists=True, - argstr='--fmapmagbrain=%s', - desc='fieldmap magnitude image - brain extracted') + argstr="--fmapmagbrain=%s", + desc="fieldmap magnitude image - brain extracted", + ) wmseg = File( exists=True, - argstr='--wmseg=%s', - desc='white matter segmentation of T1 image, has to be named \ - like the t1brain and end on _wmseg') + argstr="--wmseg=%s", + desc="white matter segmentation of T1 image, has to be named \ + like the t1brain and end on _wmseg", + ) echospacing = traits.Float( - argstr='--echospacing=%f', - desc='Effective EPI echo spacing \ - (sometimes called dwell time) - in seconds') + argstr="--echospacing=%f", + desc="Effective EPI echo spacing \ + (sometimes called dwell time) - in seconds", + ) pedir = traits.Enum( - 'x', - 'y', - 'z', - '-x', - '-y', - '-z', - argstr='--pedir=%s', - desc='phase encoding direction, dir = x/y/z/-x/-y/-z') + "x", + "y", + "z", + "-x", + "-y", + "-z", + argstr="--pedir=%s", + desc="phase encoding direction, dir = x/y/z/-x/-y/-z", + ) weight_image = File( - exists=True, - argstr='--weight=%s', - desc='weighting image (in T1 space)') + exists=True, argstr="--weight=%s", desc="weighting image (in T1 space)" + ) no_fmapreg = traits.Bool( False, - argstr='--nofmapreg', - desc='do not perform registration of fmap to T1 \ - (use if fmap already registered)') + argstr="--nofmapreg", + desc="do not perform registration of fmap to T1 \ + (use if fmap already registered)", + ) no_clean = traits.Bool( True, - argstr='--noclean', + argstr="--noclean", usedefault=True, - desc='do not clean up intermediate files') + desc="do not clean up intermediate files", + ) class EpiRegOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='unwarped and coregistered epi input') - out_1vol = File( - exists=True, desc='unwarped and coregistered single volume') - fmap2str_mat = File( - exists=True, desc='rigid fieldmap-to-structural transform') - fmap2epi_mat = File(exists=True, desc='rigid fieldmap-to-epi transform') - fmap_epi = File(exists=True, desc='fieldmap in epi space') - fmap_str = File(exists=True, desc='fieldmap in structural space') - fmapmag_str = File( - exists=True, desc='fieldmap magnitude image in structural space') - epi2str_inv = File(exists=True, desc='rigid structural-to-epi transform') - epi2str_mat = File(exists=True, desc='rigid epi-to-structural transform') - shiftmap = File(exists=True, desc='shiftmap in epi space') + out_file = File(exists=True, desc="unwarped and coregistered epi input") + out_1vol = File(exists=True, desc="unwarped and coregistered single volume") + fmap2str_mat = File(exists=True, desc="rigid fieldmap-to-structural transform") + fmap2epi_mat = File(exists=True, desc="rigid fieldmap-to-epi transform") + fmap_epi = File(exists=True, desc="fieldmap in epi space") + fmap_str = File(exists=True, desc="fieldmap in structural space") + fmapmag_str = File(exists=True, desc="fieldmap magnitude image in structural space") + epi2str_inv = File(exists=True, desc="rigid structural-to-epi transform") + epi2str_mat = File(exists=True, desc="rigid epi-to-structural transform") + shiftmap = File(exists=True, desc="shiftmap in epi space") fullwarp = File( exists=True, - desc='warpfield to unwarp epi and transform into \ - structural space') - wmseg = File( - exists=True, desc='white matter segmentation used in flirt bbr') - seg = File( - exists=True, desc='white matter, gray matter, csf 
segmentation') - wmedge = File(exists=True, desc='white matter edges for visualization') + desc="warpfield to unwarp epi and transform into \ + structural space", + ) + wmseg = File(exists=True, desc="white matter segmentation used in flirt bbr") + seg = File(exists=True, desc="white matter, gray matter, csf segmentation") + wmedge = File(exists=True, desc="white matter edges for visualization") class EpiReg(FSLCommand): @@ -993,44 +1056,59 @@ class EpiReg(FSLCommand): >>> epireg.run() # doctest: +SKIP """ - _cmd = 'epi_reg' + + _cmd = "epi_reg" input_spec = EpiRegInputSpec output_spec = EpiRegOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.join(os.getcwd(), - self.inputs.out_base + '.nii.gz') - if (not (isdefined(self.inputs.no_fmapreg) and self.inputs.no_fmapreg) - and isdefined(self.inputs.fmap)): - outputs['out_1vol'] = os.path.join( - os.getcwd(), self.inputs.out_base + '_1vol.nii.gz') - outputs['fmap2str_mat'] = os.path.join( - os.getcwd(), self.inputs.out_base + '_fieldmap2str.mat') - outputs['fmap2epi_mat'] = os.path.join( - os.getcwd(), self.inputs.out_base + '_fieldmaprads2epi.mat') - outputs['fmap_epi'] = os.path.join( - os.getcwd(), self.inputs.out_base + '_fieldmaprads2epi.nii.gz') - outputs['fmap_str'] = os.path.join( - os.getcwd(), self.inputs.out_base + '_fieldmaprads2str.nii.gz') - outputs['fmapmag_str'] = os.path.join( - os.getcwd(), self.inputs.out_base + '_fieldmap2str.nii.gz') - outputs['shiftmap'] = os.path.join( - os.getcwd(), - self.inputs.out_base + '_fieldmaprads2epi_shift.nii.gz') - outputs['fullwarp'] = os.path.join( - os.getcwd(), self.inputs.out_base + '_warp.nii.gz') - outputs['epi2str_inv'] = os.path.join( - os.getcwd(), self.inputs.out_base + '_inv.mat') - - outputs['epi2str_mat'] = os.path.join(os.getcwd(), - self.inputs.out_base + '.mat') - outputs['wmedge'] = os.path.join( - os.getcwd(), self.inputs.out_base + '_fast_wmedge.nii.gz') - outputs['wmseg'] = os.path.join( - os.getcwd(), self.inputs.out_base + '_fast_wmseg.nii.gz') - outputs['seg'] = os.path.join( - os.getcwd(), self.inputs.out_base + '_fast_seg.nii.gz') + outputs["out_file"] = os.path.join( + os.getcwd(), self.inputs.out_base + ".nii.gz" + ) + if not ( + isdefined(self.inputs.no_fmapreg) and self.inputs.no_fmapreg + ) and isdefined(self.inputs.fmap): + outputs["out_1vol"] = os.path.join( + os.getcwd(), self.inputs.out_base + "_1vol.nii.gz" + ) + outputs["fmap2str_mat"] = os.path.join( + os.getcwd(), self.inputs.out_base + "_fieldmap2str.mat" + ) + outputs["fmap2epi_mat"] = os.path.join( + os.getcwd(), self.inputs.out_base + "_fieldmaprads2epi.mat" + ) + outputs["fmap_epi"] = os.path.join( + os.getcwd(), self.inputs.out_base + "_fieldmaprads2epi.nii.gz" + ) + outputs["fmap_str"] = os.path.join( + os.getcwd(), self.inputs.out_base + "_fieldmaprads2str.nii.gz" + ) + outputs["fmapmag_str"] = os.path.join( + os.getcwd(), self.inputs.out_base + "_fieldmap2str.nii.gz" + ) + outputs["shiftmap"] = os.path.join( + os.getcwd(), self.inputs.out_base + "_fieldmaprads2epi_shift.nii.gz" + ) + outputs["fullwarp"] = os.path.join( + os.getcwd(), self.inputs.out_base + "_warp.nii.gz" + ) + outputs["epi2str_inv"] = os.path.join( + os.getcwd(), self.inputs.out_base + "_inv.mat" + ) + + outputs["epi2str_mat"] = os.path.join( + os.getcwd(), self.inputs.out_base + ".mat" + ) + outputs["wmedge"] = os.path.join( + os.getcwd(), self.inputs.out_base + "_fast_wmedge.nii.gz" + ) + outputs["wmseg"] = os.path.join( + os.getcwd(), self.inputs.out_base + 
"_fast_wmseg.nii.gz" + ) + outputs["seg"] = os.path.join( + os.getcwd(), self.inputs.out_base + "_fast_seg.nii.gz" + ) return outputs @@ -1043,44 +1121,49 @@ def _list_outputs(self): class EPIDeWarpInputSpec(FSLCommandInputSpec): mag_file = File( exists=True, - desc='Magnitude file', - argstr='--mag %s', + desc="Magnitude file", + argstr="--mag %s", position=0, - mandatory=True) + mandatory=True, + ) dph_file = File( exists=True, - desc='Phase file assumed to be scaled from 0 to 4095', - argstr='--dph %s', - mandatory=True) + desc="Phase file assumed to be scaled from 0 to 4095", + argstr="--dph %s", + mandatory=True, + ) exf_file = File( - exists=True, - desc='example func volume (or use epi)', - argstr='--exf %s') - epi_file = File( - exists=True, desc='EPI volume to unwarp', argstr='--epi %s') + exists=True, desc="example func volume (or use epi)", argstr="--exf %s" + ) + epi_file = File(exists=True, desc="EPI volume to unwarp", argstr="--epi %s") tediff = traits.Float( 2.46, usedefault=True, - desc='difference in B0 field map TEs', - argstr='--tediff %s') + desc="difference in B0 field map TEs", + argstr="--tediff %s", + ) esp = traits.Float( - 0.58, desc='EPI echo spacing', argstr='--esp %s', usedefault=True) + 0.58, desc="EPI echo spacing", argstr="--esp %s", usedefault=True + ) sigma = traits.Int( 2, usedefault=True, - argstr='--sigma %s', + argstr="--sigma %s", desc="2D spatial gaussing smoothing \ - stdev (default = 2mm)") - vsm = traits.String( - genfile=True, desc='voxel shift map', argstr='--vsm %s') + stdev (default = 2mm)", + ) + vsm = traits.String(genfile=True, desc="voxel shift map", argstr="--vsm %s") exfdw = traits.String( - desc='dewarped example func volume', genfile=True, argstr='--exfdw %s') + desc="dewarped example func volume", genfile=True, argstr="--exfdw %s" + ) epidw = traits.String( - desc='dewarped epi volume', genfile=False, argstr='--epidw %s') - tmpdir = traits.String(genfile=True, desc='tmpdir', argstr='--tmpdir %s') + desc="dewarped epi volume", genfile=False, argstr="--epidw %s" + ) + tmpdir = traits.String(genfile=True, desc="tmpdir", argstr="--tmpdir %s") nocleanup = traits.Bool( - True, usedefault=True, desc='no cleanup', argstr='--nocleanup') - cleanup = traits.Bool(desc='cleanup', argstr='--cleanup') + True, usedefault=True, desc="no cleanup", argstr="--nocleanup" + ) + cleanup = traits.Bool(desc="cleanup", argstr="--cleanup") class EPIDeWarpOutputSpec(TraitedSpec): @@ -1115,14 +1198,19 @@ class EPIDeWarp(FSLCommand): """ - _cmd = 'epidewarp.fsl' + + _cmd = "epidewarp.fsl" input_spec = EPIDeWarpInputSpec output_spec = EPIDeWarpOutputSpec def __init__(self, **inputs): - warnings.warn(("Deprecated: Please use " - "nipype.workflows.dmri.preprocess.epi.sdc_fmb instead"), - DeprecationWarning) + warnings.warn( + ( + "Deprecated: Please use " + "nipype.workflows.dmri.preprocess.epi.sdc_fmb instead" + ), + DeprecationWarning, + ) return super(EPIDeWarp, self).__init__(**inputs) def _run_interface(self, runtime): @@ -1132,70 +1220,72 @@ def _run_interface(self, runtime): return runtime def _gen_filename(self, name): - if name == 'exfdw': + if name == "exfdw": if isdefined(self.inputs.exf_file): return self._gen_fname(self.inputs.exf_file, suffix="_exfdw") else: return self._gen_fname("exfdw") - if name == 'epidw': + if name == "epidw": if isdefined(self.inputs.epi_file): return self._gen_fname(self.inputs.epi_file, suffix="_epidw") - if name == 'vsm': - return self._gen_fname('vsm') - if name == 'tmpdir': - return os.path.join(os.getcwd(), 'temp') + if 
name == "vsm": + return self._gen_fname("vsm") + if name == "tmpdir": + return os.path.join(os.getcwd(), "temp") return None def _list_outputs(self): outputs = self.output_spec().get() if not isdefined(self.inputs.exfdw): - outputs['exfdw'] = self._gen_filename('exfdw') + outputs["exfdw"] = self._gen_filename("exfdw") else: - outputs['exfdw'] = self.inputs.exfdw + outputs["exfdw"] = self.inputs.exfdw if isdefined(self.inputs.epi_file): if isdefined(self.inputs.epidw): - outputs['unwarped_file'] = self.inputs.epidw + outputs["unwarped_file"] = self.inputs.epidw else: - outputs['unwarped_file'] = self._gen_filename('epidw') + outputs["unwarped_file"] = self._gen_filename("epidw") if not isdefined(self.inputs.vsm): - outputs['vsm_file'] = self._gen_filename('vsm') + outputs["vsm_file"] = self._gen_filename("vsm") else: - outputs['vsm_file'] = self._gen_fname(self.inputs.vsm) + outputs["vsm_file"] = self._gen_fname(self.inputs.vsm) if not isdefined(self.inputs.tmpdir): - outputs['exf_mask'] = self._gen_fname( - cwd=self._gen_filename('tmpdir'), basename='maskexf') + outputs["exf_mask"] = self._gen_fname( + cwd=self._gen_filename("tmpdir"), basename="maskexf" + ) else: - outputs['exf_mask'] = self._gen_fname( - cwd=self.inputs.tmpdir, basename='maskexf') + outputs["exf_mask"] = self._gen_fname( + cwd=self.inputs.tmpdir, basename="maskexf" + ) return outputs class EddyCorrectInputSpec(FSLCommandInputSpec): in_file = File( - exists=True, - desc='4D input file', - argstr='%s', - position=0, - mandatory=True) + exists=True, desc="4D input file", argstr="%s", position=0, mandatory=True + ) out_file = File( - desc='4D output file', - argstr='%s', + desc="4D output file", + argstr="%s", position=1, - name_source=['in_file'], - name_template='%s_edc', - output_name='eddy_corrected') + name_source=["in_file"], + name_template="%s_edc", + output_name="eddy_corrected", + ) ref_num = traits.Int( 0, - argstr='%d', + argstr="%d", position=2, - desc='reference number', + desc="reference number", mandatory=True, - usedefault=True) + usedefault=True, + ) class EddyCorrectOutputSpec(TraitedSpec): eddy_corrected = File( - exists=True, desc='path/name of 4D eddy corrected output file') + exists=True, desc="path/name of 4D eddy corrected output file" + ) class EddyCorrect(FSLCommand): @@ -1214,13 +1304,16 @@ class EddyCorrect(FSLCommand): 'eddy_correct diffusion.nii diffusion_edc.nii 0' """ - _cmd = 'eddy_correct' + + _cmd = "eddy_correct" input_spec = EddyCorrectInputSpec output_spec = EddyCorrectOutputSpec def __init__(self, **inputs): - warnings.warn(("Deprecated: Please use nipype.interfaces.fsl.epi.Eddy " - "instead"), DeprecationWarning) + warnings.warn( + ("Deprecated: Please use nipype.interfaces.fsl.epi.Eddy " "instead"), + DeprecationWarning, + ) return super(EddyCorrect, self).__init__(**inputs) def _run_interface(self, runtime): @@ -1232,111 +1325,111 @@ def _run_interface(self, runtime): class EddyQuadInputSpec(FSLCommandInputSpec): base_name = traits.Str( - 'eddy_corrected', + "eddy_corrected", usedefault=True, - argstr='%s', - desc=("Basename (including path) for EDDY output files, i.e., " - "corrected images and QC files"), + argstr="%s", + desc=( + "Basename (including path) for EDDY output files, i.e., " + "corrected images and QC files" + ), position=0, ) idx_file = File( exists=True, mandatory=True, argstr="--eddyIdx %s", - desc=("File containing indices for all volumes into acquisition " - "parameters") + desc=("File containing indices for all volumes into acquisition " "parameters"), ) 
param_file = File( exists=True, mandatory=True, argstr="--eddyParams %s", - desc="File containing acquisition parameters" + desc="File containing acquisition parameters", ) mask_file = File( - exists=True, - mandatory=True, - argstr="--mask %s", - desc="Binary mask file" + exists=True, mandatory=True, argstr="--mask %s", desc="Binary mask file" ) bval_file = File( - exists=True, - mandatory=True, - argstr="--bvals %s", - desc="b-values file" + exists=True, mandatory=True, argstr="--bvals %s", desc="b-values file" ) bvec_file = File( exists=True, argstr="--bvecs %s", - desc=("b-vectors file - only used when .eddy_residuals " - "file is present") + desc=( + "b-vectors file - only used when .eddy_residuals " + "file is present" + ), ) output_dir = traits.Str( - name_template='%s.qc', - name_source=['base_name'], - argstr='--output-dir %s', + name_template="%s.qc", + name_source=["base_name"], + argstr="--output-dir %s", desc="Output directory - default = '<base_name>.qc'", ) field = File( - exists=True, - argstr='--field %s', - desc="TOPUP estimated field (in Hz)", + exists=True, argstr="--field %s", desc="TOPUP estimated field (in Hz)", ) slice_spec = File( exists=True, - argstr='--slspec %s', + argstr="--slspec %s", desc="Text file specifying slice/group acquisition", ) - verbose = traits.Bool( - argstr='--verbose', - desc="Display debug messages", - ) + verbose = traits.Bool(argstr="--verbose", desc="Display debug messages",) class EddyQuadOutputSpec(TraitedSpec): qc_json = File( exists=True, - desc=("Single subject database containing quality metrics and data " - "info.") - ) - qc_pdf = File( - exists=True, - desc="Single subject QC report." + desc=("Single subject database containing quality metrics and data " "info."), ) + qc_pdf = File(exists=True, desc="Single subject QC report.") avg_b_png = traits.List( File(exists=True), - desc=("Image showing mid-sagittal, -coronal and -axial slices of " - "each averaged b-shell volume.") + desc=( + "Image showing mid-sagittal, -coronal and -axial slices of " + "each averaged b-shell volume." + ), ) avg_b0_pe_png = traits.List( File(exists=True), - desc=("Image showing mid-sagittal, -coronal and -axial slices of " - "each averaged pe-direction b0 volume. Generated when using " - "the -f option.") + desc=( + "Image showing mid-sagittal, -coronal and -axial slices of " + "each averaged pe-direction b0 volume. Generated when using " + "the -f option." + ), ) cnr_png = traits.List( File(exists=True), - desc=("Image showing mid-sagittal, -coronal and -axial slices of " - "each b-shell CNR volume. Generated when CNR maps are " - "available.") + desc=( + "Image showing mid-sagittal, -coronal and -axial slices of " + "each b-shell CNR volume. Generated when CNR maps are " + "available." + ), ) vdm_png = File( exists=True, - desc=("Image showing mid-sagittal, -coronal and -axial slices of " - "the voxel displacement map. Generated when using the -f " - "option.") + desc=( + "Image showing mid-sagittal, -coronal and -axial slices of " + "the voxel displacement map. Generated when using the -f " + "option." + ), ) residuals = File( exists=True, - desc=("Text file containing the volume-wise mask-averaged squared " - "residuals. Generated when residual maps are available.") + desc=( + "Text file containing the volume-wise mask-averaged squared " + "residuals. Generated when residual maps are available." + ), ) clean_volumes = File( exists=True, - desc=("Text file containing a list of clean volumes, based on " - "the eddy squared residuals.
To generate a version of the " - "pre-processed dataset without outlier volumes, use: " - "`fslselectvols -i <eddy_corrected_data> -o " - "eddy_corrected_data_clean --vols=vols_no_outliers.txt`") + desc=( + "Text file containing a list of clean volumes, based on " + "the eddy squared residuals. To generate a version of the " + "pre-processed dataset without outlier volumes, use: " + "`fslselectvols -i <eddy_corrected_data> -o " + "eddy_corrected_data_clean --vols=vols_no_outliers.txt`" + ), ) @@ -1368,52 +1461,52 @@ class EddyQuad(FSLCommand): >>> res = quad.run() # doctest: +SKIP """ - _cmd = 'eddy_quad' + + _cmd = "eddy_quad" input_spec = EddyQuadInputSpec output_spec = EddyQuadOutputSpec def _list_outputs(self): from glob import glob + outputs = self.output_spec().get() # If the output directory isn't defined, the interface seems to use # the default but not set its value in `self.inputs.output_dir` if not isdefined(self.inputs.output_dir): - out_dir = os.path.abspath(os.path.basename(self.inputs.base_name) + '.qc') + out_dir = os.path.abspath(os.path.basename(self.inputs.base_name) + ".qc") else: out_dir = os.path.abspath(self.inputs.output_dir) - outputs['qc_json'] = os.path.join(out_dir, 'qc.json') - outputs['qc_pdf'] = os.path.join(out_dir, 'qc.pdf') + outputs["qc_json"] = os.path.join(out_dir, "qc.json") + outputs["qc_pdf"] = os.path.join(out_dir, "qc.pdf") # Grab all b* files here. This will also grab the b0_pe* files # as well, but only if the field input was provided. So we'll remove # them later in the next conditional. - outputs['avg_b_png'] = sorted(glob( - os.path.join(out_dir, 'avg_b*.png') - )) + outputs["avg_b_png"] = sorted(glob(os.path.join(out_dir, "avg_b*.png"))) if isdefined(self.inputs.field): - outputs['avg_b0_pe_png'] = sorted(glob( - os.path.join(out_dir, 'avg_b0_pe*.png') - )) + outputs["avg_b0_pe_png"] = sorted( + glob(os.path.join(out_dir, "avg_b0_pe*.png")) + ) # The previous glob for `avg_b_png` also grabbed the # `avg_b0_pe_png` files so we have to remove them # from `avg_b_png`.
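# A small sketch (hedged, with assumed filenames) of what the removal in the
# loop below amounts to: a plain list difference over the two glob results.
#
#     avg_b = ["avg_b0.png", "avg_b0_pe0.png", "avg_b1000.png"]
#     avg_b0_pe = ["avg_b0_pe0.png"]
#     for fname in avg_b0_pe:
#         avg_b.remove(fname)
#     # avg_b is now ["avg_b0.png", "avg_b1000.png"]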
- for fname in outputs['avg_b0_pe_png']: - outputs['avg_b_png'].remove(fname) + for fname in outputs["avg_b0_pe_png"]: + outputs["avg_b_png"].remove(fname) - outputs['vdm_png'] = os.path.join(out_dir, 'vdm.png') + outputs["vdm_png"] = os.path.join(out_dir, "vdm.png") - outputs['cnr_png'] = sorted(glob(os.path.join(out_dir, 'cnr*.png'))) + outputs["cnr_png"] = sorted(glob(os.path.join(out_dir, "cnr*.png"))) - residuals = os.path.join(out_dir, 'eddy_msr.txt') + residuals = os.path.join(out_dir, "eddy_msr.txt") if os.path.isfile(residuals): - outputs['residuals'] = residuals + outputs["residuals"] = residuals - clean_volumes = os.path.join(out_dir, 'vols_no_outliers.txt') + clean_volumes = os.path.join(out_dir, "vols_no_outliers.txt") if os.path.isfile(clean_volumes): - outputs['clean_volumes'] = clean_volumes + outputs["clean_volumes"] = clean_volumes return outputs diff --git a/nipype/interfaces/fsl/fix.py b/nipype/interfaces/fsl/fix.py index ab30c5da90..769513f8c3 100644 --- a/nipype/interfaces/fsl/fix.py +++ b/nipype/interfaces/fsl/fix.py @@ -55,9 +55,19 @@ """ -from ..base import (TraitedSpec, CommandLineInputSpec, CommandLine, - InputMultiPath, OutputMultiPath, BaseInterface, - BaseInterfaceInputSpec, traits, Directory, File, isdefined) +from ..base import ( + TraitedSpec, + CommandLineInputSpec, + CommandLine, + InputMultiPath, + OutputMultiPath, + BaseInterface, + BaseInterfaceInputSpec, + traits, + Directory, + File, + isdefined, +) import os @@ -65,29 +75,32 @@ class TrainingSetCreatorInputSpec(BaseInterfaceInputSpec): mel_icas_in = InputMultiPath( Directory(exists=True), copyfile=False, - desc='Melodic output directories', - argstr='%s', - position=-1) + desc="Melodic output directories", + argstr="%s", + position=-1, + ) class TrainingSetCreatorOutputSpec(TraitedSpec): mel_icas_out = OutputMultiPath( Directory(exists=True), copyfile=False, - desc='Hand labels for noise vs signal', - argstr='%s', - position=-1) + desc="Hand labels for noise vs signal", + argstr="%s", + position=-1, + ) class TrainingSetCreator(BaseInterface): - '''Goes through set of provided melodic output directories, to find all + """Goes through set of provided melodic output directories, to find all the ones that have a hand_labels_noise.txt file in them. This is outsourced as a separate class, so that the pipeline is rerun every time a hand-labeled file has been changed, or a new one created.
- ''' + """ + input_spec = TrainingSetCreatorInputSpec output_spec = TrainingSetCreatorOutputSpec _always_run = True @@ -95,23 +108,24 @@ class TrainingSetCreator(BaseInterface): def _run_interface(self, runtime): mel_icas = [] for item in self.inputs.mel_icas_in: - if os.path.exists(os.path.join(item, 'hand_labels_noise.txt')): + if os.path.exists(os.path.join(item, "hand_labels_noise.txt")): mel_icas.append(item) if len(mel_icas) == 0: raise Exception( - '%s did not find any hand_labels_noise.txt files in the following directories: %s' - % (self.__class__.__name__, mel_icas)) + "%s did not find any hand_labels_noise.txt files in the following directories: %s" + % (self.__class__.__name__, mel_icas) + ) return runtime def _list_outputs(self): mel_icas = [] for item in self.inputs.mel_icas_in: - if os.path.exists(os.path.join(item, 'hand_labels_noise.txt')): + if os.path.exists(os.path.join(item, "hand_labels_noise.txt")): mel_icas.append(item) outputs = self._outputs().get() - outputs['mel_icas_out'] = mel_icas + outputs["mel_icas_out"] = mel_icas return outputs @@ -119,31 +133,34 @@ class FeatureExtractorInputSpec(CommandLineInputSpec): mel_ica = Directory( exists=True, copyfile=False, - desc='Melodic output directory or directories', - argstr='%s', - position=-1) + desc="Melodic output directory or directories", + argstr="%s", + position=-1, + ) class FeatureExtractorOutputSpec(TraitedSpec): mel_ica = Directory( exists=True, copyfile=False, - desc='Melodic output directory or directories', - argstr='%s', - position=-1) + desc="Melodic output directory or directories", + argstr="%s", + position=-1, + ) class FeatureExtractor(CommandLine): - ''' + """ Extract features (for later training and/or classifying) - ''' + """ + input_spec = FeatureExtractorInputSpec output_spec = FeatureExtractorOutputSpec - cmd = 'fix -f' + cmd = "fix -f" def _list_outputs(self): outputs = self.output_spec().get() - outputs['mel_ica'] = self.inputs.mel_ica + outputs["mel_ica"] = self.inputs.mel_ica return outputs @@ -151,42 +168,43 @@ class TrainingInputSpec(CommandLineInputSpec): mel_icas = InputMultiPath( Directory(exists=True), copyfile=False, - desc='Melodic output directories', - argstr='%s', - position=-1) + desc="Melodic output directories", + argstr="%s", + position=-1, + ) trained_wts_filestem = traits.Str( - desc= - 'trained-weights filestem, used for trained_wts_file and output directories', - argstr='%s', - position=1) + desc="trained-weights filestem, used for trained_wts_file and output directories", + argstr="%s", + position=1, + ) loo = traits.Bool( - argstr='-l', - desc='full leave-one-out test with classifier training', - position=2) + argstr="-l", desc="full leave-one-out test with classifier training", position=2 + ) class TrainingOutputSpec(TraitedSpec): - trained_wts_file = File(exists=True, desc='Trained-weights file') + trained_wts_file = File(exists=True, desc="Trained-weights file") class Training(CommandLine): - ''' + """ Train the classifier based on your own FEAT/MELODIC output directory. 
- ''' + """ + input_spec = TrainingInputSpec output_spec = TrainingOutputSpec - cmd = 'fix -t' + cmd = "fix -t" def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.trained_wts_filestem): - outputs['trained_wts_file'] = os.path.abspath( - self.inputs.trained_wts_filestem + '.RData') + outputs["trained_wts_file"] = os.path.abspath( + self.inputs.trained_wts_filestem + ".RData" + ) else: - outputs['trained_wts_file'] = os.path.abspath( - 'trained_wts_file.RData') + outputs["trained_wts_file"] = os.path.abspath("trained_wts_file.RData") return outputs @@ -194,47 +212,50 @@ class AccuracyTesterInputSpec(CommandLineInputSpec): mel_icas = InputMultiPath( Directory(exists=True), copyfile=False, - desc='Melodic output directories', - argstr='%s', + desc="Melodic output directories", + argstr="%s", position=3, - mandatory=True) + mandatory=True, + ) trained_wts_file = File( - desc='trained-weights file', argstr='%s', position=1, mandatory=True) + desc="trained-weights file", argstr="%s", position=1, mandatory=True + ) output_directory = Directory( - desc= - 'Path to folder in which to store the results of the accuracy test.', - argstr='%s', + desc="Path to folder in which to store the results of the accuracy test.", + argstr="%s", position=2, - mandatory=True) + mandatory=True, + ) class AccuracyTesterOutputSpec(TraitedSpec): output_directory = Directory( - desc= - 'Path to folder in which to store the results of the accuracy test.', - argstr='%s', - position=1) + desc="Path to folder in which to store the results of the accuracy test.", + argstr="%s", + position=1, + ) class AccuracyTester(CommandLine): - ''' + """ Test the accuracy of an existing training dataset on a set of hand-labelled subjects. Note: This may or may not be working. Couldn't presently confirm because fix fails on this (even outside of nipype) without leaving an error msg.
- ''' + """ + input_spec = AccuracyTesterInputSpec output_spec = AccuracyTesterOutputSpec - cmd = 'fix -C' + cmd = "fix -C" def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.output_directory): - outputs['output_directory'] = Directory( - exists=False, value=self.inputs.output_directory) + outputs["output_directory"] = Directory( + exists=False, value=self.inputs.output_directory + ) else: - outputs['output_directory'] = Directory( - exists=False, value='accuracy_test') + outputs["output_directory"] = Directory(exists=False, value="accuracy_test") return outputs @@ -242,58 +263,58 @@ class ClassifierInputSpec(CommandLineInputSpec): mel_ica = Directory( exists=True, copyfile=False, - desc='Melodic output directory or directories', - argstr='%s', - position=1) + desc="Melodic output directory or directories", + argstr="%s", + position=1, + ) trained_wts_file = File( exists=True, - desc='trained-weights file', - argstr='%s', + desc="trained-weights file", + argstr="%s", position=2, mandatory=True, - copyfile=False) + copyfile=False, + ) thresh = traits.Int( - argstr='%d', - desc='Threshold for cleanup.', - position=-1, - mandatory=True) + argstr="%d", desc="Threshold for cleanup.", position=-1, mandatory=True + ) artifacts_list_file = File( - desc= - 'Text file listing which ICs are artifacts; can be the output from classification or can be created manually' + desc="Text file listing which ICs are artifacts; can be the output from classification or can be created manually" ) class ClassifierOutputSpec(TraitedSpec): artifacts_list_file = File( - desc= - 'Text file listing which ICs are artifacts; can be the output from classification or can be created manually' + desc="Text file listing which ICs are artifacts; can be the output from classification or can be created manually" ) class Classifier(CommandLine): - ''' + """ Classify ICA components using a specific training dataset ( is in the range 0-100, typically 5-20). 
- ''' + """ + input_spec = ClassifierInputSpec output_spec = ClassifierOutputSpec - cmd = 'fix -c' + cmd = "fix -c" def _gen_artifacts_list_file(self, mel_ica, thresh): _, trained_wts_file = os.path.split(self.inputs.trained_wts_file) - trained_wts_filestem = trained_wts_file.split('.')[0] - filestem = 'fix4melview_' + trained_wts_filestem + '_thr' + trained_wts_filestem = trained_wts_file.split(".")[0] + filestem = "fix4melview_" + trained_wts_filestem + "_thr" - fname = os.path.join(mel_ica, filestem + str(thresh) + '.txt') + fname = os.path.join(mel_ica, filestem + str(thresh) + ".txt") return fname def _list_outputs(self): outputs = self.output_spec().get() - outputs['artifacts_list_file'] = self._gen_artifacts_list_file( - self.inputs.mel_ica, self.inputs.thresh) + outputs["artifacts_list_file"] = self._gen_artifacts_list_file( + self.inputs.mel_ica, self.inputs.thresh + ) return outputs @@ -301,68 +322,73 @@ def _list_outputs(self): class CleanerInputSpec(CommandLineInputSpec): artifacts_list_file = File( exists=True, - argstr='%s', + argstr="%s", position=1, mandatory=True, - desc= - 'Text file listing which ICs are artifacts; can be the output from classification or can be created manually' + desc="Text file listing which ICs are artifacts; can be the output from classification or can be created manually", ) cleanup_motion = traits.Bool( - argstr='-m', - desc= - 'cleanup motion confounds, looks for design.fsf for highpass filter cut-off', - position=2) + argstr="-m", + desc="cleanup motion confounds, looks for design.fsf for highpass filter cut-off", + position=2, + ) highpass = traits.Float( 100, - argstr='-m -h %f', + argstr="-m -h %f", usedefault=True, - desc='cleanup motion confounds', - position=2) + desc="cleanup motion confounds", + position=2, + ) aggressive = traits.Bool( - argstr='-A', - desc= - 'Apply aggressive (full variance) cleanup, instead of the default less-aggressive (unique variance) cleanup.', - position=3) + argstr="-A", + desc="Apply aggressive (full variance) cleanup, instead of the default less-aggressive (unique variance) cleanup.", + position=3, + ) confound_file = File( - argstr='-x %s', desc='Include additional confound file.', position=4) + argstr="-x %s", desc="Include additional confound file.", position=4 + ) confound_file_1 = File( - argstr='-x %s', desc='Include additional confound file.', position=5) + argstr="-x %s", desc="Include additional confound file.", position=5 + ) confound_file_2 = File( - argstr='-x %s', desc='Include additional confound file.', position=6) + argstr="-x %s", desc="Include additional confound file.", position=6 + ) class CleanerOutputSpec(TraitedSpec): - cleaned_functional_file = File(exists=True, desc='Cleaned session data') + cleaned_functional_file = File(exists=True, desc="Cleaned session data") class Cleaner(CommandLine): - ''' + """ Extract features (for later training and/or classifying) - ''' + """ + input_spec = CleanerInputSpec output_spec = CleanerOutputSpec - cmd = 'fix -a' + cmd = "fix -a" def _get_cleaned_functional_filename(self, artifacts_list_filename): - ''' extract the proper filename from the first line of the artifacts file ''' - artifacts_list_file = open(artifacts_list_filename, 'r') - functional_filename, extension = artifacts_list_file.readline().split( - '.') + """ extract the proper filename from the first line of the artifacts file """ + artifacts_list_file = open(artifacts_list_filename, "r") + functional_filename, extension = artifacts_list_file.readline().split(".") 
         artifacts_list_file_path, artifacts_list_filename = os.path.split(
-            artifacts_list_filename)
+            artifacts_list_filename
+        )

-        return (os.path.join(artifacts_list_file_path,
-                             functional_filename + '_clean.nii.gz'))
+        return os.path.join(
+            artifacts_list_file_path, functional_filename + "_clean.nii.gz"
+        )

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs[
-            'cleaned_functional_file'] = self._get_cleaned_functional_filename(
-                self.inputs.artifacts_list_file)
+        outputs["cleaned_functional_file"] = self._get_cleaned_functional_filename(
+            self.inputs.artifacts_list_file
+        )
         return outputs
diff --git a/nipype/interfaces/fsl/maths.py b/nipype/interfaces/fsl/maths.py
index afa9328205..1b64511f9e 100644
--- a/nipype/interfaces/fsl/maths.py
+++ b/nipype/interfaces/fsl/maths.py
@@ -8,42 +8,35 @@
 import os
 import numpy as np

-from ..base import (TraitedSpec, File, traits, InputMultiPath, isdefined)
+from ..base import TraitedSpec, File, traits, InputMultiPath, isdefined
 from .base import FSLCommand, FSLCommandInputSpec


 class MathsInput(FSLCommandInputSpec):

     in_file = File(
-        position=2,
-        argstr="%s",
-        exists=True,
-        mandatory=True,
-        desc="image to operate on")
+        position=2, argstr="%s", exists=True, mandatory=True, desc="image to operate on"
+    )
     out_file = File(
-        genfile=True,
-        position=-2,
-        argstr="%s",
-        desc="image to write",
-        hash_files=False)
+        genfile=True, position=-2, argstr="%s", desc="image to write", hash_files=False
+    )
     _dtypes = ["float", "char", "int", "short", "double", "input"]
     internal_datatype = traits.Enum(
         *_dtypes,
         position=1,
         argstr="-dt %s",
-        desc=("datatype to use for calculations "
-              "(default is float)"))
+        desc=("datatype to use for calculations " "(default is float)")
+    )
     output_datatype = traits.Enum(
         *_dtypes,
         position=-1,
         argstr="-odt %s",
-        desc=("datatype to use for output (default "
-              "uses input type)"))
+        desc=("datatype to use for output (default " "uses input type)")
+    )
     nan2zeros = traits.Bool(
-        position=3,
-        argstr='-nan',
-        desc='change NaNs to zeros before doing anything')
+        position=3, argstr="-nan", desc="change NaNs to zeros before doing anything"
+    )


 class MathsOutput(TraitedSpec):
@@ -63,7 +56,8 @@ def _list_outputs(self):
         outputs["out_file"] = self.inputs.out_file
         if not isdefined(self.inputs.out_file):
             outputs["out_file"] = self._gen_fname(
-                self.inputs.in_file, suffix=self._suffix)
+                self.inputs.in_file, suffix=self._suffix
+            )
         outputs["out_file"] = os.path.abspath(outputs["out_file"])
         return outputs
@@ -77,17 +71,15 @@ class ChangeDataTypeInput(MathsInput):
     _dtypes = ["float", "char", "int", "short", "double", "input"]
     output_datatype = traits.Enum(
-        *_dtypes,
-        position=-1,
-        argstr="-odt %s",
-        mandatory=True,
-        desc="output data type")
+        *_dtypes, position=-1, argstr="-odt %s", mandatory=True, desc="output data type"
+    )


 class ChangeDataType(MathsCommand):
     """Use fslmaths to change the datatype of an image.
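+
+    A minimal usage sketch (illustrative; the file name is a placeholder,
+    and the output extension follows FSLOUTPUTTYPE)::
+
+        from nipype.interfaces.fsl import ChangeDataType
+
+        cdt = ChangeDataType(in_file="functional.nii", output_datatype="float")
+        cdt.cmdline  # 'fslmaths functional.nii functional_chdt.nii -odt float'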
""" + input_spec = ChangeDataTypeInput _suffix = "_chdt" @@ -95,23 +87,28 @@ class ChangeDataType(MathsCommand): class ThresholdInputSpec(MathsInput): thresh = traits.Float( - mandatory=True, position=4, argstr="%s", desc="threshold value") + mandatory=True, position=4, argstr="%s", desc="threshold value" + ) direction = traits.Enum( "below", "above", usedefault=True, - desc="zero-out either below or above thresh value") + desc="zero-out either below or above thresh value", + ) use_robust_range = traits.Bool( - desc="interpret thresh as percentage (0-100) of robust range") + desc="interpret thresh as percentage (0-100) of robust range" + ) use_nonzero_voxels = traits.Bool( desc="use nonzero voxels to calculate robust range", - requires=["use_robust_range"]) + requires=["use_robust_range"], + ) class Threshold(MathsCommand): """Use fslmaths to apply a threshold to an image in a variety of ways. """ + input_spec = ThresholdInputSpec _suffix = "_thresh" @@ -123,8 +120,7 @@ def _format_arg(self, name, spec, value): arg += "u" arg += "thr" if isdefined(_si.use_robust_range) and _si.use_robust_range: - if (isdefined(_si.use_nonzero_voxels) - and _si.use_nonzero_voxels): + if isdefined(_si.use_nonzero_voxels) and _si.use_nonzero_voxels: arg += "P" else: arg += "p" @@ -143,13 +139,15 @@ class StdImageInput(MathsInput): usedefault=True, argstr="-%sstd", position=4, - desc="dimension to standard deviate across") + desc="dimension to standard deviate across", + ) class StdImage(MathsCommand): """Use fslmaths to generate a standard deviation in an image across a given dimension. """ + input_spec = StdImageInput _suffix = "_std" @@ -164,13 +162,15 @@ class MeanImageInput(MathsInput): usedefault=True, argstr="-%smean", position=4, - desc="dimension to mean across") + desc="dimension to mean across", + ) class MeanImage(MathsCommand): """Use fslmaths to generate a mean image across a given dimension. """ + input_spec = MeanImageInput _suffix = "_mean" @@ -185,7 +185,8 @@ class MaxImageInput(MathsInput): usedefault=True, argstr="-%smax", position=4, - desc="dimension to max across") + desc="dimension to max across", + ) class MaxImage(MathsCommand): @@ -201,6 +202,7 @@ class MaxImage(MathsCommand): 'fslmaths functional.nii -Tmax functional_max.nii' """ + input_spec = MaxImageInput _suffix = "_max" @@ -215,14 +217,15 @@ class PercentileImageInput(MathsInput): usedefault=True, argstr="-%sperc", position=4, - desc="dimension to percentile across") + desc="dimension to percentile across", + ) perc = traits.Range( low=0, high=100, argstr="%f", position=5, - desc=("nth percentile (0-100) of FULL RANGE " - "across dimension")) + desc=("nth percentile (0-100) of FULL RANGE " "across dimension"), + ) class PercentileImage(MathsCommand): @@ -239,6 +242,7 @@ class PercentileImage(MathsCommand): 'fslmaths functional.nii -Tperc 90 functional_perc.nii' """ + input_spec = PercentileImageInput _suffix = "_perc" @@ -253,7 +257,8 @@ class MaxnImageInput(MathsInput): usedefault=True, argstr="-%smaxn", position=4, - desc="dimension to index max across") + desc="dimension to index max across", + ) class MaxnImage(MathsCommand): @@ -261,6 +266,7 @@ class MaxnImage(MathsCommand): a given dimension. """ + input_spec = MaxnImageInput _suffix = "_maxn" @@ -275,13 +281,15 @@ class MinImageInput(MathsInput): usedefault=True, argstr="-%smin", position=4, - desc="dimension to min across") + desc="dimension to min across", + ) class MinImage(MathsCommand): """Use fslmaths to generate a minimum image across a given dimension. 
""" + input_spec = MinImageInput _suffix = "_min" @@ -296,13 +304,15 @@ class MedianImageInput(MathsInput): usedefault=True, argstr="-%smedian", position=4, - desc="dimension to median across") + desc="dimension to median across", + ) class MedianImage(MathsCommand): """Use fslmaths to generate a median image across a given dimension. """ + input_spec = MedianImageInput _suffix = "_median" @@ -317,8 +327,8 @@ class AR1ImageInput(MathsInput): usedefault=True, argstr="-%sar1", position=4, - desc=("dimension to find AR(1) coefficient" - "across")) + desc=("dimension to find AR(1) coefficient" "across"), + ) class AR1Image(MathsCommand): @@ -326,6 +336,7 @@ class AR1Image(MathsCommand): given dimension. (Should use -odt float and probably demean first) """ + input_spec = AR1ImageInput _suffix = "_ar1" @@ -337,19 +348,22 @@ class IsotropicSmoothInput(MathsInput): xor=["sigma"], position=4, argstr="-s %.5f", - desc="fwhm of smoothing kernel [mm]") + desc="fwhm of smoothing kernel [mm]", + ) sigma = traits.Float( mandatory=True, xor=["fwhm"], position=4, argstr="-s %.5f", - desc="sigma of smoothing kernel [mm]") + desc="sigma of smoothing kernel [mm]", + ) class IsotropicSmooth(MathsCommand): """Use fslmaths to spatially smooth an image with a gaussian kernel. """ + input_spec = IsotropicSmoothInput _suffix = "_smooth" @@ -367,13 +381,15 @@ class ApplyMaskInput(MathsInput): mandatory=True, argstr="-mas %s", position=4, - desc="binary image defining mask space") + desc="binary image defining mask space", + ) class ApplyMask(MathsCommand): """Use fslmaths to apply a binary mask to another image. """ + input_spec = ApplyMaskInput _suffix = "_masked" @@ -390,19 +406,23 @@ class KernelInput(MathsInput): "file", argstr="-kernel %s", position=4, - desc="kernel shape to use") + desc="kernel shape to use", + ) kernel_size = traits.Float( argstr="%.4f", position=5, xor=["kernel_file"], - desc=("kernel size - voxels for box/boxv, mm " - "for sphere, mm sigma for gauss")) + desc=( + "kernel size - voxels for box/boxv, mm " "for sphere, mm sigma for gauss" + ), + ) kernel_file = File( exists=True, argstr="%s", position=5, xor=["kernel_size"], - desc="use external file for kernel") + desc="use external file for kernel", + ) class DilateInput(KernelInput): @@ -414,13 +434,15 @@ class DilateInput(KernelInput): argstr="-dil%s", position=6, mandatory=True, - desc="filtering operation to perfoem in dilation") + desc="filtering operation to perfoem in dilation", + ) class DilateImage(MathsCommand): """Use fslmaths to perform a spatial dilation of an image. """ + input_spec = DilateInput _suffix = "_dil" @@ -437,14 +459,15 @@ class ErodeInput(KernelInput): position=6, usedefault=True, default_value=False, - desc=("if true, minimum filter rather than " - "erosion by zeroing-out")) + desc=("if true, minimum filter rather than " "erosion by zeroing-out"), + ) class ErodeImage(MathsCommand): """Use fslmaths to perform a spatial erosion of an image. """ + input_spec = ErodeInput _suffix = "_ero" @@ -465,13 +488,15 @@ class SpatialFilterInput(KernelInput): argstr="-f%s", position=6, mandatory=True, - desc="operation to filter with") + desc="operation to filter with", + ) class SpatialFilter(MathsCommand): """Use fslmaths to spatially filter an image. 
""" + input_spec = SpatialFilterInput _suffix = "_filt" @@ -505,13 +530,15 @@ class UnaryMathsInput(MathsInput): argstr="-%s", position=4, mandatory=True, - desc="operation to perform") + desc="operation to perform", + ) class UnaryMaths(MathsCommand): """Use fslmaths to perorm a variety of mathematical operations on an image. """ + input_spec = UnaryMathsInput def _list_outputs(self): @@ -532,20 +559,23 @@ class BinaryMathsInput(MathsInput): mandatory=True, argstr="-%s", position=4, - desc="operation to perform") + desc="operation to perform", + ) operand_file = File( exists=True, argstr="%s", mandatory=True, position=5, xor=["operand_value"], - desc="second image to perform operation with") + desc="second image to perform operation with", + ) operand_value = traits.Float( argstr="%.8f", mandatory=True, position=5, xor=["operand_file"], - desc="value to perform operation with") + desc="value to perform operation with", + ) class BinaryMaths(MathsCommand): @@ -553,6 +583,7 @@ class BinaryMaths(MathsCommand): a numeric value. """ + input_spec = BinaryMathsInput @@ -562,13 +593,13 @@ class MultiImageMathsInput(MathsInput): position=4, argstr="%s", mandatory=True, - desc=("python formatted string of operations " - "to perform")) + desc=("python formatted string of operations " "to perform"), + ) operand_files = InputMultiPath( File(exists=True), mandatory=True, - desc=("list of file names to plug into op " - "string")) + desc=("list of file names to plug into op " "string"), + ) class MultiImageMaths(MathsCommand): @@ -586,6 +617,7 @@ class MultiImageMaths(MathsCommand): 'fslmaths functional.nii -add functional2.nii -mul -1 -div functional3.nii functional4.nii' """ + input_spec = MultiImageMathsInput def _format_arg(self, name, spec, value): @@ -601,13 +633,15 @@ class TemporalFilterInput(MathsInput): argstr="%.6f", position=5, usedefault=True, - desc="lowpass filter sigma (in volumes)") + desc="lowpass filter sigma (in volumes)", + ) highpass_sigma = traits.Float( -1, argstr="-bptf %.6f", position=4, usedefault=True, - desc="highpass filter sigma (in volumes)") + desc="highpass filter sigma (in volumes)", + ) class TemporalFilter(MathsCommand): @@ -615,5 +649,6 @@ class TemporalFilter(MathsCommand): timeseries. 
""" + input_spec = TemporalFilterInput _suffix = "_filt" diff --git a/nipype/interfaces/fsl/model.py b/nipype/interfaces/fsl/model.py index 5e176ff414..b4e04c690e 100644 --- a/nipype/interfaces/fsl/model.py +++ b/nipype/interfaces/fsl/model.py @@ -17,76 +17,113 @@ from ...utils.filemanip import simplify_list, ensure_list from ...utils.misc import human_order_sorted from ...external.due import BibTeX -from ..base import (File, traits, isdefined, TraitedSpec, BaseInterface, - Directory, InputMultiPath, OutputMultiPath, - BaseInterfaceInputSpec) +from ..base import ( + File, + traits, + isdefined, + TraitedSpec, + BaseInterface, + Directory, + InputMultiPath, + OutputMultiPath, + BaseInterfaceInputSpec, +) from .base import FSLCommand, FSLCommandInputSpec, Info class Level1DesignInputSpec(BaseInterfaceInputSpec): interscan_interval = traits.Float( - mandatory=True, desc='Interscan interval (in secs)') + mandatory=True, desc="Interscan interval (in secs)" + ) session_info = traits.Any( mandatory=True, - desc=('Session specific information generated ' - 'by ``modelgen.SpecifyModel``')) + desc=("Session specific information generated " "by ``modelgen.SpecifyModel``"), + ) bases = traits.Either( traits.Dict( - traits.Enum('dgamma'), - traits.Dict(traits.Enum('derivs'), traits.Bool)), + traits.Enum("dgamma"), traits.Dict(traits.Enum("derivs"), traits.Bool) + ), traits.Dict( - traits.Enum('gamma'), - traits.Dict(traits.Enum('derivs', 'gammasigma', 'gammadelay'))), + traits.Enum("gamma"), + traits.Dict(traits.Enum("derivs", "gammasigma", "gammadelay")), + ), traits.Dict( - traits.Enum('custom'), - traits.Dict(traits.Enum('bfcustompath'), traits.Str)), - traits.Dict(traits.Enum('none'), traits.Dict()), - traits.Dict(traits.Enum('none'), traits.Enum(None)), + traits.Enum("custom"), traits.Dict(traits.Enum("bfcustompath"), traits.Str) + ), + traits.Dict(traits.Enum("none"), traits.Dict()), + traits.Dict(traits.Enum("none"), traits.Enum(None)), mandatory=True, - desc=("name of basis function and options e.g., " - "{'dgamma': {'derivs': True}}"), + desc=( + "name of basis function and options e.g., " "{'dgamma': {'derivs': True}}" + ), ) orthogonalization = traits.Dict( traits.Int, traits.Dict(traits.Int, traits.Either(traits.Bool, traits.Int)), - desc=("which regressors to make orthogonal e.g., " - "{1: {0:0,1:0,2:0}, 2: {0:1,1:1,2:0}} to make the second " - "regressor in a 2-regressor model orthogonal to the first."), - usedefault=True) + desc=( + "which regressors to make orthogonal e.g., " + "{1: {0:0,1:0,2:0}, 2: {0:1,1:1,2:0}} to make the second " + "regressor in a 2-regressor model orthogonal to the first." + ), + usedefault=True, + ) model_serial_correlations = traits.Bool( desc="Option to model serial correlations using an \ autoregressive estimator (order 1). Setting this option is only \ useful in the context of the fsf file. 
If you set this to False, you need to \ repeat this option for FILMGLS by setting autocorr_noestimate to True", - mandatory=True) + mandatory=True, + ) contrasts = traits.List( traits.Either( - traits.Tuple(traits.Str, traits.Enum('T'), traits.List(traits.Str), - traits.List(traits.Float)), - traits.Tuple(traits.Str, traits.Enum('T'), traits.List(traits.Str), - traits.List(traits.Float), traits.List(traits.Float)), - traits.Tuple(traits.Str, traits.Enum('F'), - traits.List( - traits.Either( - traits.Tuple(traits.Str, traits.Enum('T'), - traits.List(traits.Str), - traits.List(traits.Float)), - traits.Tuple(traits.Str, traits.Enum('T'), - traits.List(traits.Str), - traits.List(traits.Float), - traits.List(traits.Float)))))), + traits.Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + ), + traits.Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + traits.List(traits.Float), + ), + traits.Tuple( + traits.Str, + traits.Enum("F"), + traits.List( + traits.Either( + traits.Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + ), + traits.Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + traits.List(traits.Float), + ), + ) + ), + ), + ), desc="List of contrasts with each contrast being a list of the form - \ [('name', 'stat', [condition list], [weight list], [session list])]. if \ session list is None or not provided, all sessions are used. For F \ contrasts, the condition list should contain previously defined \ -T-contrasts.") +T-contrasts.", + ) class Level1DesignOutputSpec(TraitedSpec): - fsf_files = OutputMultiPath( - File(exists=True), desc='FSL feat specification files') + fsf_files = OutputMultiPath(File(exists=True), desc="FSL feat specification files") ev_files = OutputMultiPath( - traits.List(File(exists=True)), desc='condition information files') + traits.List(File(exists=True)), desc="condition information files" + ) class Level1Design(BaseInterface): @@ -107,17 +144,25 @@ class Level1Design(BaseInterface): output_spec = Level1DesignOutputSpec def _create_ev_file(self, evfname, evinfo): - f = open(evfname, 'wt') + f = open(evfname, "wt") for i in evinfo: if len(i) == 3: - f.write('%f %f %f\n' % (i[0], i[1], i[2])) + f.write("%f %f %f\n" % (i[0], i[1], i[2])) else: - f.write('%f\n' % i[0]) + f.write("%f\n" % i[0]) f.close() - def _create_ev_files(self, cwd, runinfo, runidx, ev_parameters, - orthogonalization, contrasts, do_tempfilter, - basis_key): + def _create_ev_files( + self, + cwd, + runinfo, + runidx, + ev_parameters, + orthogonalization, + contrasts, + do_tempfilter, + basis_key, + ): """Creates EV files from condition and regressor information. 
Parameters: @@ -142,72 +187,74 @@ def _create_ev_files(self, cwd, runinfo, runidx, ev_parameters, basis_key = "hrf" elif basis_key == "gamma": try: - _ = ev_parameters['gammasigma'] + _ = ev_parameters["gammasigma"] except KeyError: - ev_parameters['gammasigma'] = 3 + ev_parameters["gammasigma"] = 3 try: - _ = ev_parameters['gammadelay'] + _ = ev_parameters["gammadelay"] except KeyError: - ev_parameters['gammadelay'] = 6 - ev_template = load_template('feat_ev_' + basis_key + '.tcl') - ev_none = load_template('feat_ev_none.tcl') - ev_ortho = load_template('feat_ev_ortho.tcl') - ev_txt = '' + ev_parameters["gammadelay"] = 6 + ev_template = load_template("feat_ev_" + basis_key + ".tcl") + ev_none = load_template("feat_ev_none.tcl") + ev_ortho = load_template("feat_ev_ortho.tcl") + ev_txt = "" # generate sections for conditions and other nuisance # regressors num_evs = [0, 0] - for field in ['cond', 'regress']: + for field in ["cond", "regress"]: for i, cond in enumerate(runinfo[field]): - name = cond['name'] + name = cond["name"] evname.append(name) - evfname = os.path.join(cwd, 'ev_%s_%d_%d.txt' % (name, runidx, - len(evname))) + evfname = os.path.join( + cwd, "ev_%s_%d_%d.txt" % (name, runidx, len(evname)) + ) evinfo = [] num_evs[0] += 1 num_evs[1] += 1 - if field == 'cond': - for j, onset in enumerate(cond['onset']): + if field == "cond": + for j, onset in enumerate(cond["onset"]): try: - amplitudes = cond['amplitudes'] + amplitudes = cond["amplitudes"] if len(amplitudes) > 1: amp = amplitudes[j] else: amp = amplitudes[0] except KeyError: amp = 1 - if len(cond['duration']) > 1: - evinfo.insert(j, [onset, cond['duration'][j], amp]) + if len(cond["duration"]) > 1: + evinfo.insert(j, [onset, cond["duration"][j], amp]) else: - evinfo.insert(j, [onset, cond['duration'][0], amp]) - ev_parameters['cond_file'] = evfname - ev_parameters['ev_num'] = num_evs[0] - ev_parameters['ev_name'] = name - ev_parameters['tempfilt_yn'] = do_tempfilter - if 'basisorth' not in ev_parameters: - ev_parameters['basisorth'] = 1 - if 'basisfnum' not in ev_parameters: - ev_parameters['basisfnum'] = 1 + evinfo.insert(j, [onset, cond["duration"][0], amp]) + ev_parameters["cond_file"] = evfname + ev_parameters["ev_num"] = num_evs[0] + ev_parameters["ev_name"] = name + ev_parameters["tempfilt_yn"] = do_tempfilter + if "basisorth" not in ev_parameters: + ev_parameters["basisorth"] = 1 + if "basisfnum" not in ev_parameters: + ev_parameters["basisfnum"] = 1 try: - ev_parameters['fsldir'] = os.environ['FSLDIR'] + ev_parameters["fsldir"] = os.environ["FSLDIR"] except KeyError: - if basis_key == 'flobs': - raise Exception( - 'FSL environment variables not set') + if basis_key == "flobs": + raise Exception("FSL environment variables not set") else: - ev_parameters['fsldir'] = '/usr/share/fsl' - ev_parameters['temporalderiv'] = int( - bool(ev_parameters.get('derivs', False))) - if ev_parameters['temporalderiv']: - evname.append(name + 'TD') + ev_parameters["fsldir"] = "/usr/share/fsl" + ev_parameters["temporalderiv"] = int( + bool(ev_parameters.get("derivs", False)) + ) + if ev_parameters["temporalderiv"]: + evname.append(name + "TD") num_evs[1] += 1 ev_txt += ev_template.substitute(ev_parameters) - elif field == 'regress': - evinfo = [[j] for j in cond['val']] + elif field == "regress": + evinfo = [[j] for j in cond["val"]] ev_txt += ev_none.substitute( ev_num=num_evs[0], ev_name=name, tempfilt_yn=do_tempfilter, - cond_file=evfname) + cond_file=evfname, + ) ev_txt += "\n" conds[name] = evfname self._create_ev_file(evfname, 
evinfo) @@ -221,20 +268,17 @@ def _create_ev_files(self, cwd, runinfo, runidx, ev_parameters, orthogonal = 0 if orthogonal == 1 and initial not in ev_txt: ev_txt += initial + "\n" - ev_txt += ev_ortho.substitute(c0=i, c1=j, - orthogonal=orthogonal) + ev_txt += ev_ortho.substitute(c0=i, c1=j, orthogonal=orthogonal) ev_txt += "\n" # add contrast info to fsf file if isdefined(contrasts): - contrast_header = load_template('feat_contrast_header.tcl') - contrast_prolog = load_template('feat_contrast_prolog.tcl') - contrast_element = load_template('feat_contrast_element.tcl') - contrast_ftest_element = load_template( - 'feat_contrast_ftest_element.tcl') - contrastmask_header = load_template('feat_contrastmask_header.tcl') - contrastmask_footer = load_template('feat_contrastmask_footer.tcl') - contrastmask_element = load_template( - 'feat_contrastmask_element.tcl') + contrast_header = load_template("feat_contrast_header.tcl") + contrast_prolog = load_template("feat_contrast_prolog.tcl") + contrast_element = load_template("feat_contrast_element.tcl") + contrast_ftest_element = load_template("feat_contrast_ftest_element.tcl") + contrastmask_header = load_template("feat_contrastmask_header.tcl") + contrastmask_footer = load_template("feat_contrastmask_footer.tcl") + contrastmask_element = load_template("feat_contrastmask_element.tcl") # add t/f contrast info ev_txt += contrast_header.substitute() con_names = [] @@ -244,7 +288,7 @@ def _create_ev_files(self, cwd, runinfo, runidx, ev_parameters, ftest_idx = [] ttest_idx = [] for j, con in enumerate(contrasts): - if con[1] == 'F': + if con[1] == "F": ftest_idx.append(j) for c in con[2]: if c[0] not in list(con_map.keys()): @@ -253,16 +297,17 @@ def _create_ev_files(self, cwd, runinfo, runidx, ev_parameters, else: ttest_idx.append(j) - for ctype in ['real', 'orig']: + for ctype in ["real", "orig"]: for j, con in enumerate(contrasts): - if con[1] == 'F': + if con[1] == "F": continue tidx = ttest_idx.index(j) + 1 ev_txt += contrast_prolog.substitute( - cnum=tidx, ctype=ctype, cname=con[0]) + cnum=tidx, ctype=ctype, cname=con[0] + ) count = 0 for c in range(1, len(evname) + 1): - if evname[c - 1].endswith('TD') and ctype == 'orig': + if evname[c - 1].endswith("TD") and ctype == "orig": continue count = count + 1 if evname[c - 1] in con[2]: @@ -270,19 +315,20 @@ def _create_ev_files(self, cwd, runinfo, runidx, ev_parameters, else: val = 0.0 ev_txt += contrast_element.substitute( - cnum=tidx, element=count, ctype=ctype, val=val) + cnum=tidx, element=count, ctype=ctype, val=val + ) ev_txt += "\n" for fconidx in ftest_idx: fval = 0 - if (con[0] in con_map.keys() - and fconidx in con_map[con[0]]): + if con[0] in con_map.keys() and fconidx in con_map[con[0]]: fval = 1 ev_txt += contrast_ftest_element.substitute( cnum=ftest_idx.index(fconidx) + 1, element=tidx, ctype=ctype, - val=fval) + val=fval, + ) ev_txt += "\n" # add contrast mask info @@ -290,8 +336,7 @@ def _create_ev_files(self, cwd, runinfo, runidx, ev_parameters, for j, _ in enumerate(contrasts): for k, _ in enumerate(contrasts): if j != k: - ev_txt += contrastmask_element.substitute( - c1=j + 1, c2=k + 1) + ev_txt += contrastmask_element.substitute(c1=j + 1, c2=k + 1) ev_txt += contrastmask_footer.substitute() return num_evs, ev_txt @@ -305,13 +350,13 @@ def _get_func_files(self, session_info): """ func_files = [] for i, info in enumerate(session_info): - func_files.insert(i, info['scans']) + func_files.insert(i, info["scans"]) return func_files def _run_interface(self, runtime): cwd = os.getcwd() - 
fsf_header = load_template('feat_header_l1.tcl') - fsf_postscript = load_template('feat_nongui.tcl') + fsf_header = load_template("feat_header_l1.tcl") + fsf_postscript = load_template("feat_nongui.tcl") prewhiten = 0 if isdefined(self.inputs.model_serial_correlations): @@ -324,18 +369,25 @@ def _run_interface(self, runtime): n_fcon = 0 if isdefined(self.inputs.contrasts): for i, c in enumerate(self.inputs.contrasts): - if c[1] == 'T': + if c[1] == "T": n_tcon += 1 - elif c[1] == 'F': + elif c[1] == "F": n_fcon += 1 for i, info in enumerate(session_info): do_tempfilter = 1 - if info['hpf'] == np.inf: + if info["hpf"] == np.inf: do_tempfilter = 0 num_evs, cond_txt = self._create_ev_files( - cwd, info, i, ev_parameters, self.inputs.orthogonalization, - self.inputs.contrasts, do_tempfilter, basis_key) + cwd, + info, + i, + ev_parameters, + self.inputs.orthogonalization, + self.inputs.contrasts, + do_tempfilter, + basis_key, + ) nim = load(func_files[i]) (_, _, _, timepoints) = nim.shape fsf_txt = fsf_header.substitute( @@ -347,13 +399,14 @@ def _run_interface(self, runtime): num_evs_real=num_evs[1], num_tcon=n_tcon, num_fcon=n_fcon, - high_pass_filter_cutoff=info['hpf'], + high_pass_filter_cutoff=info["hpf"], temphp_yn=do_tempfilter, - func_file=func_files[i]) + func_file=func_files[i], + ) fsf_txt += cond_txt fsf_txt += fsf_postscript.substitute(overwrite=1) - f = open(os.path.join(cwd, 'run%d.fsf' % i), 'w') + f = open(os.path.join(cwd, "run%d.fsf" % i), "w") f.write(fsf_txt) f.close() @@ -362,29 +415,30 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self.output_spec().get() cwd = os.getcwd() - outputs['fsf_files'] = [] - outputs['ev_files'] = [] + outputs["fsf_files"] = [] + outputs["ev_files"] = [] basis_key = list(self.inputs.bases.keys())[0] ev_parameters = dict(self.inputs.bases[basis_key]) for runno, runinfo in enumerate( - self._format_session_info(self.inputs.session_info)): - outputs['fsf_files'].append(os.path.join(cwd, 'run%d.fsf' % runno)) - outputs['ev_files'].insert(runno, []) + self._format_session_info(self.inputs.session_info) + ): + outputs["fsf_files"].append(os.path.join(cwd, "run%d.fsf" % runno)) + outputs["ev_files"].insert(runno, []) evname = [] - for field in ['cond', 'regress']: + for field in ["cond", "regress"]: for i, cond in enumerate(runinfo[field]): - name = cond['name'] + name = cond["name"] evname.append(name) - evfname = os.path.join(cwd, - 'ev_%s_%d_%d.txt' % (name, runno, - len(evname))) - if field == 'cond': - ev_parameters['temporalderiv'] = int( - bool(ev_parameters.get('derivs', False))) - if ev_parameters['temporalderiv']: - evname.append(name + 'TD') - outputs['ev_files'][runno].append( - os.path.join(cwd, evfname)) + evfname = os.path.join( + cwd, "ev_%s_%d_%d.txt" % (name, runno, len(evname)) + ) + if field == "cond": + ev_parameters["temporalderiv"] = int( + bool(ev_parameters.get("derivs", False)) + ) + if ev_parameters["temporalderiv"]: + evname.append(name + "TD") + outputs["ev_files"][runno].append(os.path.join(cwd, evfname)) return outputs @@ -394,7 +448,8 @@ class FEATInputSpec(FSLCommandInputSpec): mandatory=True, argstr="%s", position=0, - desc="File specifying the feat design spec file") + desc="File specifying the feat design spec file", + ) class FEATOutputSpec(TraitedSpec): @@ -404,35 +459,34 @@ class FEATOutputSpec(TraitedSpec): class FEAT(FSLCommand): """Uses FSL feat to calculate first level stats """ - _cmd = 'feat' + + _cmd = "feat" input_spec = FEATInputSpec output_spec = FEATOutputSpec def 
_list_outputs(self): outputs = self._outputs().get() is_ica = False - outputs['feat_dir'] = None - with open(self.inputs.fsf_file, 'rt') as fp: + outputs["feat_dir"] = None + with open(self.inputs.fsf_file, "rt") as fp: text = fp.read() if "set fmri(inmelodic) 1" in text: is_ica = True - for line in text.split('\n'): + for line in text.split("\n"): if line.find("set fmri(outputdir)") > -1: try: outputdir_spec = line.split('"')[-2] if os.path.exists(outputdir_spec): - outputs['feat_dir'] = outputdir_spec + outputs["feat_dir"] = outputdir_spec except: pass - if not outputs['feat_dir']: + if not outputs["feat_dir"]: if is_ica: - outputs['feat_dir'] = glob(os.path.join(os.getcwd(), - '*ica'))[0] + outputs["feat_dir"] = glob(os.path.join(os.getcwd(), "*ica"))[0] else: - outputs['feat_dir'] = glob(os.path.join(os.getcwd(), - '*feat'))[0] - print('Outputs from FEATmodel:', outputs) + outputs["feat_dir"] = glob(os.path.join(os.getcwd(), "*feat"))[0] + print("Outputs from FEATmodel:", outputs) return outputs @@ -443,136 +497,141 @@ class FEATModelInputSpec(FSLCommandInputSpec): argstr="%s", position=0, desc="File specifying the feat design spec file", - copyfile=False) + copyfile=False, + ) ev_files = traits.List( File(exists=True), mandatory=True, argstr="%s", desc="Event spec files generated by level1design", position=1, - copyfile=False) + copyfile=False, + ) class FEATModelOutpuSpec(TraitedSpec): - design_file = File( - exists=True, desc='Mat file containing ascii matrix for design') - design_image = File( - exists=True, desc='Graphical representation of design matrix') - design_cov = File( - exists=True, desc='Graphical representation of design covariance') - con_file = File( - exists=True, desc='Contrast file containing contrast vectors') - fcon_file = File(desc='Contrast file containing contrast vectors') + design_file = File(exists=True, desc="Mat file containing ascii matrix for design") + design_image = File(exists=True, desc="Graphical representation of design matrix") + design_cov = File(exists=True, desc="Graphical representation of design covariance") + con_file = File(exists=True, desc="Contrast file containing contrast vectors") + fcon_file = File(desc="Contrast file containing contrast vectors") class FEATModel(FSLCommand): """Uses FSL feat_model to generate design.mat files """ - _cmd = 'feat_model' + + _cmd = "feat_model" input_spec = FEATModelInputSpec output_spec = FEATModelOutpuSpec def _format_arg(self, name, trait_spec, value): - if name == 'fsf_file': - return super(FEATModel, - self)._format_arg(name, trait_spec, - self._get_design_root(value)) - elif name == 'ev_files': - return '' + if name == "fsf_file": + return super(FEATModel, self)._format_arg( + name, trait_spec, self._get_design_root(value) + ) + elif name == "ev_files": + return "" else: return super(FEATModel, self)._format_arg(name, trait_spec, value) def _get_design_root(self, infile): _, fname = os.path.split(infile) - return fname.split('.')[0] + return fname.split(".")[0] def _list_outputs(self): # TODO: figure out file names and get rid off the globs outputs = self._outputs().get() root = self._get_design_root(simplify_list(self.inputs.fsf_file)) - design_file = glob(os.path.join(os.getcwd(), '%s*.mat' % root)) - assert len(design_file) == 1, 'No mat file generated by FEAT Model' - outputs['design_file'] = design_file[0] - design_image = glob(os.path.join(os.getcwd(), '%s.png' % root)) - assert len( - design_image) == 1, 'No design image generated by FEAT Model' - outputs['design_image'] = design_image[0] 
-        design_cov = glob(os.path.join(os.getcwd(), '%s_cov.png' % root))
-        assert len(
-            design_cov) == 1, 'No covariance image generated by FEAT Model'
-        outputs['design_cov'] = design_cov[0]
-        con_file = glob(os.path.join(os.getcwd(), '%s*.con' % root))
-        assert len(con_file) == 1, 'No con file generated by FEAT Model'
-        outputs['con_file'] = con_file[0]
-        fcon_file = glob(os.path.join(os.getcwd(), '%s*.fts' % root))
+        design_file = glob(os.path.join(os.getcwd(), "%s*.mat" % root))
+        assert len(design_file) == 1, "No mat file generated by FEAT Model"
+        outputs["design_file"] = design_file[0]
+        design_image = glob(os.path.join(os.getcwd(), "%s.png" % root))
+        assert len(design_image) == 1, "No design image generated by FEAT Model"
+        outputs["design_image"] = design_image[0]
+        design_cov = glob(os.path.join(os.getcwd(), "%s_cov.png" % root))
+        assert len(design_cov) == 1, "No covariance image generated by FEAT Model"
+        outputs["design_cov"] = design_cov[0]
+        con_file = glob(os.path.join(os.getcwd(), "%s*.con" % root))
+        assert len(con_file) == 1, "No con file generated by FEAT Model"
+        outputs["con_file"] = con_file[0]
+        fcon_file = glob(os.path.join(os.getcwd(), "%s*.fts" % root))
         if fcon_file:
-            assert len(fcon_file) == 1, 'No fts file generated by FEAT Model'
-            outputs['fcon_file'] = fcon_file[0]
+            assert len(fcon_file) == 1, "No fts file generated by FEAT Model"
+            outputs["fcon_file"] = fcon_file[0]
         return outputs


 class FILMGLSInputSpec(FSLCommandInputSpec):
     in_file = File(
-        exists=True,
-        mandatory=True,
-        position=-3,
-        argstr='%s',
-        desc='input data file')
-    design_file = File(
-        exists=True, position=-2, argstr='%s', desc='design matrix file')
+        exists=True, mandatory=True, position=-3, argstr="%s", desc="input data file"
+    )
+    design_file = File(exists=True, position=-2, argstr="%s", desc="design matrix file")
     threshold = traits.Range(
-        value=1000.,
+        value=1000.0,
         low=0.0,
-        argstr='%f',
+        argstr="%f",
         position=-1,
         usedefault=True,
-        desc='threshold')
-    smooth_autocorr = traits.Bool(
-        argstr='-sa', desc='Smooth auto corr estimates')
-    mask_size = traits.Int(argstr='-ms %d', desc="susan mask size")
+        desc="threshold",
+    )
+    smooth_autocorr = traits.Bool(argstr="-sa", desc="Smooth auto corr estimates")
+    mask_size = traits.Int(argstr="-ms %d", desc="susan mask size")
     brightness_threshold = traits.Range(
         low=0,
-        argstr='-epith %d',
-        desc=('susan brightness threshold, '
-              'otherwise it is estimated'))
-    full_data = traits.Bool(argstr='-v', desc='output full data')
+        argstr="-epith %d",
+        desc=("susan brightness threshold, " "otherwise it is estimated"),
+    )
+    full_data = traits.Bool(argstr="-v", desc="output full data")
     _estimate_xor = [
-        'autocorr_estimate_only', 'fit_armodel', 'tukey_window',
-        'multitaper_product', 'use_pava', 'autocorr_noestimate'
+        "autocorr_estimate_only",
+        "fit_armodel",
+        "tukey_window",
+        "multitaper_product",
+        "use_pava",
+        "autocorr_noestimate",
     ]
     autocorr_estimate_only = traits.Bool(
-        argstr='-ac',
+        argstr="-ac",
         xor=_estimate_xor,
-        desc=('perform autocorrelation '
-              'estimatation only'))
+        desc=("perform autocorrelation " "estimation only"),
+    )
     fit_armodel = traits.Bool(
-        argstr='-ar',
+        argstr="-ar",
         xor=_estimate_xor,
-        desc=('fits autoregressive model - default is '
-              'to use tukey with M=sqrt(numvols)'))
+        desc=(
+            "fits autoregressive model - default is "
+            "to use tukey with M=sqrt(numvols)"
+        ),
+    )
     tukey_window = traits.Int(
-        argstr='-tukey %d',
+        argstr="-tukey %d",
         xor=_estimate_xor,
-        desc='tukey window size to estimate autocorr')
+ desc="tukey window size to estimate autocorr", + ) multitaper_product = traits.Int( - argstr='-mt %d', + argstr="-mt %d", xor=_estimate_xor, - desc=('multitapering with slepian tapers ' - 'and num is the time-bandwidth ' - 'product')) - use_pava = traits.Bool( - argstr='-pava', desc='estimates autocorr using PAVA') + desc=( + "multitapering with slepian tapers " + "and num is the time-bandwidth " + "product" + ), + ) + use_pava = traits.Bool(argstr="-pava", desc="estimates autocorr using PAVA") autocorr_noestimate = traits.Bool( - argstr='-noest', xor=_estimate_xor, desc='do not estimate autocorrs') + argstr="-noest", xor=_estimate_xor, desc="do not estimate autocorrs" + ) output_pwdata = traits.Bool( - argstr='-output_pwdata', - desc=('output prewhitened data and average ' - 'design matrix')) + argstr="-output_pwdata", + desc=("output prewhitened data and average " "design matrix"), + ) results_dir = Directory( - 'results', - argstr='-rn %s', + "results", + argstr="-rn %s", usedefault=True, - desc='directory to store results in') + desc="directory to store results in", + ) class FILMGLSInputSpec505(FSLCommandInputSpec): @@ -580,142 +639,154 @@ class FILMGLSInputSpec505(FSLCommandInputSpec): exists=True, mandatory=True, position=-3, - argstr='--in=%s', - desc='input data file') + argstr="--in=%s", + desc="input data file", + ) design_file = File( - exists=True, position=-2, argstr='--pd=%s', desc='design matrix file') + exists=True, position=-2, argstr="--pd=%s", desc="design matrix file" + ) threshold = traits.Range( - value=1000., + value=1000.0, low=0.0, - argstr='--thr=%f', + argstr="--thr=%f", position=-1, usedefault=True, - desc='threshold') - smooth_autocorr = traits.Bool( - argstr='--sa', desc='Smooth auto corr estimates') - mask_size = traits.Int(argstr='--ms=%d', desc="susan mask size") + desc="threshold", + ) + smooth_autocorr = traits.Bool(argstr="--sa", desc="Smooth auto corr estimates") + mask_size = traits.Int(argstr="--ms=%d", desc="susan mask size") brightness_threshold = traits.Range( low=0, - argstr='--epith=%d', - desc=('susan brightness threshold, ' - 'otherwise it is estimated')) - full_data = traits.Bool(argstr='-v', desc='output full data') + argstr="--epith=%d", + desc=("susan brightness threshold, " "otherwise it is estimated"), + ) + full_data = traits.Bool(argstr="-v", desc="output full data") _estimate_xor = [ - 'autocorr_estimate_only', 'fit_armodel', 'tukey_window', - 'multitaper_product', 'use_pava', 'autocorr_noestimate' + "autocorr_estimate_only", + "fit_armodel", + "tukey_window", + "multitaper_product", + "use_pava", + "autocorr_noestimate", ] autocorr_estimate_only = traits.Bool( - argstr='--ac', + argstr="--ac", xor=_estimate_xor, - desc=('perform autocorrelation ' - 'estimation only')) + desc=("perform autocorrelation " "estimation only"), + ) fit_armodel = traits.Bool( - argstr='--ar', + argstr="--ar", xor=_estimate_xor, - desc=('fits autoregressive model - default is ' - 'to use tukey with M=sqrt(numvols)')) + desc=( + "fits autoregressive model - default is " + "to use tukey with M=sqrt(numvols)" + ), + ) tukey_window = traits.Int( - argstr='--tukey=%d', + argstr="--tukey=%d", xor=_estimate_xor, - desc='tukey window size to estimate autocorr') + desc="tukey window size to estimate autocorr", + ) multitaper_product = traits.Int( - argstr='--mt=%d', + argstr="--mt=%d", xor=_estimate_xor, - desc=('multitapering with slepian tapers ' - 'and num is the time-bandwidth ' - 'product')) - use_pava = traits.Bool( - argstr='--pava', desc='estimates autocorr 
using PAVA') + desc=( + "multitapering with slepian tapers " + "and num is the time-bandwidth " + "product" + ), + ) + use_pava = traits.Bool(argstr="--pava", desc="estimates autocorr using PAVA") autocorr_noestimate = traits.Bool( - argstr='--noest', xor=_estimate_xor, desc='do not estimate autocorrs') + argstr="--noest", xor=_estimate_xor, desc="do not estimate autocorrs" + ) output_pwdata = traits.Bool( - argstr='--outputPWdata', - desc=('output prewhitened data and average ' - 'design matrix')) + argstr="--outputPWdata", + desc=("output prewhitened data and average " "design matrix"), + ) results_dir = Directory( - 'results', - argstr='--rn=%s', + "results", + argstr="--rn=%s", usedefault=True, - desc='directory to store results in') + desc="directory to store results in", + ) class FILMGLSInputSpec507(FILMGLSInputSpec505): threshold = traits.Float( - default_value=-1000., - argstr='--thr=%f', + default_value=-1000.0, + argstr="--thr=%f", position=-1, usedefault=True, - desc='threshold') + desc="threshold", + ) tcon_file = File( - exists=True, - argstr='--con=%s', - desc='contrast file containing T-contrasts') + exists=True, argstr="--con=%s", desc="contrast file containing T-contrasts" + ) fcon_file = File( - exists=True, - argstr='--fcon=%s', - desc='contrast file containing F-contrasts') + exists=True, argstr="--fcon=%s", desc="contrast file containing F-contrasts" + ) mode = traits.Enum( - 'volumetric', - 'surface', - argstr="--mode=%s", - desc="Type of analysis to be done") + "volumetric", "surface", argstr="--mode=%s", desc="Type of analysis to be done" + ) surface = File( exists=True, argstr="--in2=%s", - desc=("input surface for autocorr smoothing in " - "surface-based analyses")) + desc=("input surface for autocorr smoothing in " "surface-based analyses"), + ) class FILMGLSOutputSpec(TraitedSpec): param_estimates = OutputMultiPath( File(exists=True), - desc=('Parameter estimates for each ' - 'column of the design matrix')) + desc=("Parameter estimates for each " "column of the design matrix"), + ) residual4d = File( exists=True, - desc=('Model fit residual mean-squared error for each ' - 'time point')) - dof_file = File(exists=True, desc='degrees of freedom') + desc=("Model fit residual mean-squared error for each " "time point"), + ) + dof_file = File(exists=True, desc="degrees of freedom") sigmasquareds = File( - exists=True, desc='summary of residuals, See Woolrich, et. al., 2001') + exists=True, desc="summary of residuals, See Woolrich, et. 
al., 2001" + ) results_dir = Directory( - exists=True, desc='directory storing model estimation output') + exists=True, desc="directory storing model estimation output" + ) corrections = File( - exists=True, - desc=('statistical corrections used within FILM ' - 'modeling')) - thresholdac = File(exists=True, desc='The FILM autocorrelation parameters') - logfile = File(exists=True, desc='FILM run logfile') + exists=True, desc=("statistical corrections used within FILM " "modeling") + ) + thresholdac = File(exists=True, desc="The FILM autocorrelation parameters") + logfile = File(exists=True, desc="FILM run logfile") class FILMGLSOutputSpec507(TraitedSpec): param_estimates = OutputMultiPath( File(exists=True), - desc=('Parameter estimates for each ' - 'column of the design matrix')) + desc=("Parameter estimates for each " "column of the design matrix"), + ) residual4d = File( exists=True, - desc=('Model fit residual mean-squared error for each ' - 'time point')) - dof_file = File(exists=True, desc='degrees of freedom') + desc=("Model fit residual mean-squared error for each " "time point"), + ) + dof_file = File(exists=True, desc="degrees of freedom") sigmasquareds = File( - exists=True, desc='summary of residuals, See Woolrich, et. al., 2001') + exists=True, desc="summary of residuals, See Woolrich, et. al., 2001" + ) results_dir = Directory( - exists=True, desc='directory storing model estimation output') - thresholdac = File(exists=True, desc='The FILM autocorrelation parameters') - logfile = File(exists=True, desc='FILM run logfile') + exists=True, desc="directory storing model estimation output" + ) + thresholdac = File(exists=True, desc="The FILM autocorrelation parameters") + logfile = File(exists=True, desc="FILM run logfile") copes = OutputMultiPath( - File(exists=True), desc='Contrast estimates for each contrast') + File(exists=True), desc="Contrast estimates for each contrast" + ) varcopes = OutputMultiPath( - File(exists=True), desc='Variance estimates for each contrast') - zstats = OutputMultiPath( - File(exists=True), desc='z-stat file for each contrast') - tstats = OutputMultiPath( - File(exists=True), desc='t-stat file for each contrast') - fstats = OutputMultiPath( - File(exists=True), desc='f-stat file for each contrast') - zfstats = OutputMultiPath( - File(exists=True), desc='z-stat file for each F contrast') + File(exists=True), desc="Variance estimates for each contrast" + ) + zstats = OutputMultiPath(File(exists=True), desc="z-stat file for each contrast") + tstats = OutputMultiPath(File(exists=True), desc="t-stat file for each contrast") + fstats = OutputMultiPath(File(exists=True), desc="f-stat file for each contrast") + zfstats = OutputMultiPath(File(exists=True), desc="z-stat file for each F contrast") class FILMGLS(FSLCommand): @@ -748,27 +819,25 @@ class FILMGLS(FSLCommand): """ - _cmd = 'film_gls' + _cmd = "film_gls" input_spec = FILMGLSInputSpec output_spec = FILMGLSOutputSpec - if Info.version() and LooseVersion(Info.version()) > LooseVersion('5.0.6'): + if Info.version() and LooseVersion(Info.version()) > LooseVersion("5.0.6"): input_spec = FILMGLSInputSpec507 output_spec = FILMGLSOutputSpec507 - elif (Info.version() - and LooseVersion(Info.version()) > LooseVersion('5.0.4')): + elif Info.version() and LooseVersion(Info.version()) > LooseVersion("5.0.4"): input_spec = FILMGLSInputSpec505 def _get_pe_files(self, cwd): files = None if isdefined(self.inputs.design_file): - fp = open(self.inputs.design_file, 'rt') + fp = open(self.inputs.design_file, "rt") for line 
in fp.readlines(): - if line.startswith('/NumWaves'): + if line.startswith("/NumWaves"): numpes = int(line.split()[-1]) files = [] for i in range(numpes): - files.append( - self._gen_fname('pe%d.nii' % (i + 1), cwd=cwd)) + files.append(self._gen_fname("pe%d.nii" % (i + 1), cwd=cwd)) break fp.close() return files @@ -777,16 +846,16 @@ def _get_numcons(self): numtcons = 0 numfcons = 0 if isdefined(self.inputs.tcon_file): - fp = open(self.inputs.tcon_file, 'rt') + fp = open(self.inputs.tcon_file, "rt") for line in fp.readlines(): - if line.startswith('/NumContrasts'): + if line.startswith("/NumContrasts"): numtcons = int(line.split()[-1]) break fp.close() if isdefined(self.inputs.fcon_file): - fp = open(self.inputs.fcon_file, 'rt') + fp = open(self.inputs.fcon_file, "rt") for line in fp.readlines(): - if line.startswith('/NumContrasts'): + if line.startswith("/NumContrasts"): numfcons = int(line.split()[-1]) break fp.close() @@ -796,25 +865,21 @@ def _list_outputs(self): outputs = self._outputs().get() cwd = os.getcwd() results_dir = os.path.join(cwd, self.inputs.results_dir) - outputs['results_dir'] = results_dir + outputs["results_dir"] = results_dir pe_files = self._get_pe_files(results_dir) if pe_files: - outputs['param_estimates'] = pe_files - outputs['residual4d'] = self._gen_fname('res4d.nii', cwd=results_dir) - outputs['dof_file'] = os.path.join(results_dir, 'dof') - outputs['sigmasquareds'] = self._gen_fname( - 'sigmasquareds.nii', cwd=results_dir) - outputs['thresholdac'] = self._gen_fname( - 'threshac1.nii', cwd=results_dir) - if (Info.version() - and LooseVersion(Info.version()) < LooseVersion('5.0.7')): - outputs['corrections'] = self._gen_fname( - 'corrections.nii', cwd=results_dir) - outputs['logfile'] = self._gen_fname( - 'logfile', change_ext=False, cwd=results_dir) - - if (Info.version() - and LooseVersion(Info.version()) > LooseVersion('5.0.6')): + outputs["param_estimates"] = pe_files + outputs["residual4d"] = self._gen_fname("res4d.nii", cwd=results_dir) + outputs["dof_file"] = os.path.join(results_dir, "dof") + outputs["sigmasquareds"] = self._gen_fname("sigmasquareds.nii", cwd=results_dir) + outputs["thresholdac"] = self._gen_fname("threshac1.nii", cwd=results_dir) + if Info.version() and LooseVersion(Info.version()) < LooseVersion("5.0.7"): + outputs["corrections"] = self._gen_fname("corrections.nii", cwd=results_dir) + outputs["logfile"] = self._gen_fname( + "logfile", change_ext=False, cwd=results_dir + ) + + if Info.version() and LooseVersion(Info.version()) > LooseVersion("5.0.6"): pth = results_dir numtcons, numfcons = self._get_numcons() base_contrast = 1 @@ -824,46 +889,47 @@ def _list_outputs(self): tstats = [] for i in range(numtcons): copes.append( - self._gen_fname( - 'cope%d.nii' % (base_contrast + i), cwd=pth)) + self._gen_fname("cope%d.nii" % (base_contrast + i), cwd=pth) + ) varcopes.append( - self._gen_fname( - 'varcope%d.nii' % (base_contrast + i), cwd=pth)) + self._gen_fname("varcope%d.nii" % (base_contrast + i), cwd=pth) + ) zstats.append( - self._gen_fname( - 'zstat%d.nii' % (base_contrast + i), cwd=pth)) + self._gen_fname("zstat%d.nii" % (base_contrast + i), cwd=pth) + ) tstats.append( - self._gen_fname( - 'tstat%d.nii' % (base_contrast + i), cwd=pth)) + self._gen_fname("tstat%d.nii" % (base_contrast + i), cwd=pth) + ) if copes: - outputs['copes'] = copes - outputs['varcopes'] = varcopes - outputs['zstats'] = zstats - outputs['tstats'] = tstats + outputs["copes"] = copes + outputs["varcopes"] = varcopes + outputs["zstats"] = zstats + 
outputs["tstats"] = tstats fstats = [] zfstats = [] for i in range(numfcons): fstats.append( - self._gen_fname( - 'fstat%d.nii' % (base_contrast + i), cwd=pth)) + self._gen_fname("fstat%d.nii" % (base_contrast + i), cwd=pth) + ) zfstats.append( - self._gen_fname( - 'zfstat%d.nii' % (base_contrast + i), cwd=pth)) + self._gen_fname("zfstat%d.nii" % (base_contrast + i), cwd=pth) + ) if fstats: - outputs['fstats'] = fstats - outputs['zfstats'] = zfstats + outputs["fstats"] = fstats + outputs["zfstats"] = zfstats return outputs class FEATRegisterInputSpec(BaseInterfaceInputSpec): feat_dirs = InputMultiPath( - Directory(exists=True), desc="Lower level feat dirs", mandatory=True) + Directory(exists=True), desc="Lower level feat dirs", mandatory=True + ) reg_image = File( exists=True, desc="image to register to (will be treated as standard)", - mandatory=True) - reg_dof = traits.Int( - 12, desc="registration degrees of freedom", usedefault=True) + mandatory=True, + ) + reg_dof = traits.Int(12, desc="registration degrees of freedom", usedefault=True) class FEATRegisterOutputSpec(TraitedSpec): @@ -873,24 +939,25 @@ class FEATRegisterOutputSpec(TraitedSpec): class FEATRegister(BaseInterface): """Register feat directories to a specific standard """ + input_spec = FEATRegisterInputSpec output_spec = FEATRegisterOutputSpec def _run_interface(self, runtime): - fsf_header = load_template('featreg_header.tcl') - fsf_footer = load_template('feat_nongui.tcl') - fsf_dirs = load_template('feat_fe_featdirs.tcl') + fsf_header = load_template("featreg_header.tcl") + fsf_footer = load_template("feat_nongui.tcl") + fsf_dirs = load_template("feat_fe_featdirs.tcl") num_runs = len(self.inputs.feat_dirs) fsf_txt = fsf_header.substitute( num_runs=num_runs, regimage=self.inputs.reg_image, - regdof=self.inputs.reg_dof) + regdof=self.inputs.reg_dof, + ) for i, rundir in enumerate(ensure_list(self.inputs.feat_dirs)): - fsf_txt += fsf_dirs.substitute( - runno=i + 1, rundir=os.path.abspath(rundir)) + fsf_txt += fsf_dirs.substitute(runno=i + 1, rundir=os.path.abspath(rundir)) fsf_txt += fsf_footer.substitute() - f = open(os.path.join(os.getcwd(), 'register.fsf'), 'wt') + f = open(os.path.join(os.getcwd(), "register.fsf"), "wt") f.write(fsf_txt) f.close() @@ -898,111 +965,117 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['fsf_file'] = os.path.abspath( - os.path.join(os.getcwd(), 'register.fsf')) + outputs["fsf_file"] = os.path.abspath(os.path.join(os.getcwd(), "register.fsf")) return outputs class FLAMEOInputSpec(FSLCommandInputSpec): cope_file = File( exists=True, - argstr='--copefile=%s', + argstr="--copefile=%s", mandatory=True, - desc='cope regressor data file') + desc="cope regressor data file", + ) var_cope_file = File( - exists=True, - argstr='--varcopefile=%s', - desc='varcope weightings data file') + exists=True, argstr="--varcopefile=%s", desc="varcope weightings data file" + ) dof_var_cope_file = File( - exists=True, - argstr='--dofvarcopefile=%s', - desc='dof data file for varcope data') + exists=True, argstr="--dofvarcopefile=%s", desc="dof data file for varcope data" + ) mask_file = File( - exists=True, argstr='--maskfile=%s', mandatory=True, desc='mask file') + exists=True, argstr="--maskfile=%s", mandatory=True, desc="mask file" + ) design_file = File( - exists=True, - argstr='--designfile=%s', - mandatory=True, - desc='design matrix file') + exists=True, argstr="--designfile=%s", mandatory=True, desc="design matrix file" + ) t_con_file = File( 
exists=True, - argstr='--tcontrastsfile=%s', + argstr="--tcontrastsfile=%s", mandatory=True, - desc='ascii matrix specifying t-contrasts') + desc="ascii matrix specifying t-contrasts", + ) f_con_file = File( exists=True, - argstr='--fcontrastsfile=%s', - desc='ascii matrix specifying f-contrasts') + argstr="--fcontrastsfile=%s", + desc="ascii matrix specifying f-contrasts", + ) cov_split_file = File( exists=True, - argstr='--covsplitfile=%s', + argstr="--covsplitfile=%s", mandatory=True, - desc='ascii matrix specifying the groups the covariance is split into') + desc="ascii matrix specifying the groups the covariance is split into", + ) run_mode = traits.Enum( - 'fe', - 'ols', - 'flame1', - 'flame12', - argstr='--runmode=%s', + "fe", + "ols", + "flame1", + "flame12", + argstr="--runmode=%s", mandatory=True, - desc='inference to perform') - n_jumps = traits.Int( - argstr='--njumps=%d', desc='number of jumps made by mcmc') + desc="inference to perform", + ) + n_jumps = traits.Int(argstr="--njumps=%d", desc="number of jumps made by mcmc") burnin = traits.Int( - argstr='--burnin=%d', - desc=('number of jumps at start of mcmc to be ' - 'discarded')) + argstr="--burnin=%d", + desc=("number of jumps at start of mcmc to be " "discarded"), + ) sample_every = traits.Int( - argstr='--sampleevery=%d', desc='number of jumps for each sample') - fix_mean = traits.Bool(argstr='--fixmean', desc='fix mean for tfit') + argstr="--sampleevery=%d", desc="number of jumps for each sample" + ) + fix_mean = traits.Bool(argstr="--fixmean", desc="fix mean for tfit") infer_outliers = traits.Bool( - argstr='--inferoutliers', desc='infer outliers - not for fe') - no_pe_outputs = traits.Bool( - argstr='--nopeoutput', desc='do not output pe files') + argstr="--inferoutliers", desc="infer outliers - not for fe" + ) + no_pe_outputs = traits.Bool(argstr="--nopeoutput", desc="do not output pe files") sigma_dofs = traits.Int( - argstr='--sigma_dofs=%d', - desc=('sigma (in mm) to use for Gaussian ' - 'smoothing the DOFs in FLAME 2. Default is ' - '1mm, -1 indicates no smoothing')) + argstr="--sigma_dofs=%d", + desc=( + "sigma (in mm) to use for Gaussian " + "smoothing the DOFs in FLAME 2. Default is " + "1mm, -1 indicates no smoothing" + ), + ) outlier_iter = traits.Int( - argstr='--ioni=%d', - desc=('Number of max iterations to use when ' - 'inferring outliers. Default is 12.')) - log_dir = Directory("stats", argstr='--ld=%s', usedefault=True) # ohinds + argstr="--ioni=%d", + desc=( + "Number of max iterations to use when " "inferring outliers. Default is 12." 
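# A sketch of driving the FLAMEO inputs being reformatted here (file names are
# hypothetical; run_mode="fe" requests fixed effects). This mirrors the
# interface's own doctest:
from nipype.interfaces import fsl

flameo = fsl.FLAMEO(
    cope_file="cope.nii.gz",
    var_cope_file="varcope.nii.gz",
    cov_split_file="cov_split.mat",
    design_file="design.mat",
    t_con_file="design.con",
    mask_file="mask.nii",
    run_mode="fe",
)
flameo.run()  # doctest: +SKIP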
+ ), + ) + log_dir = Directory("stats", argstr="--ld=%s", usedefault=True) # ohinds # no support for ven, vef class FLAMEOOutputSpec(TraitedSpec): pes = OutputMultiPath( File(exists=True), - desc=("Parameter estimates for each column of the " - "design matrix for each voxel")) + desc=( + "Parameter estimates for each column of the " "design matrix for each voxel" + ), + ) res4d = OutputMultiPath( File(exists=True), - desc=("Model fit residual mean-squared error for " - "each time point")) + desc=("Model fit residual mean-squared error for " "each time point"), + ) copes = OutputMultiPath( - File(exists=True), desc="Contrast estimates for each contrast") + File(exists=True), desc="Contrast estimates for each contrast" + ) var_copes = OutputMultiPath( - File(exists=True), desc="Variance estimates for each contrast") - zstats = OutputMultiPath( - File(exists=True), desc="z-stat file for each contrast") - tstats = OutputMultiPath( - File(exists=True), desc="t-stat file for each contrast") - zfstats = OutputMultiPath( - File(exists=True), desc="z stat file for each f contrast") - fstats = OutputMultiPath( - File(exists=True), desc="f-stat file for each contrast") + File(exists=True), desc="Variance estimates for each contrast" + ) + zstats = OutputMultiPath(File(exists=True), desc="z-stat file for each contrast") + tstats = OutputMultiPath(File(exists=True), desc="t-stat file for each contrast") + zfstats = OutputMultiPath(File(exists=True), desc="z stat file for each f contrast") + fstats = OutputMultiPath(File(exists=True), desc="f-stat file for each contrast") mrefvars = OutputMultiPath( - File(exists=True), - desc=("mean random effect variances for each " - "contrast")) + File(exists=True), desc=("mean random effect variances for each " "contrast") + ) tdof = OutputMultiPath( - File(exists=True), desc="temporal dof file for each contrast") - weights = OutputMultiPath( - File(exists=True), desc="weights file for each contrast") + File(exists=True), desc="temporal dof file for each contrast" + ) + weights = OutputMultiPath(File(exists=True), desc="weights file for each contrast") stats_dir = Directory( - File(exists=True), desc="directory storing model estimation output") + File(exists=True), desc="directory storing model estimation output" + ) class FLAMEO(FSLCommand): @@ -1027,36 +1100,39 @@ class FLAMEO(FSLCommand): """ - _cmd = 'flameo' + _cmd = "flameo" input_spec = FLAMEOInputSpec output_spec = FLAMEOOutputSpec - references_ = [{ - 'entry': - BibTeX( - '@article{BeckmannJenkinsonSmith2003,' - 'author={C.F. Beckmann, M. Jenkinson, and S.M. Smith},' - 'title={General multilevel linear modeling for group analysis in FMRI.},' - 'journal={NeuroImage},' - 'volume={20},' - 'pages={1052-1063},' - 'year={2003},' - '}'), - 'tags': ['method'], - }, { - 'entry': - BibTeX( - '@article{WoolrichBehrensBeckmannJenkinsonSmith2004,' - 'author={M.W. Woolrich, T.E. Behrens, ' - 'C.F. Beckmann, M. Jenkinson, and S.M. Smith},' - 'title={Multilevel linear modelling for FMRI group analysis using Bayesian inference.},' - 'journal={NeuroImage},' - 'volume={21},' - 'pages={1732-1747},' - 'year={2004},' - '}'), - 'tags': ['method'], - }] + references_ = [ + { + "entry": BibTeX( + "@article{BeckmannJenkinsonSmith2003," + "author={C.F. Beckmann, M. Jenkinson, and S.M. 
Smith}," + "title={General multilevel linear modeling for group analysis in FMRI.}," + "journal={NeuroImage}," + "volume={20}," + "pages={1052-1063}," + "year={2003}," + "}" + ), + "tags": ["method"], + }, + { + "entry": BibTeX( + "@article{WoolrichBehrensBeckmannJenkinsonSmith2004," + "author={M.W. Woolrich, T.E. Behrens, " + "C.F. Beckmann, M. Jenkinson, and S.M. Smith}," + "title={Multilevel linear modelling for FMRI group analysis using Bayesian inference.}," + "journal={NeuroImage}," + "volume={21}," + "pages={1732-1747}," + "year={2004}," + "}" + ), + "tags": ["method"], + }, + ] # ohinds: 2010-04-06 def _run_interface(self, runtime): @@ -1073,61 +1149,54 @@ def _list_outputs(self): outputs = self._outputs().get() pth = os.path.join(os.getcwd(), self.inputs.log_dir) - pes = human_order_sorted(glob(os.path.join(pth, 'pe[0-9]*.*'))) - assert len(pes) >= 1, 'No pe volumes generated by FSL Estimate' - outputs['pes'] = pes + pes = human_order_sorted(glob(os.path.join(pth, "pe[0-9]*.*"))) + assert len(pes) >= 1, "No pe volumes generated by FSL Estimate" + outputs["pes"] = pes - res4d = human_order_sorted(glob(os.path.join(pth, 'res4d.*'))) - assert len(res4d) == 1, 'No residual volume generated by FSL Estimate' - outputs['res4d'] = res4d[0] + res4d = human_order_sorted(glob(os.path.join(pth, "res4d.*"))) + assert len(res4d) == 1, "No residual volume generated by FSL Estimate" + outputs["res4d"] = res4d[0] - copes = human_order_sorted(glob(os.path.join(pth, 'cope[0-9]*.*'))) - assert len(copes) >= 1, 'No cope volumes generated by FSL CEstimate' - outputs['copes'] = copes + copes = human_order_sorted(glob(os.path.join(pth, "cope[0-9]*.*"))) + assert len(copes) >= 1, "No cope volumes generated by FSL CEstimate" + outputs["copes"] = copes - var_copes = human_order_sorted( - glob(os.path.join(pth, 'varcope[0-9]*.*'))) - assert len( - var_copes) >= 1, 'No varcope volumes generated by FSL CEstimate' - outputs['var_copes'] = var_copes + var_copes = human_order_sorted(glob(os.path.join(pth, "varcope[0-9]*.*"))) + assert len(var_copes) >= 1, "No varcope volumes generated by FSL CEstimate" + outputs["var_copes"] = var_copes - zstats = human_order_sorted(glob(os.path.join(pth, 'zstat[0-9]*.*'))) - assert len(zstats) >= 1, 'No zstat volumes generated by FSL CEstimate' - outputs['zstats'] = zstats + zstats = human_order_sorted(glob(os.path.join(pth, "zstat[0-9]*.*"))) + assert len(zstats) >= 1, "No zstat volumes generated by FSL CEstimate" + outputs["zstats"] = zstats if isdefined(self.inputs.f_con_file): - zfstats = human_order_sorted( - glob(os.path.join(pth, 'zfstat[0-9]*.*'))) - assert len( - zfstats) >= 1, 'No zfstat volumes generated by FSL CEstimate' - outputs['zfstats'] = zfstats - - fstats = human_order_sorted( - glob(os.path.join(pth, 'fstat[0-9]*.*'))) - assert len( - fstats) >= 1, 'No fstat volumes generated by FSL CEstimate' - outputs['fstats'] = fstats - - tstats = human_order_sorted(glob(os.path.join(pth, 'tstat[0-9]*.*'))) - assert len(tstats) >= 1, 'No tstat volumes generated by FSL CEstimate' - outputs['tstats'] = tstats + zfstats = human_order_sorted(glob(os.path.join(pth, "zfstat[0-9]*.*"))) + assert len(zfstats) >= 1, "No zfstat volumes generated by FSL CEstimate" + outputs["zfstats"] = zfstats + + fstats = human_order_sorted(glob(os.path.join(pth, "fstat[0-9]*.*"))) + assert len(fstats) >= 1, "No fstat volumes generated by FSL CEstimate" + outputs["fstats"] = fstats + + tstats = human_order_sorted(glob(os.path.join(pth, "tstat[0-9]*.*"))) + assert len(tstats) >= 1, "No tstat 
volumes generated by FSL CEstimate" + outputs["tstats"] = tstats mrefs = human_order_sorted( - glob(os.path.join(pth, 'mean_random_effects_var[0-9]*.*'))) - assert len( - mrefs) >= 1, 'No mean random effects volumes generated by FLAMEO' - outputs['mrefvars'] = mrefs + glob(os.path.join(pth, "mean_random_effects_var[0-9]*.*")) + ) + assert len(mrefs) >= 1, "No mean random effects volumes generated by FLAMEO" + outputs["mrefvars"] = mrefs - tdof = human_order_sorted(glob(os.path.join(pth, 'tdof_t[0-9]*.*'))) - assert len(tdof) >= 1, 'No T dof volumes generated by FLAMEO' - outputs['tdof'] = tdof + tdof = human_order_sorted(glob(os.path.join(pth, "tdof_t[0-9]*.*"))) + assert len(tdof) >= 1, "No T dof volumes generated by FLAMEO" + outputs["tdof"] = tdof - weights = human_order_sorted( - glob(os.path.join(pth, 'weights[0-9]*.*'))) - assert len(weights) >= 1, 'No weight volumes generated by FLAMEO' - outputs['weights'] = weights + weights = human_order_sorted(glob(os.path.join(pth, "weights[0-9]*.*"))) + assert len(weights) >= 1, "No weight volumes generated by FLAMEO" + outputs["weights"] = weights - outputs['stats_dir'] = pth + outputs["stats_dir"] = pth return outputs @@ -1136,66 +1205,66 @@ class ContrastMgrInputSpec(FSLCommandInputSpec): tcon_file = File( exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-1, - desc='contrast file containing T-contrasts') + desc="contrast file containing T-contrasts", + ) fcon_file = File( - exists=True, - argstr='-f %s', - desc='contrast file containing F-contrasts') + exists=True, argstr="-f %s", desc="contrast file containing F-contrasts" + ) param_estimates = InputMultiPath( File(exists=True), - argstr='', + argstr="", copyfile=False, mandatory=True, - desc=('Parameter estimates for each ' - 'column of the design matrix')) + desc=("Parameter estimates for each " "column of the design matrix"), + ) corrections = File( exists=True, copyfile=False, mandatory=True, - desc='statistical corrections used within FILM modelling') + desc="statistical corrections used within FILM modelling", + ) dof_file = File( exists=True, - argstr='', + argstr="", copyfile=False, mandatory=True, - desc='degrees of freedom') + desc="degrees of freedom", + ) sigmasquareds = File( exists=True, - argstr='', + argstr="", position=-2, copyfile=False, mandatory=True, - desc=('summary of residuals, See Woolrich, et. al., ' - '2001')) + desc=("summary of residuals, See Woolrich, et. 
al., " "2001"), + ) contrast_num = traits.Range( - low=1, - argstr='-cope', - desc=('contrast number to start labeling ' - 'copes from')) + low=1, argstr="-cope", desc=("contrast number to start labeling " "copes from") + ) suffix = traits.Str( - argstr='-suffix %s', - desc=('suffix to put on the end of the cope filename ' - 'before the contrast number, default is ' - 'nothing')) + argstr="-suffix %s", + desc=( + "suffix to put on the end of the cope filename " + "before the contrast number, default is " + "nothing" + ), + ) class ContrastMgrOutputSpec(TraitedSpec): copes = OutputMultiPath( - File(exists=True), desc='Contrast estimates for each contrast') + File(exists=True), desc="Contrast estimates for each contrast" + ) varcopes = OutputMultiPath( - File(exists=True), desc='Variance estimates for each contrast') - zstats = OutputMultiPath( - File(exists=True), desc='z-stat file for each contrast') - tstats = OutputMultiPath( - File(exists=True), desc='t-stat file for each contrast') - fstats = OutputMultiPath( - File(exists=True), desc='f-stat file for each contrast') - zfstats = OutputMultiPath( - File(exists=True), desc='z-stat file for each F contrast') - neffs = OutputMultiPath( - File(exists=True), desc='neff file ?? for each contrast') + File(exists=True), desc="Variance estimates for each contrast" + ) + zstats = OutputMultiPath(File(exists=True), desc="z-stat file for each contrast") + tstats = OutputMultiPath(File(exists=True), desc="t-stat file for each contrast") + fstats = OutputMultiPath(File(exists=True), desc="f-stat file for each contrast") + zfstats = OutputMultiPath(File(exists=True), desc="z-stat file for each F contrast") + neffs = OutputMultiPath(File(exists=True), desc="neff file ?? for each contrast") class ContrastMgr(FSLCommand): @@ -1205,10 +1274,10 @@ class ContrastMgr(FSLCommand): same location. This has deprecated for FSL versions 5.0.7+ as the necessary corrections file is no longer generated by FILMGLS. 
""" - if Info.version() and LooseVersion( - Info.version()) >= LooseVersion("5.0.7"): + + if Info.version() and LooseVersion(Info.version()) >= LooseVersion("5.0.7"): DeprecationWarning("ContrastMgr is deprecated in FSL 5.0.7+") - _cmd = 'contrast_mgr' + _cmd = "contrast_mgr" input_spec = ContrastMgrInputSpec output_spec = ContrastMgrOutputSpec @@ -1222,33 +1291,32 @@ def _run_interface(self, runtime): return runtime def _format_arg(self, name, trait_spec, value): - if name in ['param_estimates', 'corrections', 'dof_file']: - return '' - elif name in ['sigmasquareds']: + if name in ["param_estimates", "corrections", "dof_file"]: + return "" + elif name in ["sigmasquareds"]: path, _ = os.path.split(value) return path else: - return super(ContrastMgr, self)._format_arg( - name, trait_spec, value) + return super(ContrastMgr, self)._format_arg(name, trait_spec, value) def _get_design_root(self, infile): _, fname = os.path.split(infile) - return fname.split('.')[0] + return fname.split(".")[0] def _get_numcons(self): numtcons = 0 numfcons = 0 if isdefined(self.inputs.tcon_file): - fp = open(self.inputs.tcon_file, 'rt') + fp = open(self.inputs.tcon_file, "rt") for line in fp.readlines(): - if line.startswith('/NumContrasts'): + if line.startswith("/NumContrasts"): numtcons = int(line.split()[-1]) break fp.close() if isdefined(self.inputs.fcon_file): - fp = open(self.inputs.fcon_file, 'rt') + fp = open(self.inputs.fcon_file, "rt") for line in fp.readlines(): - if line.startswith('/NumContrasts'): + if line.startswith("/NumContrasts"): numfcons = int(line.split()[-1]) break fp.close() @@ -1267,45 +1335,42 @@ def _list_outputs(self): tstats = [] neffs = [] for i in range(numtcons): - copes.append( - self._gen_fname('cope%d.nii' % (base_contrast + i), cwd=pth)) + copes.append(self._gen_fname("cope%d.nii" % (base_contrast + i), cwd=pth)) varcopes.append( - self._gen_fname( - 'varcope%d.nii' % (base_contrast + i), cwd=pth)) - zstats.append( - self._gen_fname('zstat%d.nii' % (base_contrast + i), cwd=pth)) - tstats.append( - self._gen_fname('tstat%d.nii' % (base_contrast + i), cwd=pth)) - neffs.append( - self._gen_fname('neff%d.nii' % (base_contrast + i), cwd=pth)) + self._gen_fname("varcope%d.nii" % (base_contrast + i), cwd=pth) + ) + zstats.append(self._gen_fname("zstat%d.nii" % (base_contrast + i), cwd=pth)) + tstats.append(self._gen_fname("tstat%d.nii" % (base_contrast + i), cwd=pth)) + neffs.append(self._gen_fname("neff%d.nii" % (base_contrast + i), cwd=pth)) if copes: - outputs['copes'] = copes - outputs['varcopes'] = varcopes - outputs['zstats'] = zstats - outputs['tstats'] = tstats - outputs['neffs'] = neffs + outputs["copes"] = copes + outputs["varcopes"] = varcopes + outputs["zstats"] = zstats + outputs["tstats"] = tstats + outputs["neffs"] = neffs fstats = [] zfstats = [] for i in range(numfcons): - fstats.append( - self._gen_fname('fstat%d.nii' % (base_contrast + i), cwd=pth)) + fstats.append(self._gen_fname("fstat%d.nii" % (base_contrast + i), cwd=pth)) zfstats.append( - self._gen_fname('zfstat%d.nii' % (base_contrast + i), cwd=pth)) + self._gen_fname("zfstat%d.nii" % (base_contrast + i), cwd=pth) + ) if fstats: - outputs['fstats'] = fstats - outputs['zfstats'] = zfstats + outputs["fstats"] = fstats + outputs["zfstats"] = zfstats return outputs class L2ModelInputSpec(BaseInterfaceInputSpec): num_copes = traits.Range( - low=1, mandatory=True, desc='number of copes to be combined') + low=1, mandatory=True, desc="number of copes to be combined" + ) class L2ModelOutputSpec(TraitedSpec): - 
design_mat = File(exists=True, desc='design matrix file') - design_con = File(exists=True, desc='design contrast file') - design_grp = File(exists=True, desc='design group file') + design_mat = File(exists=True, desc="design matrix file") + design_con = File(exists=True, desc="design contrast file") + design_grp = File(exists=True, desc="design group file") class L2Model(BaseInterface): @@ -1325,43 +1390,44 @@ class L2Model(BaseInterface): def _run_interface(self, runtime): cwd = os.getcwd() mat_txt = [ - '/NumWaves 1', '/NumPoints {:d}'.format(self.inputs.num_copes), - '/PPheights 1', '', '/Matrix' + "/NumWaves 1", + "/NumPoints {:d}".format(self.inputs.num_copes), + "/PPheights 1", + "", + "/Matrix", ] for i in range(self.inputs.num_copes): - mat_txt += ['1'] - mat_txt = '\n'.join(mat_txt) + mat_txt += ["1"] + mat_txt = "\n".join(mat_txt) con_txt = [ - '/ContrastName1 group mean', - '/NumWaves 1', - '/NumContrasts 1', - '/PPheights 1', - '/RequiredEffect 100', # XX where does this + "/ContrastName1 group mean", + "/NumWaves 1", + "/NumContrasts 1", + "/PPheights 1", + "/RequiredEffect 100", # XX where does this # number come from - '', - '/Matrix', - '1' + "", + "/Matrix", + "1", ] - con_txt = '\n'.join(con_txt) + con_txt = "\n".join(con_txt) grp_txt = [ - '/NumWaves 1', '/NumPoints {:d}'.format(self.inputs.num_copes), - '', '/Matrix' + "/NumWaves 1", + "/NumPoints {:d}".format(self.inputs.num_copes), + "", + "/Matrix", ] for i in range(self.inputs.num_copes): - grp_txt += ['1'] - grp_txt = '\n'.join(grp_txt) + grp_txt += ["1"] + grp_txt = "\n".join(grp_txt) - txt = { - 'design.mat': mat_txt, - 'design.con': con_txt, - 'design.grp': grp_txt - } + txt = {"design.mat": mat_txt, "design.con": con_txt, "design.grp": grp_txt} # write design files - for i, name in enumerate(['design.mat', 'design.con', 'design.grp']): - f = open(os.path.join(cwd, name), 'wt') + for i, name in enumerate(["design.mat", "design.con", "design.grp"]): + f = open(os.path.join(cwd, name), "wt") f.write(txt[name]) f.close() @@ -1370,43 +1436,55 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() for field in list(outputs.keys()): - outputs[field] = os.path.join(os.getcwd(), field.replace('_', '.')) + outputs[field] = os.path.join(os.getcwd(), field.replace("_", ".")) return outputs class MultipleRegressDesignInputSpec(BaseInterfaceInputSpec): contrasts = traits.List( traits.Either( - traits.Tuple(traits.Str, traits.Enum('T'), traits.List(traits.Str), - traits.List(traits.Float)), - traits.Tuple(traits.Str, traits.Enum('F'), - traits.List( - traits.Tuple(traits.Str, traits.Enum('T'), - traits.List(traits.Str), - traits.List(traits.Float)), ))), + traits.Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + ), + traits.Tuple( + traits.Str, + traits.Enum("F"), + traits.List( + traits.Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + ), + ), + ), + ), mandatory=True, desc="List of contrasts with each contrast being a list of the form - \ [('name', 'stat', [condition list], [weight list])]. if \ session list is None or not provided, all sessions are used. 
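# A brief usage sketch for the L2Model interface above, with a hypothetical
# cope count; running it writes design.mat, design.con and design.grp to the
# working directory:
from nipype.interfaces import fsl

l2 = fsl.L2Model(num_copes=3)
l2.run()  # doctest: +SKIP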
For F \ contrasts, the condition list should contain previously defined \ -T-contrasts without any weight list.") +T-contrasts without any weight list.", + ) regressors = traits.Dict( traits.Str, traits.List(traits.Float), mandatory=True, - desc=('dictionary containing named lists of ' - 'regressors')) + desc=("dictionary containing named lists of " "regressors"), + ) groups = traits.List( - traits.Int, - desc=('list of group identifiers (defaults to single ' - 'group)')) + traits.Int, desc=("list of group identifiers (defaults to single " "group)") + ) class MultipleRegressDesignOutputSpec(TraitedSpec): - design_mat = File(exists=True, desc='design matrix file') - design_con = File(exists=True, desc='design t-contrast file') - design_fts = File(exists=True, desc='design f-contrast file') - design_grp = File(exists=True, desc='design group file') + design_mat = File(exists=True, desc="design matrix file") + design_con = File(exists=True, desc="design t-contrast file") + design_fts = File(exists=True, desc="design f-contrast file") + design_grp = File(exists=True, desc="design group file") class MultipleRegressDesign(BaseInterface): @@ -1438,13 +1516,10 @@ def _run_interface(self, runtime): regs = sorted(self.inputs.regressors.keys()) nwaves = len(regs) npoints = len(self.inputs.regressors[regs[0]]) - ntcons = sum([1 for con in self.inputs.contrasts if con[1] == 'T']) - nfcons = sum([1 for con in self.inputs.contrasts if con[1] == 'F']) + ntcons = sum([1 for con in self.inputs.contrasts if con[1] == "T"]) + nfcons = sum([1 for con in self.inputs.contrasts if con[1] == "F"]) # write mat file - mat_txt = [ - '/NumWaves %d' % nwaves, - '/NumPoints %d' % npoints - ] + mat_txt = ["/NumWaves %d" % nwaves, "/NumPoints %d" % npoints] ppheights = [] for reg in regs: maxreg = np.max(self.inputs.regressors[reg]) @@ -1453,77 +1528,76 @@ def _run_interface(self, runtime): regheight = max([abs(minreg), abs(maxreg)]) else: regheight = abs(maxreg - minreg) - ppheights.append('%e' % regheight) - mat_txt += ['/PPheights ' + ' '.join(ppheights)] - mat_txt += ['', '/Matrix'] + ppheights.append("%e" % regheight) + mat_txt += ["/PPheights " + " ".join(ppheights)] + mat_txt += ["", "/Matrix"] for cidx in range(npoints): - mat_txt.append(' '.join( - ['%e' % self.inputs.regressors[key][cidx] for key in regs])) - mat_txt = '\n'.join(mat_txt) + '\n' + mat_txt.append( + " ".join(["%e" % self.inputs.regressors[key][cidx] for key in regs]) + ) + mat_txt = "\n".join(mat_txt) + "\n" # write t-con file con_txt = [] counter = 0 tconmap = {} for conidx, con in enumerate(self.inputs.contrasts): - if con[1] == 'T': + if con[1] == "T": tconmap[conidx] = counter counter += 1 - con_txt += ['/ContrastName%d %s' % (counter, con[0])] + con_txt += ["/ContrastName%d %s" % (counter, con[0])] con_txt += [ - '/NumWaves %d' % nwaves, - '/NumContrasts %d' % ntcons, - '/PPheights %s' % ' '.join( - ['%e' % 1 for i in range(counter)]), - '/RequiredEffect %s' % ' '.join( - ['%.3f' % 100 for i in range(counter)]), '', '/Matrix' + "/NumWaves %d" % nwaves, + "/NumContrasts %d" % ntcons, + "/PPheights %s" % " ".join(["%e" % 1 for i in range(counter)]), + "/RequiredEffect %s" % " ".join(["%.3f" % 100 for i in range(counter)]), + "", + "/Matrix", ] for idx in sorted(tconmap.keys()): convals = np.zeros((nwaves, 1)) for regidx, reg in enumerate(self.inputs.contrasts[idx][2]): - convals[regs.index(reg)] = self.inputs.contrasts[idx][3][ - regidx] - con_txt.append(' '.join(['%e' % val for val in convals])) - con_txt = '\n'.join(con_txt) + '\n' + 
convals[regs.index(reg)] = self.inputs.contrasts[idx][3][regidx] + con_txt.append(" ".join(["%e" % val for val in convals])) + con_txt = "\n".join(con_txt) + "\n" # write f-con file - fcon_txt = '' + fcon_txt = "" if nfcons: fcon_txt = [ - '/NumWaves %d' % ntcons, - '/NumContrasts %d' % nfcons, '', '/Matrix' + "/NumWaves %d" % ntcons, + "/NumContrasts %d" % nfcons, + "", + "/Matrix", ] for conidx, con in enumerate(self.inputs.contrasts): - if con[1] == 'F': + if con[1] == "F": convals = np.zeros((ntcons, 1)) for tcon in con[2]: convals[tconmap[self.inputs.contrasts.index(tcon)]] = 1 - fcon_txt.append(' '.join(['%d' % val for val in convals])) - fcon_txt = '\n'.join(fcon_txt) - fcon_txt += '\n' + fcon_txt.append(" ".join(["%d" % val for val in convals])) + fcon_txt = "\n".join(fcon_txt) + fcon_txt += "\n" # write group file - grp_txt = [ - '/NumWaves 1', - '/NumPoints %d' % npoints, '', '/Matrix' - ] + grp_txt = ["/NumWaves 1", "/NumPoints %d" % npoints, "", "/Matrix"] for i in range(npoints): if isdefined(self.inputs.groups): - grp_txt += ['%d' % self.inputs.groups[i]] + grp_txt += ["%d" % self.inputs.groups[i]] else: - grp_txt += ['1'] - grp_txt = '\n'.join(grp_txt) + '\n' + grp_txt += ["1"] + grp_txt = "\n".join(grp_txt) + "\n" txt = { - 'design.mat': mat_txt, - 'design.con': con_txt, - 'design.fts': fcon_txt, - 'design.grp': grp_txt + "design.mat": mat_txt, + "design.con": con_txt, + "design.fts": fcon_txt, + "design.grp": grp_txt, } # write design files for key, val in list(txt.items()): - if ('fts' in key) and (nfcons == 0): + if ("fts" in key) and (nfcons == 0): continue - filename = key.replace('_', '.') - f = open(os.path.join(cwd, filename), 'wt') + filename = key.replace("_", ".") + f = open(os.path.join(cwd, filename), "wt") f.write(val) f.close() @@ -1531,11 +1605,11 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - nfcons = sum([1 for con in self.inputs.contrasts if con[1] == 'F']) + nfcons = sum([1 for con in self.inputs.contrasts if con[1] == "F"]) for field in list(outputs.keys()): - if ('fts' in field) and (nfcons == 0): + if ("fts" in field) and (nfcons == 0): continue - outputs[field] = os.path.join(os.getcwd(), field.replace('_', '.')) + outputs[field] = os.path.join(os.getcwd(), field.replace("_", ".")) return outputs @@ -1546,18 +1620,19 @@ class SMMInputSpec(FSLCommandInputSpec): argstr='--sdf="%s"', mandatory=True, desc="statistics spatial map", - copyfile=False) + copyfile=False, + ) mask = File( exists=True, position=1, argstr='--mask="%s"', mandatory=True, desc="mask file", - copyfile=False) + copyfile=False, + ) no_deactivation_class = traits.Bool( - position=2, - argstr="--zfstatmode", - desc="enforces no deactivation class") + position=2, argstr="--zfstatmode", desc="enforces no deactivation class" + ) class SMMOutputSpec(TraitedSpec): @@ -1567,27 +1642,29 @@ class SMMOutputSpec(TraitedSpec): class SMM(FSLCommand): - ''' + """ Spatial Mixture Modelling. For more detail on the spatial mixture modelling see Mixture Models with Adaptive Spatial Regularisation for Segmentation with an Application to FMRI Data; Woolrich, M., Behrens, T., Beckmann, C., and Smith, S.; IEEE Trans. Medical Imaging, 24(1):1-11, 2005. 
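# A hedged sketch of MultipleRegressDesign with a made-up two-regressor
# design; each T-contrast is a (name, 'T', [regressor names], [weights]) tuple
# as the docstring above describes. Running it writes design.mat/.con/.grp
# (and design.fts when F-contrasts are given):
from nipype.interfaces import fsl

model = fsl.MultipleRegressDesign()
model.inputs.contrasts = [("group mean", "T", ["reg1"], [1.0])]
model.inputs.regressors = dict(reg1=[1.0, 1.0, 1.0], reg2=[2.0, -4.0, 3.0])
model.run()  # doctest: +SKIP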
- ''' - _cmd = 'mm --ld=logdir' + """ + + _cmd = "mm --ld=logdir" input_spec = SMMInputSpec output_spec = SMMOutputSpec def _list_outputs(self): outputs = self._outputs().get() # TODO get the true logdir from the stdout - outputs['null_p_map'] = self._gen_fname( - basename="w1_mean", cwd="logdir") - outputs['activation_p_map'] = self._gen_fname( - basename="w2_mean", cwd="logdir") - if (not isdefined(self.inputs.no_deactivation_class) - or not self.inputs.no_deactivation_class): - outputs['deactivation_p_map'] = self._gen_fname( - basename="w3_mean", cwd="logdir") + outputs["null_p_map"] = self._gen_fname(basename="w1_mean", cwd="logdir") + outputs["activation_p_map"] = self._gen_fname(basename="w2_mean", cwd="logdir") + if ( + not isdefined(self.inputs.no_deactivation_class) + or not self.inputs.no_deactivation_class + ): + outputs["deactivation_p_map"] = self._gen_fname( + basename="w3_mean", cwd="logdir" + ) return outputs @@ -1598,128 +1675,144 @@ class MELODICInputSpec(FSLCommandInputSpec): mandatory=True, position=0, desc="input file names (either single file name or a list)", - sep=",") - out_dir = Directory( - argstr="-o %s", desc="output directory name", genfile=True) - mask = File( - exists=True, argstr="-m %s", desc="file name of mask for thresholding") + sep=",", + ) + out_dir = Directory(argstr="-o %s", desc="output directory name", genfile=True) + mask = File(exists=True, argstr="-m %s", desc="file name of mask for thresholding") no_mask = traits.Bool(argstr="--nomask", desc="switch off masking") - update_mask = traits.Bool( - argstr="--update_mask", desc="switch off mask updating") + update_mask = traits.Bool(argstr="--update_mask", desc="switch off mask updating") no_bet = traits.Bool(argstr="--nobet", desc="switch off BET") bg_threshold = traits.Float( argstr="--bgthreshold=%f", - desc=("brain/non-brain threshold used to mask non-brain voxels, as a " - "percentage (only if --nobet selected)")) + desc=( + "brain/non-brain threshold used to mask non-brain voxels, as a " + "percentage (only if --nobet selected)" + ), + ) dim = traits.Int( argstr="-d %d", - desc=("dimensionality reduction into #num dimensions (default: " - "automatic estimation)")) + desc=( + "dimensionality reduction into #num dimensions (default: " + "automatic estimation)" + ), + ) dim_est = traits.Str( argstr="--dimest=%s", - desc=("use specific dim. estimation technique: lap, " - "bic, mdl, aic, mean (default: lap)")) - sep_whiten = traits.Bool( - argstr="--sep_whiten", desc="switch on separate whitening") + desc=( + "use specific dim. 
estimation technique: lap, " + "bic, mdl, aic, mean (default: lap)" + ), + ) + sep_whiten = traits.Bool(argstr="--sep_whiten", desc="switch on separate whitening") sep_vn = traits.Bool( - argstr="--sep_vn", desc="switch off joined variance normalization") + argstr="--sep_vn", desc="switch off joined variance normalization" + ) migp = traits.Bool(argstr="--migp", desc="switch on MIGP data reduction") - migpN = traits.Int( - argstr="--migpN %d", desc="number of internal Eigenmaps") + migpN = traits.Int(argstr="--migpN %d", desc="number of internal Eigenmaps") migp_shuffle = traits.Bool( - argstr="--migp_shuffle", - desc="randomise MIGP file order (default: TRUE)") + argstr="--migp_shuffle", desc="randomise MIGP file order (default: TRUE)" + ) migp_factor = traits.Int( argstr="--migp_factor %d", - desc= - "Internal Factor of mem-threshold relative to number of Eigenmaps (default: 2)" + desc="Internal Factor of mem-threshold relative to number of Eigenmaps (default: 2)", ) num_ICs = traits.Int( - argstr="-n %d", - desc="number of IC's to extract (for deflation approach)") + argstr="-n %d", desc="number of IC's to extract (for deflation approach)" + ) approach = traits.Str( argstr="-a %s", desc="approach for decomposition, 2D: defl, symm (default), 3D: tica " - "(default), concat") + "(default), concat", + ) non_linearity = traits.Str( - argstr="--nl=%s", desc="nonlinearity: gauss, tanh, pow3, pow4") - var_norm = traits.Bool( - argstr="--vn", desc="switch off variance normalization") + argstr="--nl=%s", desc="nonlinearity: gauss, tanh, pow3, pow4" + ) + var_norm = traits.Bool(argstr="--vn", desc="switch off variance normalization") pbsc = traits.Bool( - argstr="--pbsc", - desc="switch off conversion to percent BOLD signal change") + argstr="--pbsc", desc="switch off conversion to percent BOLD signal change" + ) cov_weight = traits.Float( argstr="--covarweight=%f", - desc=("voxel-wise weights for the covariance matrix (e.g. " - "segmentation information)")) + desc=( + "voxel-wise weights for the covariance matrix (e.g. 
" + "segmentation information)" + ), + ) epsilon = traits.Float(argstr="--eps=%f", desc="minimum error change") epsilonS = traits.Float( - argstr="--epsS=%f", - desc="minimum error change for rank-1 approximation in TICA") + argstr="--epsS=%f", desc="minimum error change for rank-1 approximation in TICA" + ) maxit = traits.Int( - argstr="--maxit=%d", - desc="maximum number of iterations before restart") + argstr="--maxit=%d", desc="maximum number of iterations before restart" + ) max_restart = traits.Int( - argstr="--maxrestart=%d", desc="maximum number of restarts") + argstr="--maxrestart=%d", desc="maximum number of restarts" + ) mm_thresh = traits.Float( - argstr="--mmthresh=%f", - desc="threshold for Mixture Model based inference") + argstr="--mmthresh=%f", desc="threshold for Mixture Model based inference" + ) no_mm = traits.Bool( - argstr="--no_mm", desc="switch off mixture modelling on IC maps") + argstr="--no_mm", desc="switch off mixture modelling on IC maps" + ) ICs = File( exists=True, argstr="--ICs=%s", - desc="filename of the IC components file for mixture modelling") + desc="filename of the IC components file for mixture modelling", + ) mix = File( exists=True, argstr="--mix=%s", - desc="mixing matrix for mixture modelling / filtering") + desc="mixing matrix for mixture modelling / filtering", + ) smode = File( exists=True, argstr="--smode=%s", - desc="matrix of session modes for report generation") + desc="matrix of session modes for report generation", + ) rem_cmp = traits.List( - traits.Int, argstr="-f %d", desc="component numbers to remove") + traits.Int, argstr="-f %d", desc="component numbers to remove" + ) report = traits.Bool(argstr="--report", desc="generate Melodic web report") bg_image = File( exists=True, argstr="--bgimage=%s", - desc="specify background image for report (default: mean image)") + desc="specify background image for report (default: mean image)", + ) tr_sec = traits.Float(argstr="--tr=%f", desc="TR in seconds") log_power = traits.Bool( - argstr="--logPower", - desc="calculate log of power for frequency spectrum") + argstr="--logPower", desc="calculate log of power for frequency spectrum" + ) t_des = File( - exists=True, - argstr="--Tdes=%s", - desc="design matrix across time-domain") + exists=True, argstr="--Tdes=%s", desc="design matrix across time-domain" + ) t_con = File( - exists=True, - argstr="--Tcon=%s", - desc="t-contrast matrix across time-domain") + exists=True, argstr="--Tcon=%s", desc="t-contrast matrix across time-domain" + ) s_des = File( - exists=True, - argstr="--Sdes=%s", - desc="design matrix across subject-domain") + exists=True, argstr="--Sdes=%s", desc="design matrix across subject-domain" + ) s_con = File( - exists=True, - argstr="--Scon=%s", - desc="t-contrast matrix across subject-domain") + exists=True, argstr="--Scon=%s", desc="t-contrast matrix across subject-domain" + ) out_all = traits.Bool(argstr="--Oall", desc="output everything") out_unmix = traits.Bool(argstr="--Ounmix", desc="output unmixing matrix") out_stats = traits.Bool( - argstr="--Ostats", desc="output thresholded maps and probability maps") + argstr="--Ostats", desc="output thresholded maps and probability maps" + ) out_pca = traits.Bool(argstr="--Opca", desc="output PCA results") out_white = traits.Bool( - argstr="--Owhite", desc="output whitening/dewhitening matrices") + argstr="--Owhite", desc="output whitening/dewhitening matrices" + ) out_orig = traits.Bool(argstr="--Oorig", desc="output the original ICs") out_mean = traits.Bool(argstr="--Omean", 
desc="output mean volume") report_maps = traits.Str( argstr="--report_maps=%s", - desc="control string for spatial map images (see slicer)") + desc="control string for spatial map images (see slicer)", + ) remove_deriv = traits.Bool( argstr="--remove_deriv", - desc="removes every second entry in paradigm file (EV derivatives)") + desc="removes every second entry in paradigm file (EV derivatives)", + ) class MELODICOutputSpec(TraitedSpec): @@ -1753,18 +1846,19 @@ class MELODIC(FSLCommand): """ + input_spec = MELODICInputSpec output_spec = MELODICOutputSpec - _cmd = 'melodic' + _cmd = "melodic" def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_dir): - outputs['out_dir'] = os.path.abspath(self.inputs.out_dir) + outputs["out_dir"] = os.path.abspath(self.inputs.out_dir) else: - outputs['out_dir'] = self._gen_filename("out_dir") + outputs["out_dir"] = self._gen_filename("out_dir") if isdefined(self.inputs.report) and self.inputs.report: - outputs['report_dir'] = os.path.join(outputs['out_dir'], "report") + outputs["report_dir"] = os.path.join(outputs["out_dir"], "report") return outputs def _gen_filename(self, name): @@ -1774,28 +1868,26 @@ def _gen_filename(self, name): class SmoothEstimateInputSpec(FSLCommandInputSpec): dof = traits.Int( - argstr='--dof=%d', + argstr="--dof=%d", mandatory=True, - xor=['zstat_file'], - desc='number of degrees of freedom') + xor=["zstat_file"], + desc="number of degrees of freedom", + ) mask_file = File( - argstr='--mask=%s', - exists=True, - mandatory=True, - desc='brain mask volume') + argstr="--mask=%s", exists=True, mandatory=True, desc="brain mask volume" + ) residual_fit_file = File( - argstr='--res=%s', - exists=True, - requires=['dof'], - desc='residual-fit image file') + argstr="--res=%s", exists=True, requires=["dof"], desc="residual-fit image file" + ) zstat_file = File( - argstr='--zstat=%s', exists=True, xor=['dof'], desc='zstat image file') + argstr="--zstat=%s", exists=True, xor=["dof"], desc="zstat image file" + ) class SmoothEstimateOutputSpec(TraitedSpec): - dlh = traits.Float(desc='smoothness estimate sqrt(det(Lambda))') - volume = traits.Int(desc='number of voxels in mask') - resels = traits.Float(desc='number of resels') + dlh = traits.Float(desc="smoothness estimate sqrt(det(Lambda))") + volume = traits.Int(desc="number of voxels in mask") + resels = traits.Float(desc="number of resels") class SmoothEstimate(FSLCommand): @@ -1814,11 +1906,11 @@ class SmoothEstimate(FSLCommand): input_spec = SmoothEstimateInputSpec output_spec = SmoothEstimateOutputSpec - _cmd = 'smoothest' + _cmd = "smoothest" def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() - stdout = runtime.stdout.split('\n') + stdout = runtime.stdout.split("\n") outputs.dlh = float(stdout[0].split()[1]) outputs.volume = int(stdout[1].split()[1]) outputs.resels = float(stdout[2].split()[1]) @@ -1826,121 +1918,130 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): class ClusterInputSpec(FSLCommandInputSpec): - in_file = File( - argstr='--in=%s', mandatory=True, exists=True, desc='input volume') + in_file = File(argstr="--in=%s", mandatory=True, exists=True, desc="input volume") threshold = traits.Float( - argstr='--thresh=%.10f', - mandatory=True, - desc='threshold for input volume') + argstr="--thresh=%.10f", mandatory=True, desc="threshold for input volume" + ) out_index_file = traits.Either( traits.Bool, File, - argstr='--oindex=%s', - desc='output of cluster index (in size order)', - 
hash_files=False) + argstr="--oindex=%s", + desc="output of cluster index (in size order)", + hash_files=False, + ) out_threshold_file = traits.Either( traits.Bool, File, - argstr='--othresh=%s', - desc='thresholded image', - hash_files=False) + argstr="--othresh=%s", + desc="thresholded image", + hash_files=False, + ) out_localmax_txt_file = traits.Either( traits.Bool, File, - argstr='--olmax=%s', - desc='local maxima text file', - hash_files=False) + argstr="--olmax=%s", + desc="local maxima text file", + hash_files=False, + ) out_localmax_vol_file = traits.Either( traits.Bool, File, - argstr='--olmaxim=%s', - desc='output of local maxima volume', - hash_files=False) + argstr="--olmaxim=%s", + desc="output of local maxima volume", + hash_files=False, + ) out_size_file = traits.Either( traits.Bool, File, - argstr='--osize=%s', - desc='filename for output of size image', - hash_files=False) + argstr="--osize=%s", + desc="filename for output of size image", + hash_files=False, + ) out_max_file = traits.Either( traits.Bool, File, - argstr='--omax=%s', - desc='filename for output of max image', - hash_files=False) + argstr="--omax=%s", + desc="filename for output of max image", + hash_files=False, + ) out_mean_file = traits.Either( traits.Bool, File, - argstr='--omean=%s', - desc='filename for output of mean image', - hash_files=False) + argstr="--omean=%s", + desc="filename for output of mean image", + hash_files=False, + ) out_pval_file = traits.Either( traits.Bool, File, - argstr='--opvals=%s', - desc='filename for image output of log pvals', - hash_files=False) + argstr="--opvals=%s", + desc="filename for image output of log pvals", + hash_files=False, + ) pthreshold = traits.Float( - argstr='--pthresh=%.10f', - requires=['dlh', 'volume'], - desc='p-threshold for clusters') + argstr="--pthresh=%.10f", + requires=["dlh", "volume"], + desc="p-threshold for clusters", + ) peak_distance = traits.Float( - argstr='--peakdist=%.10f', - desc='minimum distance between local maxima/minima, in mm (default 0)') - cope_file = File(argstr='--cope=%s', desc='cope volume') - volume = traits.Int( - argstr='--volume=%d', desc='number of voxels in the mask') + argstr="--peakdist=%.10f", + desc="minimum distance between local maxima/minima, in mm (default 0)", + ) + cope_file = File(argstr="--cope=%s", desc="cope volume") + volume = traits.Int(argstr="--volume=%d", desc="number of voxels in the mask") dlh = traits.Float( - argstr='--dlh=%.10f', desc='smoothness estimate = sqrt(det(Lambda))') + argstr="--dlh=%.10f", desc="smoothness estimate = sqrt(det(Lambda))" + ) fractional = traits.Bool( False, usedefault=True, - argstr='--fractional', - desc='interprets the threshold as a fraction of the robust range') + argstr="--fractional", + desc="interprets the threshold as a fraction of the robust range", + ) connectivity = traits.Int( - argstr='--connectivity=%d', - desc='the connectivity of voxels (default 26)') + argstr="--connectivity=%d", desc="the connectivity of voxels (default 26)" + ) use_mm = traits.Bool( - False, - usedefault=True, - argstr='--mm', - desc='use mm, not voxel, coordinates') + False, usedefault=True, argstr="--mm", desc="use mm, not voxel, coordinates" + ) find_min = traits.Bool( - False, - usedefault=True, - argstr='--min', - desc='find minima instead of maxima') + False, usedefault=True, argstr="--min", desc="find minima instead of maxima" + ) no_table = traits.Bool( False, usedefault=True, - argstr='--no_table', - desc='suppresses printing of the table info') + argstr="--no_table", + 
desc="suppresses printing of the table info", + ) minclustersize = traits.Bool( False, usedefault=True, - argstr='--minclustersize', - desc='prints out minimum significant cluster size') + argstr="--minclustersize", + desc="prints out minimum significant cluster size", + ) xfm_file = File( - argstr='--xfm=%s', - desc=('filename for Linear: input->standard-space ' - 'transform. Non-linear: input->highres transform')) + argstr="--xfm=%s", + desc=( + "filename for Linear: input->standard-space " + "transform. Non-linear: input->highres transform" + ), + ) std_space_file = File( - argstr='--stdvol=%s', desc='filename for standard-space volume') - num_maxima = traits.Int( - argstr='--num=%d', desc='no of local maxima to report') - warpfield_file = File( - argstr='--warpvol=%s', desc='file contining warpfield') + argstr="--stdvol=%s", desc="filename for standard-space volume" + ) + num_maxima = traits.Int(argstr="--num=%d", desc="no of local maxima to report") + warpfield_file = File(argstr="--warpvol=%s", desc="file contining warpfield") class ClusterOutputSpec(TraitedSpec): - index_file = File(desc='output of cluster index (in size order)') - threshold_file = File(desc='thresholded image') - localmax_txt_file = File(desc='local maxima text file') - localmax_vol_file = File(desc='output of local maxima volume') - size_file = File(desc='filename for output of size image') - max_file = File(desc='filename for output of max image') - mean_file = File(desc='filename for output of mean image') - pval_file = File(desc='filename for image output of log pvals') + index_file = File(desc="output of cluster index (in size order)") + threshold_file = File(desc="thresholded image") + localmax_txt_file = File(desc="local maxima text file") + localmax_vol_file = File(desc="output of local maxima volume") + size_file = File(desc="filename for output of size image") + max_file = File(desc="filename for output of max image") + mean_file = File(desc="filename for output of mean image") + pval_file = File(desc="filename for image output of log pvals") class Cluster(FSLCommand): @@ -1958,19 +2059,20 @@ class Cluster(FSLCommand): 'cluster --in=zstat1.nii.gz --olmax=stats.txt --thresh=2.3000000000 --mm' """ + input_spec = ClusterInputSpec output_spec = ClusterOutputSpec - _cmd = 'cluster' + _cmd = "cluster" filemap = { - 'out_index_file': 'index', - 'out_threshold_file': 'threshold', - 'out_localmax_txt_file': 'localmax.txt', - 'out_localmax_vol_file': 'localmax', - 'out_size_file': 'size', - 'out_max_file': 'max', - 'out_mean_file': 'mean', - 'out_pval_file': 'pval' + "out_index_file": "index", + "out_threshold_file": "threshold", + "out_localmax_txt_file": "localmax.txt", + "out_localmax_vol_file": "localmax", + "out_size_file": "size", + "out_max_file": "max", + "out_mean_file": "mean", + "out_pval_file": "pval", } def _list_outputs(self): @@ -1982,12 +2084,13 @@ def _list_outputs(self): if isinstance(inval, bool): if inval: change_ext = True - if suffix.endswith('.txt'): + if suffix.endswith(".txt"): change_ext = False outputs[outkey] = self._gen_fname( self.inputs.in_file, - suffix='_' + suffix, - change_ext=change_ext) + suffix="_" + suffix, + change_ext=change_ext, + ) else: outputs[outkey] = os.path.abspath(inval) return outputs @@ -2017,44 +2120,48 @@ class DualRegressionInputSpec(FSLCommandInputSpec): mandatory=True, position=1, desc="4D image containing spatial IC maps (melodic_IC) from the " - "whole-group ICA analysis") + "whole-group ICA analysis", + ) des_norm = traits.Bool( True, argstr="%i", 
position=2, usedefault=True, desc="Whether to variance-normalise the timecourses used as the " - "stage-2 regressors; True is default and recommended") + "stage-2 regressors; True is default and recommended", + ) one_sample_group_mean = traits.Bool( argstr="-1", position=3, - desc="perform 1-sample group-mean test instead of generic " - "permutation test") + desc="perform 1-sample group-mean test instead of generic " "permutation test", + ) design_file = File( exists=True, argstr="%s", position=3, - desc="Design matrix for final cross-subject modelling with " - "randomise") + desc="Design matrix for final cross-subject modelling with " "randomise", + ) con_file = File( exists=True, argstr="%s", position=4, - desc="Design contrasts for final cross-subject modelling with " - "randomise") + desc="Design contrasts for final cross-subject modelling with " "randomise", + ) n_perm = traits.Int( argstr="%i", mandatory=True, position=5, desc="Number of permutations for randomise; set to 1 for just raw " - "tstat output, set to 0 to not run randomise at all.") + "tstat output, set to 0 to not run randomise at all.", + ) out_dir = Directory( "output", argstr="%s", usedefault=True, position=6, desc="This directory will be created to hold all output and logfiles", - genfile=True) + genfile=True, + ) class DualRegressionOutputSpec(TraitedSpec): @@ -2079,16 +2186,17 @@ class DualRegression(FSLCommand): >>> dual_regression.run() # doctest: +SKIP """ + input_spec = DualRegressionInputSpec output_spec = DualRegressionOutputSpec - _cmd = 'dual_regression' + _cmd = "dual_regression" def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_dir): - outputs['out_dir'] = os.path.abspath(self.inputs.out_dir) + outputs["out_dir"] = os.path.abspath(self.inputs.out_dir) else: - outputs['out_dir'] = self._gen_filename("out_dir") + outputs["out_dir"] = self._gen_filename("out_dir") return outputs def _gen_filename(self, name): @@ -2098,92 +2206,103 @@ def _gen_filename(self, name): class RandomiseInputSpec(FSLCommandInputSpec): in_file = File( - exists=True, - desc='4D input file', - argstr='-i %s', - position=0, - mandatory=True) + exists=True, desc="4D input file", argstr="-i %s", position=0, mandatory=True + ) base_name = traits.Str( - 'randomise', - desc='the rootname that all generated files will have', + "randomise", + desc="the rootname that all generated files will have", argstr='-o "%s"', position=1, - usedefault=True) + usedefault=True, + ) design_mat = File( - exists=True, desc='design matrix file', argstr='-d %s', position=2) - tcon = File( - exists=True, desc='t contrasts file', argstr='-t %s', position=3) - fcon = File(exists=True, desc='f contrasts file', argstr='-f %s') - mask = File(exists=True, desc='mask image', argstr='-m %s') + exists=True, desc="design matrix file", argstr="-d %s", position=2 + ) + tcon = File(exists=True, desc="t contrasts file", argstr="-t %s", position=3) + fcon = File(exists=True, desc="f contrasts file", argstr="-f %s") + mask = File(exists=True, desc="mask image", argstr="-m %s") x_block_labels = File( - exists=True, desc='exchangeability block labels file', argstr='-e %s') + exists=True, desc="exchangeability block labels file", argstr="-e %s" + ) demean = traits.Bool( - desc='demean data temporally before model fitting', argstr='-D') + desc="demean data temporally before model fitting", argstr="-D" + ) one_sample_group_mean = traits.Bool( - desc=('perform 1-sample group-mean test instead of generic ' - 'permutation test'), - argstr='-1') + 
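# A hedged DualRegression sketch with hypothetical inputs; n_perm=10 keeps the
# randomise stage short, and out_dir is created to hold all output:
from nipype.interfaces import fsl

dr = fsl.DualRegression()
dr.inputs.in_files = ["functional.nii"]
dr.inputs.group_IC_maps_4D = "melodic_IC.nii.gz"
dr.inputs.des_norm = False
dr.inputs.one_sample_group_mean = True
dr.inputs.n_perm = 10
dr.inputs.out_dir = "my_output_directory"
dr.run()  # doctest: +SKIP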
desc=( + "perform 1-sample group-mean test instead of generic " "permutation test" + ), + argstr="-1", + ) show_total_perms = traits.Bool( - desc=('print out how many unique permutations would be generated ' - 'and exit'), - argstr='-q') + desc=("print out how many unique permutations would be generated " "and exit"), + argstr="-q", + ) show_info_parallel_mode = traits.Bool( - desc='print out information required for parallel mode and exit', - argstr='-Q') + desc="print out information required for parallel mode and exit", argstr="-Q" + ) vox_p_values = traits.Bool( - desc='output voxelwise (corrected and uncorrected) p-value images', - argstr='-x') - tfce = traits.Bool( - desc='carry out Threshold-Free Cluster Enhancement', argstr='-T') + desc="output voxelwise (corrected and uncorrected) p-value images", argstr="-x" + ) + tfce = traits.Bool(desc="carry out Threshold-Free Cluster Enhancement", argstr="-T") tfce2D = traits.Bool( - desc=('carry out Threshold-Free Cluster Enhancement with 2D ' - 'optimisation'), - argstr='--T2') - f_only = traits.Bool(desc='calculate f-statistics only', argstr='--f_only') + desc=("carry out Threshold-Free Cluster Enhancement with 2D " "optimisation"), + argstr="--T2", + ) + f_only = traits.Bool(desc="calculate f-statistics only", argstr="--f_only") raw_stats_imgs = traits.Bool( - desc='output raw ( unpermuted ) statistic images', argstr='-R') + desc="output raw ( unpermuted ) statistic images", argstr="-R" + ) p_vec_n_dist_files = traits.Bool( - desc='output permutation vector and null distribution text files', - argstr='-P') + desc="output permutation vector and null distribution text files", argstr="-P" + ) num_perm = traits.Int( - argstr='-n %d', - desc='number of permutations (default 5000, set to 0 for exhaustive)') + argstr="-n %d", + desc="number of permutations (default 5000, set to 0 for exhaustive)", + ) seed = traits.Int( - argstr='--seed=%d', - desc='specific integer seed for random number generator') + argstr="--seed=%d", desc="specific integer seed for random number generator" + ) var_smooth = traits.Int( - argstr='-v %d', desc='use variance smoothing (std is in mm)') + argstr="-v %d", desc="use variance smoothing (std is in mm)" + ) c_thresh = traits.Float( - argstr='-c %.1f', desc='carry out cluster-based thresholding') + argstr="-c %.1f", desc="carry out cluster-based thresholding" + ) cm_thresh = traits.Float( - argstr='-C %.1f', desc='carry out cluster-mass-based thresholding') - f_c_thresh = traits.Float( - argstr='-F %.2f', desc='carry out f cluster thresholding') + argstr="-C %.1f", desc="carry out cluster-mass-based thresholding" + ) + f_c_thresh = traits.Float(argstr="-F %.2f", desc="carry out f cluster thresholding") f_cm_thresh = traits.Float( - argstr='-S %.2f', desc='carry out f cluster-mass thresholding') + argstr="-S %.2f", desc="carry out f cluster-mass thresholding" + ) tfce_H = traits.Float( - argstr='--tfce_H=%.2f', desc='TFCE height parameter (default=2)') + argstr="--tfce_H=%.2f", desc="TFCE height parameter (default=2)" + ) tfce_E = traits.Float( - argstr='--tfce_E=%.2f', desc='TFCE extent parameter (default=0.5)') + argstr="--tfce_E=%.2f", desc="TFCE extent parameter (default=0.5)" + ) tfce_C = traits.Float( - argstr='--tfce_C=%.2f', desc='TFCE connectivity (6 or 26; default=6)') + argstr="--tfce_C=%.2f", desc="TFCE connectivity (6 or 26; default=6)" + ) class RandomiseOutputSpec(TraitedSpec): - tstat_files = traits.List( - File(exists=True), desc='t contrast raw statistic') - fstat_files = traits.List( - 
-        File(exists=True), desc='f contrast raw statistic')
+    tstat_files = traits.List(File(exists=True), desc="t contrast raw statistic")
+    fstat_files = traits.List(File(exists=True), desc="f contrast raw statistic")
     t_p_files = traits.List(
-        File(exists=True), desc='f contrast uncorrected p values files')
+        File(exists=True), desc="t contrast uncorrected p values files"
+    )
     f_p_files = traits.List(
-        File(exists=True), desc='f contrast uncorrected p values files')
+        File(exists=True), desc="f contrast uncorrected p values files"
+    )
     t_corrected_p_files = traits.List(
         File(exists=True),
-        desc='t contrast FWE (Family-wise error) corrected p values files')
+        desc="t contrast FWE (Family-wise error) corrected p values files",
+    )
     f_corrected_p_files = traits.List(
         File(exists=True),
-        desc='f contrast FWE (Family-wise error) corrected p values files')
+        desc="f contrast FWE (Family-wise error) corrected p values files",
+    )
 
 
 class Randomise(FSLCommand):
@@ -2200,153 +2319,180 @@ class Randomise(FSLCommand):
 
     """
 
-    _cmd = 'randomise'
+    _cmd = "randomise"
     input_spec = RandomiseInputSpec
     output_spec = RandomiseOutputSpec
 
     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['tstat_files'] = glob(
-            self._gen_fname('%s_tstat*.nii' % self.inputs.base_name))
-        outputs['fstat_files'] = glob(
-            self._gen_fname('%s_fstat*.nii' % self.inputs.base_name))
+        outputs["tstat_files"] = glob(
+            self._gen_fname("%s_tstat*.nii" % self.inputs.base_name)
+        )
+        outputs["fstat_files"] = glob(
+            self._gen_fname("%s_fstat*.nii" % self.inputs.base_name)
+        )
         prefix = False
         if self.inputs.tfce or self.inputs.tfce2D:
-            prefix = 'tfce'
+            prefix = "tfce"
         elif self.inputs.vox_p_values:
-            prefix = 'vox'
+            prefix = "vox"
         elif self.inputs.c_thresh or self.inputs.f_c_thresh:
-            prefix = 'clustere'
+            prefix = "clustere"
         elif self.inputs.cm_thresh or self.inputs.f_cm_thresh:
-            prefix = 'clusterm'
+            prefix = "clusterm"
         if prefix:
-            outputs['t_p_files'] = glob(
-                self._gen_fname('%s_%s_p_tstat*' % (self.inputs.base_name,
-                                                    prefix)))
-            outputs['t_corrected_p_files'] = glob(
-                self._gen_fname('%s_%s_corrp_tstat*.nii' %
-                                (self.inputs.base_name, prefix)))
-
-            outputs['f_p_files'] = glob(
-                self._gen_fname('%s_%s_p_fstat*.nii' % (self.inputs.base_name,
-                                                        prefix)))
-            outputs['f_corrected_p_files'] = glob(
-                self._gen_fname('%s_%s_corrp_fstat*.nii' %
-                                (self.inputs.base_name, prefix)))
+            outputs["t_p_files"] = glob(
+                self._gen_fname("%s_%s_p_tstat*" % (self.inputs.base_name, prefix))
+            )
+            outputs["t_corrected_p_files"] = glob(
+                self._gen_fname(
+                    "%s_%s_corrp_tstat*.nii" % (self.inputs.base_name, prefix)
+                )
+            )
+
+            outputs["f_p_files"] = glob(
+                self._gen_fname("%s_%s_p_fstat*.nii" % (self.inputs.base_name, prefix))
+            )
+            outputs["f_corrected_p_files"] = glob(
+                self._gen_fname(
+                    "%s_%s_corrp_fstat*.nii" % (self.inputs.base_name, prefix)
+                )
+            )
         return outputs
 
 
 class GLMInputSpec(FSLCommandInputSpec):
     in_file = File(
         exists=True,
-        argstr='-i %s',
+        argstr="-i %s",
         mandatory=True,
         position=1,
-        desc='input file name (text matrix or 3D/4D image file)')
+        desc="input file name (text matrix or 3D/4D image file)",
+    )
     out_file = File(
         name_template="%s_glm",
-        argstr='-o %s',
+        argstr="-o %s",
         position=3,
-        desc=('filename for GLM parameter estimates' + ' (GLM betas)'),
+        desc=("filename for GLM parameter estimates" + " (GLM betas)"),
         name_source="in_file",
-        keep_extension=True)
+        keep_extension=True,
+    )
     design = File(
         exists=True,
-        argstr='-d %s',
+        argstr="-d %s",
         mandatory=True,
         position=2,
-        desc=('file name of the GLM 
design matrix (text time' + - ' courses for temporal regression or an image' + - ' file for spatial regression)')) + desc=( + "file name of the GLM design matrix (text time" + + " courses for temporal regression or an image" + + " file for spatial regression)" + ), + ) contrasts = File( - exists=True, argstr='-c %s', desc=('matrix of t-statics contrasts')) + exists=True, argstr="-c %s", desc=("matrix of t-statics contrasts") + ) mask = File( - exists=True, - argstr='-m %s', - desc=('mask image file name if input is image')) - dof = traits.Int( - argstr='--dof=%d', desc=('set degrees of freedom' + ' explicitly')) + exists=True, argstr="-m %s", desc=("mask image file name if input is image") + ) + dof = traits.Int(argstr="--dof=%d", desc=("set degrees of freedom" + " explicitly")) des_norm = traits.Bool( - argstr='--des_norm', - desc=('switch on normalization of the design' + - ' matrix columns to unit std deviation')) + argstr="--des_norm", + desc=( + "switch on normalization of the design" + + " matrix columns to unit std deviation" + ), + ) dat_norm = traits.Bool( - argstr='--dat_norm', - desc=('switch on normalization of the data time series to unit std ' - 'deviation')) + argstr="--dat_norm", + desc=( + "switch on normalization of the data time series to unit std " "deviation" + ), + ) var_norm = traits.Bool( - argstr='--vn', desc=('perform MELODIC variance-normalisation on data')) + argstr="--vn", desc=("perform MELODIC variance-normalisation on data") + ) demean = traits.Bool( - argstr='--demean', desc=('switch on demeaining of design and data')) + argstr="--demean", desc=("switch on demeaining of design and data") + ) out_cope = File( - argstr='--out_cope=%s', - desc='output file name for COPE (either as txt or image') + argstr="--out_cope=%s", desc="output file name for COPE (either as txt or image" + ) out_z_name = File( - argstr='--out_z=%s', - desc='output file name for Z-stats (either as txt or image') + argstr="--out_z=%s", desc="output file name for Z-stats (either as txt or image" + ) out_t_name = File( - argstr='--out_t=%s', - desc='output file name for t-stats (either as txt or image') + argstr="--out_t=%s", desc="output file name for t-stats (either as txt or image" + ) out_p_name = File( - argstr='--out_p=%s', - desc=('output file name for p-values of Z-stats (either as text file ' - 'or image)')) + argstr="--out_p=%s", + desc=( + "output file name for p-values of Z-stats (either as text file " "or image)" + ), + ) out_f_name = File( - argstr='--out_f=%s', - desc='output file name for F-value of full model fit') + argstr="--out_f=%s", desc="output file name for F-value of full model fit" + ) out_pf_name = File( - argstr='--out_pf=%s', - desc='output file name for p-value for full model fit') - out_res_name = File( - argstr='--out_res=%s', desc='output file name for residuals') + argstr="--out_pf=%s", desc="output file name for p-value for full model fit" + ) + out_res_name = File(argstr="--out_res=%s", desc="output file name for residuals") out_varcb_name = File( - argstr='--out_varcb=%s', desc='output file name for variance of COPEs') + argstr="--out_varcb=%s", desc="output file name for variance of COPEs" + ) out_sigsq_name = File( - argstr='--out_sigsq=%s', - desc=('output file name for residual noise variance sigma-square')) + argstr="--out_sigsq=%s", + desc=("output file name for residual noise variance sigma-square"), + ) out_data_name = File( - argstr='--out_data=%s', desc='output file name for pre-processed data') + argstr="--out_data=%s", desc="output file name 
for pre-processed data" + ) out_vnscales_name = File( - argstr='--out_vnscales=%s', - desc=('output file name for scaling factors for variance ' - 'normalisation')) + argstr="--out_vnscales=%s", + desc=("output file name for scaling factors for variance " "normalisation"), + ) class GLMOutputSpec(TraitedSpec): - out_file = File( - exists=True, desc=('file name of GLM parameters (if generated)')) + out_file = File(exists=True, desc=("file name of GLM parameters (if generated)")) out_cope = OutputMultiPath( File(exists=True), - desc=('output file name for COPEs (either as text file or image)')) + desc=("output file name for COPEs (either as text file or image)"), + ) out_z = OutputMultiPath( File(exists=True), - desc=('output file name for COPEs (either as text file or image)')) + desc=("output file name for Z-stats (either as text file or image)"), + ) out_t = OutputMultiPath( File(exists=True), - desc=('output file name for t-stats (either as text file or image)')) + desc=("output file name for t-stats (either as text file or image)"), + ) out_p = OutputMultiPath( File(exists=True), - desc=('output file name for p-values of Z-stats (either as text file ' - 'or image)')) + desc=( + "output file name for p-values of Z-stats (either as text file " "or image)" + ), + ) out_f = OutputMultiPath( - File(exists=True), - desc=('output file name for F-value of full model fit')) + File(exists=True), desc=("output file name for F-value of full model fit") + ) out_pf = OutputMultiPath( - File(exists=True), - desc=('output file name for p-value for full model fit')) - out_res = OutputMultiPath( - File(exists=True), desc='output file name for residuals') + File(exists=True), desc=("output file name for p-value for full model fit") + ) + out_res = OutputMultiPath(File(exists=True), desc="output file name for residuals") out_varcb = OutputMultiPath( - File(exists=True), desc='output file name for variance of COPEs') + File(exists=True), desc="output file name for variance of COPEs" + ) out_sigsq = OutputMultiPath( File(exists=True), - desc=('output file name for residual noise variance sigma-square')) + desc=("output file name for residual noise variance sigma-square"), + ) out_data = OutputMultiPath( - File(exists=True), desc='output file for preprocessed data') + File(exists=True), desc="output file for preprocessed data" + ) out_vnscales = OutputMultiPath( File(exists=True), - desc=('output file name for scaling factors for variance ' - 'normalisation')) + desc=("output file name for scaling factors for variance " "normalisation"), + ) class GLM(FSLCommand): @@ -2361,7 +2507,8 @@ class GLM(FSLCommand): 'fsl_glm -i functional.nii -d maps.nii -o functional_glm.nii' """ - _cmd = 'fsl_glm' + + _cmd = "fsl_glm" input_spec = GLMInputSpec output_spec = GLMOutputSpec @@ -2369,38 +2516,37 @@ def _list_outputs(self): outputs = super(GLM, self)._list_outputs() if isdefined(self.inputs.out_cope): - outputs['out_cope'] = os.path.abspath(self.inputs.out_cope) + outputs["out_cope"] = os.path.abspath(self.inputs.out_cope) if isdefined(self.inputs.out_z_name): - outputs['out_z'] = os.path.abspath(self.inputs.out_z_name) + outputs["out_z"] = os.path.abspath(self.inputs.out_z_name) if isdefined(self.inputs.out_t_name): - outputs['out_t'] = os.path.abspath(self.inputs.out_t_name) + outputs["out_t"] = os.path.abspath(self.inputs.out_t_name) if isdefined(self.inputs.out_p_name): - outputs['out_p'] = os.path.abspath(self.inputs.out_p_name) + outputs["out_p"] = os.path.abspath(self.inputs.out_p_name) if 
isdefined(self.inputs.out_f_name): - outputs['out_f'] = os.path.abspath(self.inputs.out_f_name) + outputs["out_f"] = os.path.abspath(self.inputs.out_f_name) if isdefined(self.inputs.out_pf_name): - outputs['out_pf'] = os.path.abspath(self.inputs.out_pf_name) + outputs["out_pf"] = os.path.abspath(self.inputs.out_pf_name) if isdefined(self.inputs.out_res_name): - outputs['out_res'] = os.path.abspath(self.inputs.out_res_name) + outputs["out_res"] = os.path.abspath(self.inputs.out_res_name) if isdefined(self.inputs.out_varcb_name): - outputs['out_varcb'] = os.path.abspath(self.inputs.out_varcb_name) + outputs["out_varcb"] = os.path.abspath(self.inputs.out_varcb_name) if isdefined(self.inputs.out_sigsq_name): - outputs['out_sigsq'] = os.path.abspath(self.inputs.out_sigsq_name) + outputs["out_sigsq"] = os.path.abspath(self.inputs.out_sigsq_name) if isdefined(self.inputs.out_data_name): - outputs['out_data'] = os.path.abspath(self.inputs.out_data_name) + outputs["out_data"] = os.path.abspath(self.inputs.out_data_name) if isdefined(self.inputs.out_vnscales_name): - outputs['out_vnscales'] = os.path.abspath( - self.inputs.out_vnscales_name) + outputs["out_vnscales"] = os.path.abspath(self.inputs.out_vnscales_name) return outputs @@ -2419,9 +2565,10 @@ def load_template(name): """ from pkg_resources import resource_filename as pkgrf - full_fname = pkgrf('nipype', - os.path.join('interfaces', 'fsl', 'model_templates', - name)) + + full_fname = pkgrf( + "nipype", os.path.join("interfaces", "fsl", "model_templates", name) + ) with open(full_fname) as template_file: template = Template(template_file.read()) diff --git a/nipype/interfaces/fsl/possum.py b/nipype/interfaces/fsl/possum.py index 50b88db185..88797aaecd 100644 --- a/nipype/interfaces/fsl/possum.py +++ b/nipype/interfaces/fsl/possum.py @@ -18,76 +18,100 @@ class B0CalcInputSpec(FSLCommandInputSpec): in_file = File( exists=True, mandatory=True, - argstr='-i %s', + argstr="-i %s", position=0, - desc='filename of input image (usually a tissue/air segmentation)') + desc="filename of input image (usually a tissue/air segmentation)", + ) out_file = File( - argstr='-o %s', + argstr="-o %s", position=1, - name_source=['in_file'], - name_template='%s_b0field', - output_name='out_file', - desc='filename of B0 output volume') + name_source=["in_file"], + name_template="%s_b0field", + output_name="out_file", + desc="filename of B0 output volume", + ) x_grad = traits.Float( - 0.0, usedefault=True, - argstr='--gx=%0.4f', - desc='Value for zeroth-order x-gradient field (per mm)') + 0.0, + usedefault=True, + argstr="--gx=%0.4f", + desc="Value for zeroth-order x-gradient field (per mm)", + ) y_grad = traits.Float( - 0.0, usedefault=True, - argstr='--gy=%0.4f', - desc='Value for zeroth-order y-gradient field (per mm)') + 0.0, + usedefault=True, + argstr="--gy=%0.4f", + desc="Value for zeroth-order y-gradient field (per mm)", + ) z_grad = traits.Float( - 0.0, usedefault=True, - argstr='--gz=%0.4f', - desc='Value for zeroth-order z-gradient field (per mm)') + 0.0, + usedefault=True, + argstr="--gz=%0.4f", + desc="Value for zeroth-order z-gradient field (per mm)", + ) x_b0 = traits.Float( - 0.0, usedefault=True, - argstr='--b0x=%0.2f', - xor=['xyz_b0'], - desc='Value for zeroth-order b0 field (x-component), in Tesla') + 0.0, + usedefault=True, + argstr="--b0x=%0.2f", + xor=["xyz_b0"], + desc="Value for zeroth-order b0 field (x-component), in Tesla", + ) y_b0 = traits.Float( - 0.0, usedefault=True, - argstr='--b0y=%0.2f', - xor=['xyz_b0'], - desc='Value for 
zeroth-order b0 field (y-component), in Tesla') + 0.0, + usedefault=True, + argstr="--b0y=%0.2f", + xor=["xyz_b0"], + desc="Value for zeroth-order b0 field (y-component), in Tesla", + ) z_b0 = traits.Float( - 1.0, usedefault=True, - argstr='--b0=%0.2f', - xor=['xyz_b0'], - desc='Value for zeroth-order b0 field (z-component), in Tesla') + 1.0, + usedefault=True, + argstr="--b0=%0.2f", + xor=["xyz_b0"], + desc="Value for zeroth-order b0 field (z-component), in Tesla", + ) xyz_b0 = traits.Tuple( traits.Float, traits.Float, traits.Float, - argstr='--b0x=%0.2f --b0y=%0.2f --b0=%0.2f', - xor=['x_b0', 'y_b0', 'z_b0'], - desc='Zeroth-order B0 field in Tesla') + argstr="--b0x=%0.2f --b0y=%0.2f --b0=%0.2f", + xor=["x_b0", "y_b0", "z_b0"], + desc="Zeroth-order B0 field in Tesla", + ) delta = traits.Float( - -9.45e-6, usedefault=True, - argstr='-d %e', desc='Delta value (chi_tissue - chi_air)') + -9.45e-6, + usedefault=True, + argstr="-d %e", + desc="Delta value (chi_tissue - chi_air)", + ) chi_air = traits.Float( - 4.0e-7, usedefault=True, - argstr='--chi0=%e', desc='susceptibility of air') + 4.0e-7, usedefault=True, argstr="--chi0=%e", desc="susceptibility of air" + ) compute_xyz = traits.Bool( - False, usedefault=True, - argstr='--xyz', - desc='calculate and save all 3 field components (i.e. x,y,z)') + False, + usedefault=True, + argstr="--xyz", + desc="calculate and save all 3 field components (i.e. x,y,z)", + ) extendboundary = traits.Float( - 1.0, usedefault=True, - argstr='--extendboundary=%0.2f', - desc='Relative proportion to extend voxels at boundary') + 1.0, + usedefault=True, + argstr="--extendboundary=%0.2f", + desc="Relative proportion to extend voxels at boundary", + ) directconv = traits.Bool( - False, usedefault=True, - argstr='--directconv', - desc='use direct (image space) convolution, not FFT') + False, + usedefault=True, + argstr="--directconv", + desc="use direct (image space) convolution, not FFT", + ) class B0CalcOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='filename of B0 output volume') + out_file = File(exists=True, desc="filename of B0 output volume") class B0Calc(FSLCommand): @@ -112,6 +136,6 @@ class B0Calc(FSLCommand): """ - _cmd = 'b0calc' + _cmd = "b0calc" input_spec = B0CalcInputSpec output_spec = B0CalcOutputSpec diff --git a/nipype/interfaces/fsl/preprocess.py b/nipype/interfaces/fsl/preprocess.py index 9207fbd497..418737be2c 100644 --- a/nipype/interfaces/fsl/preprocess.py +++ b/nipype/interfaces/fsl/preprocess.py @@ -14,8 +14,15 @@ from ... 
import LooseVersion from ...utils.filemanip import split_filename -from ..base import (TraitedSpec, File, InputMultiPath, OutputMultiPath, - Undefined, traits, isdefined) +from ..base import ( + TraitedSpec, + File, + InputMultiPath, + OutputMultiPath, + Undefined, + traits, + isdefined, +) from .base import FSLCommand, FSLCommandInputSpec, Info @@ -24,72 +31,90 @@ class BETInputSpec(FSLCommandInputSpec): # will put something on the end in_file = File( exists=True, - desc='input file to skull strip', - argstr='%s', + desc="input file to skull strip", + argstr="%s", position=0, - mandatory=True) + mandatory=True, + ) out_file = File( - desc='name of output skull stripped image', - argstr='%s', + desc="name of output skull stripped image", + argstr="%s", position=1, genfile=True, - hash_files=False) - outline = traits.Bool(desc='create surface outline image', argstr='-o') - mask = traits.Bool(desc='create binary mask image', argstr='-m') - skull = traits.Bool(desc='create skull image', argstr='-s') - no_output = traits.Bool( - argstr='-n', desc="Don't generate segmented output") - frac = traits.Float( - desc='fractional intensity threshold', argstr='-f %.2f') + hash_files=False, + ) + outline = traits.Bool(desc="create surface outline image", argstr="-o") + mask = traits.Bool(desc="create binary mask image", argstr="-m") + skull = traits.Bool(desc="create skull image", argstr="-s") + no_output = traits.Bool(argstr="-n", desc="Don't generate segmented output") + frac = traits.Float(desc="fractional intensity threshold", argstr="-f %.2f") vertical_gradient = traits.Float( - argstr='-g %.2f', - desc='vertical gradient in fractional intensity threshold (-1, 1)') - radius = traits.Int(argstr='-r %d', units='mm', desc="head radius") + argstr="-g %.2f", + desc="vertical gradient in fractional intensity threshold (-1, 1)", + ) + radius = traits.Int(argstr="-r %d", units="mm", desc="head radius") center = traits.List( traits.Int, - desc='center of gravity in voxels', - argstr='-c %s', + desc="center of gravity in voxels", + argstr="-c %s", minlen=0, maxlen=3, - units='voxels') + units="voxels", + ) threshold = traits.Bool( - argstr='-t', - desc="apply thresholding to segmented brain image and mask") - mesh = traits.Bool(argstr='-e', desc="generate a vtk mesh brain surface") + argstr="-t", desc="apply thresholding to segmented brain image and mask" + ) + mesh = traits.Bool(argstr="-e", desc="generate a vtk mesh brain surface") # the remaining 'options' are more like modes (mutually exclusive) that # FSL actually implements in a shell script wrapper around the bet binary. 
# for some combinations of them in specific order a call would not fail, # but in general using more than one of the following is clearly not # supported - _xor_inputs = ('functional', 'reduce_bias', 'robust', 'padding', - 'remove_eyes', 'surfaces', 't2_guided') + _xor_inputs = ( + "functional", + "reduce_bias", + "robust", + "padding", + "remove_eyes", + "surfaces", + "t2_guided", + ) robust = traits.Bool( - desc='robust brain centre estimation (iterates BET several times)', - argstr='-R', - xor=_xor_inputs) + desc="robust brain centre estimation (iterates BET several times)", + argstr="-R", + xor=_xor_inputs, + ) padding = traits.Bool( - desc=('improve BET if FOV is very small in Z (by temporarily padding ' - 'end slices)'), - argstr='-Z', - xor=_xor_inputs) + desc=( + "improve BET if FOV is very small in Z (by temporarily padding " + "end slices)" + ), + argstr="-Z", + xor=_xor_inputs, + ) remove_eyes = traits.Bool( - desc='eye & optic nerve cleanup (can be useful in SIENA)', - argstr='-S', - xor=_xor_inputs) + desc="eye & optic nerve cleanup (can be useful in SIENA)", + argstr="-S", + xor=_xor_inputs, + ) surfaces = traits.Bool( - desc=('run bet2 and then betsurf to get additional skull and scalp ' - 'surfaces (includes registrations)'), - argstr='-A', - xor=_xor_inputs) + desc=( + "run bet2 and then betsurf to get additional skull and scalp " + "surfaces (includes registrations)" + ), + argstr="-A", + xor=_xor_inputs, + ) t2_guided = File( - desc='as with creating surfaces, when also feeding in ' - 'non-brain-extracted T2 (includes registrations)', - argstr='-A2 %s', - xor=_xor_inputs) - functional = traits.Bool( - argstr='-F', xor=_xor_inputs, desc="apply to 4D fMRI data") + desc="as with creating surfaces, when also feeding in " + "non-brain-extracted T2 (includes registrations)", + argstr="-A2 %s", + xor=_xor_inputs, + ) + functional = traits.Bool(argstr="-F", xor=_xor_inputs, desc="apply to 4D fMRI data") reduce_bias = traits.Bool( - argstr='-B', xor=_xor_inputs, desc="bias field and neck cleanup") + argstr="-B", xor=_xor_inputs, desc="bias field and neck cleanup" + ) class BETOutputSpec(TraitedSpec): @@ -98,14 +123,11 @@ class BETOutputSpec(TraitedSpec): outline_file = File(desc="path/name of outline file (if generated)") meshfile = File(desc="path/name of vtk mesh file (if generated)") inskull_mask_file = File(desc="path/name of inskull mask (if generated)") - inskull_mesh_file = File( - desc="path/name of inskull mesh outline (if generated)") + inskull_mesh_file = File(desc="path/name of inskull mesh outline (if generated)") outskull_mask_file = File(desc="path/name of outskull mask (if generated)") - outskull_mesh_file = File( - desc="path/name of outskull mesh outline (if generated)") + outskull_mesh_file = File(desc="path/name of outskull mesh outline (if generated)") outskin_mask_file = File(desc="path/name of outskin mask (if generated)") - outskin_mesh_file = File( - desc="path/name of outskin mesh outline (if generated)") + outskin_mesh_file = File(desc="path/name of outskin mesh outline (if generated)") skull_mask_file = File(desc="path/name of skull mask (if generated)") @@ -128,7 +150,7 @@ class BET(FSLCommand): """ - _cmd = 'bet' + _cmd = "bet" input_spec = BETInputSpec output_spec = BETOutputSpec @@ -144,168 +166,188 @@ def _run_interface(self, runtime): def _gen_outfilename(self): out_file = self.inputs.out_file if not isdefined(out_file) and isdefined(self.inputs.in_file): - out_file = self._gen_fname(self.inputs.in_file, suffix='_brain') + out_file = 
self._gen_fname(self.inputs.in_file, suffix="_brain") return os.path.abspath(out_file) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self._gen_outfilename() - if ((isdefined(self.inputs.mesh) and self.inputs.mesh) - or (isdefined(self.inputs.surfaces) and self.inputs.surfaces)): - outputs['meshfile'] = self._gen_fname( - outputs['out_file'], suffix='_mesh.vtk', change_ext=False) - if (isdefined(self.inputs.mask) and self.inputs.mask) or \ - (isdefined(self.inputs.reduce_bias) and - self.inputs.reduce_bias): - outputs['mask_file'] = self._gen_fname( - outputs['out_file'], suffix='_mask') + outputs["out_file"] = self._gen_outfilename() + if (isdefined(self.inputs.mesh) and self.inputs.mesh) or ( + isdefined(self.inputs.surfaces) and self.inputs.surfaces + ): + outputs["meshfile"] = self._gen_fname( + outputs["out_file"], suffix="_mesh.vtk", change_ext=False + ) + if (isdefined(self.inputs.mask) and self.inputs.mask) or ( + isdefined(self.inputs.reduce_bias) and self.inputs.reduce_bias + ): + outputs["mask_file"] = self._gen_fname(outputs["out_file"], suffix="_mask") if isdefined(self.inputs.outline) and self.inputs.outline: - outputs['outline_file'] = self._gen_fname( - outputs['out_file'], suffix='_overlay') + outputs["outline_file"] = self._gen_fname( + outputs["out_file"], suffix="_overlay" + ) if isdefined(self.inputs.surfaces) and self.inputs.surfaces: - outputs['inskull_mask_file'] = self._gen_fname( - outputs['out_file'], suffix='_inskull_mask') - outputs['inskull_mesh_file'] = self._gen_fname( - outputs['out_file'], suffix='_inskull_mesh') - outputs['outskull_mask_file'] = self._gen_fname( - outputs['out_file'], suffix='_outskull_mask') - outputs['outskull_mesh_file'] = self._gen_fname( - outputs['out_file'], suffix='_outskull_mesh') - outputs['outskin_mask_file'] = self._gen_fname( - outputs['out_file'], suffix='_outskin_mask') - outputs['outskin_mesh_file'] = self._gen_fname( - outputs['out_file'], suffix='_outskin_mesh') - outputs['skull_mask_file'] = self._gen_fname( - outputs['out_file'], suffix='_skull_mask') + outputs["inskull_mask_file"] = self._gen_fname( + outputs["out_file"], suffix="_inskull_mask" + ) + outputs["inskull_mesh_file"] = self._gen_fname( + outputs["out_file"], suffix="_inskull_mesh" + ) + outputs["outskull_mask_file"] = self._gen_fname( + outputs["out_file"], suffix="_outskull_mask" + ) + outputs["outskull_mesh_file"] = self._gen_fname( + outputs["out_file"], suffix="_outskull_mesh" + ) + outputs["outskin_mask_file"] = self._gen_fname( + outputs["out_file"], suffix="_outskin_mask" + ) + outputs["outskin_mesh_file"] = self._gen_fname( + outputs["out_file"], suffix="_outskin_mesh" + ) + outputs["skull_mask_file"] = self._gen_fname( + outputs["out_file"], suffix="_skull_mask" + ) if isdefined(self.inputs.no_output) and self.inputs.no_output: - outputs['out_file'] = Undefined + outputs["out_file"] = Undefined return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._gen_outfilename() return None class FASTInputSpec(FSLCommandInputSpec): """ Defines inputs (trait classes) for FAST """ + in_files = InputMultiPath( File(exists=True), copyfile=False, - desc='image, or multi-channel set of images, ' - 'to be segmented', - argstr='%s', + desc="image, or multi-channel set of images, " "to be segmented", + argstr="%s", position=-1, - mandatory=True) - out_basename = File(desc='base name of output files', argstr='-o %s') + mandatory=True, + ) + out_basename = File(desc="base name of 
output files", argstr="-o %s") # ^^ uses in_file name as basename if none given number_classes = traits.Range( - low=1, high=10, argstr='-n %d', desc='number of tissue-type classes') - output_biasfield = traits.Bool( - desc='output estimated bias field', argstr='-b') + low=1, high=10, argstr="-n %d", desc="number of tissue-type classes" + ) + output_biasfield = traits.Bool(desc="output estimated bias field", argstr="-b") output_biascorrected = traits.Bool( - desc='output restored image (bias-corrected image)', argstr='-B') + desc="output restored image (bias-corrected image)", argstr="-B" + ) img_type = traits.Enum( (1, 2, 3), - desc='int specifying type of image: (1 = T1, 2 = T2, 3 = PD)', - argstr='-t %d') + desc="int specifying type of image: (1 = T1, 2 = T2, 3 = PD)", + argstr="-t %d", + ) bias_iters = traits.Range( low=1, high=10, - argstr='-I %d', - desc='number of main-loop iterations during ' - 'bias-field removal') + argstr="-I %d", + desc="number of main-loop iterations during " "bias-field removal", + ) bias_lowpass = traits.Range( low=4, high=40, - desc='bias field smoothing extent (FWHM) ' - 'in mm', - argstr='-l %d', - units='mm') + desc="bias field smoothing extent (FWHM) " "in mm", + argstr="-l %d", + units="mm", + ) init_seg_smooth = traits.Range( low=0.0001, high=0.1, - desc='initial segmentation spatial ' - 'smoothness (during bias field ' - 'estimation)', - argstr='-f %.3f') + desc="initial segmentation spatial " + "smoothness (during bias field " + "estimation)", + argstr="-f %.3f", + ) segments = traits.Bool( - desc='outputs a separate binary image for each ' - 'tissue type', - argstr='-g') + desc="outputs a separate binary image for each " "tissue type", argstr="-g" + ) init_transform = File( exists=True, - desc=' initialise' - ' using priors', - argstr='-a %s') + desc=" initialise" " using priors", + argstr="-a %s", + ) other_priors = InputMultiPath( File(exist=True), - desc='alternative prior images', - argstr='-A %s', + desc="alternative prior images", + argstr="-A %s", minlen=3, - maxlen=3) + maxlen=3, + ) no_pve = traits.Bool( - desc='turn off PVE (partial volume estimation)', argstr='--nopve') - no_bias = traits.Bool(desc='do not remove bias field', argstr='-N') - use_priors = traits.Bool(desc='use priors throughout', argstr='-P') + desc="turn off PVE (partial volume estimation)", argstr="--nopve" + ) + no_bias = traits.Bool(desc="do not remove bias field", argstr="-N") + use_priors = traits.Bool(desc="use priors throughout", argstr="-P") # ^^ Must also set -a!, mutually inclusive?? No, conditional mandatory... need to figure out how to handle with traits. 
segment_iters = traits.Range( low=1, high=50, - desc='number of segmentation-initialisation' - ' iterations', - argstr='-W %d') + desc="number of segmentation-initialisation" " iterations", + argstr="-W %d", + ) mixel_smooth = traits.Range( - low=0.0, - high=1.0, - desc='spatial smoothness for mixeltype', - argstr='-R %.2f') + low=0.0, high=1.0, desc="spatial smoothness for mixeltype", argstr="-R %.2f" + ) iters_afterbias = traits.Range( low=1, high=20, - desc='number of main-loop iterations ' - 'after bias-field removal', - argstr='-O %d') + desc="number of main-loop iterations " "after bias-field removal", + argstr="-O %d", + ) hyper = traits.Range( - low=0.0, - high=1.0, - desc='segmentation spatial smoothness', - argstr='-H %.2f') - verbose = traits.Bool(desc='switch on diagnostic messages', argstr='-v') + low=0.0, high=1.0, desc="segmentation spatial smoothness", argstr="-H %.2f" + ) + verbose = traits.Bool(desc="switch on diagnostic messages", argstr="-v") manual_seg = File( - exists=True, desc='Filename containing intensities', argstr='-s %s') + exists=True, desc="Filename containing intensities", argstr="-s %s" + ) probability_maps = traits.Bool( - desc='outputs individual probability maps', argstr='-p') + desc="outputs individual probability maps", argstr="-p" + ) class FASTOutputSpec(TraitedSpec): """Specify possible outputs from FAST""" + tissue_class_map = File( exists=True, - desc='path/name of binary segmented volume file' - ' one val for each class _seg') + desc="path/name of binary segmented volume file" + " one val for each class _seg", + ) tissue_class_files = OutputMultiPath( File( desc=( - 'path/name of binary segmented volumes one file for each class ' - '_seg_x'))) + "path/name of binary segmented volumes one file for each class " + "_seg_x" + ) + ) + ) restored_image = OutputMultiPath( File( desc=( - 'restored images (one for each input image) named according to ' - 'the input images _restore'))) + "restored images (one for each input image) named according to " + "the input images _restore" + ) + ) + ) mixeltype = File(desc="path/name of mixeltype volume file _mixeltype") - partial_volume_map = File(desc='path/name of partial volume file _pveseg') + partial_volume_map = File(desc="path/name of partial volume file _pveseg") partial_volume_files = OutputMultiPath( - File( - desc='path/name of partial volumes files one for each class, _pve_x' - )) + File(desc="path/name of partial volumes files one for each class, _pve_x") + ) - bias_field = OutputMultiPath(File(desc='Estimated bias field _bias')) + bias_field = OutputMultiPath(File(desc="Estimated bias field _bias")) probability_maps = OutputMultiPath( - File(desc='filenames, one for each class, for each input, prob_x')) + File(desc="filenames, one for each class, for each input, prob_x") + ) class FAST(FSLCommand): @@ -325,14 +367,15 @@ class FAST(FSLCommand): >>> out = fastr.run() # doctest: +SKIP """ - _cmd = 'fast' + + _cmd = "fast" input_spec = FASTInputSpec output_spec = FASTOutputSpec def _format_arg(self, name, spec, value): # first do what should be done in general formatted = super(FAST, self)._format_arg(name, spec, value) - if name == 'in_files': + if name == "in_files": # FAST needs the -S parameter value to correspond to the number # of input images, otherwise it will ignore all but the first formatted = "-S %d %s" % (len(value), formatted) @@ -348,287 +391,306 @@ def _list_outputs(self): # input filename _gen_fname_opts = {} if isdefined(self.inputs.out_basename): - _gen_fname_opts['basename'] = 
self.inputs.out_basename - _gen_fname_opts['cwd'] = os.getcwd() + _gen_fname_opts["basename"] = self.inputs.out_basename + _gen_fname_opts["cwd"] = os.getcwd() else: - _gen_fname_opts['basename'] = self.inputs.in_files[-1] - _gen_fname_opts['cwd'], _, _ = split_filename( - _gen_fname_opts['basename']) + _gen_fname_opts["basename"] = self.inputs.in_files[-1] + _gen_fname_opts["cwd"], _, _ = split_filename(_gen_fname_opts["basename"]) - outputs['tissue_class_map'] = self._gen_fname( - suffix='_seg', **_gen_fname_opts) + outputs["tissue_class_map"] = self._gen_fname(suffix="_seg", **_gen_fname_opts) if self.inputs.segments: - outputs['tissue_class_files'] = [] + outputs["tissue_class_files"] = [] for i in range(nclasses): - outputs['tissue_class_files'].append( - self._gen_fname(suffix='_seg_%d' % i, **_gen_fname_opts)) + outputs["tissue_class_files"].append( + self._gen_fname(suffix="_seg_%d" % i, **_gen_fname_opts) + ) if isdefined(self.inputs.output_biascorrected): - outputs['restored_image'] = [] + outputs["restored_image"] = [] if len(self.inputs.in_files) > 1: # for multi-image segmentation there is one corrected image # per input for val, f in enumerate(self.inputs.in_files): # image numbering is 1-based - outputs['restored_image'].append( + outputs["restored_image"].append( self._gen_fname( - suffix='_restore_%d' % (val + 1), - **_gen_fname_opts)) + suffix="_restore_%d" % (val + 1), **_gen_fname_opts + ) + ) else: # single image segmentation has unnumbered output image - outputs['restored_image'].append( - self._gen_fname(suffix='_restore', **_gen_fname_opts)) + outputs["restored_image"].append( + self._gen_fname(suffix="_restore", **_gen_fname_opts) + ) - outputs['mixeltype'] = self._gen_fname( - suffix='_mixeltype', **_gen_fname_opts) + outputs["mixeltype"] = self._gen_fname(suffix="_mixeltype", **_gen_fname_opts) if not self.inputs.no_pve: - outputs['partial_volume_map'] = self._gen_fname( - suffix='_pveseg', **_gen_fname_opts) - outputs['partial_volume_files'] = [] + outputs["partial_volume_map"] = self._gen_fname( + suffix="_pveseg", **_gen_fname_opts + ) + outputs["partial_volume_files"] = [] for i in range(nclasses): - outputs['partial_volume_files'].append( - self._gen_fname(suffix='_pve_%d' % i, **_gen_fname_opts)) + outputs["partial_volume_files"].append( + self._gen_fname(suffix="_pve_%d" % i, **_gen_fname_opts) + ) if self.inputs.output_biasfield: - outputs['bias_field'] = [] + outputs["bias_field"] = [] if len(self.inputs.in_files) > 1: # for multi-image segmentation there is one bias field image # per input for val, f in enumerate(self.inputs.in_files): # image numbering is 1-based - outputs['bias_field'].append( + outputs["bias_field"].append( self._gen_fname( - suffix='_bias_%d' % (val + 1), **_gen_fname_opts)) + suffix="_bias_%d" % (val + 1), **_gen_fname_opts + ) + ) else: # single image segmentation has unnumbered output image - outputs['bias_field'].append( - self._gen_fname(suffix='_bias', **_gen_fname_opts)) + outputs["bias_field"].append( + self._gen_fname(suffix="_bias", **_gen_fname_opts) + ) if self.inputs.probability_maps: - outputs['probability_maps'] = [] + outputs["probability_maps"] = [] for i in range(nclasses): - outputs['probability_maps'].append( - self._gen_fname(suffix='_prob_%d' % i, **_gen_fname_opts)) + outputs["probability_maps"].append( + self._gen_fname(suffix="_prob_%d" % i, **_gen_fname_opts) + ) return outputs class FLIRTInputSpec(FSLCommandInputSpec): in_file = File( - exists=True, - argstr='-in %s', - mandatory=True, - position=0, - 
desc='input file') + exists=True, argstr="-in %s", mandatory=True, position=0, desc="input file" + ) reference = File( - exists=True, - argstr='-ref %s', - mandatory=True, - position=1, - desc='reference file') + exists=True, argstr="-ref %s", mandatory=True, position=1, desc="reference file" + ) out_file = File( - argstr='-out %s', - desc='registered output file', - name_source=['in_file'], - name_template='%s_flirt', + argstr="-out %s", + desc="registered output file", + name_source=["in_file"], + name_template="%s_flirt", position=2, - hash_files=False) + hash_files=False, + ) out_matrix_file = File( - argstr='-omat %s', - name_source=['in_file'], + argstr="-omat %s", + name_source=["in_file"], keep_extension=True, - name_template='%s_flirt.mat', - desc='output affine matrix in 4x4 asciii format', + name_template="%s_flirt.mat", + desc="output affine matrix in 4x4 ascii format", position=3, - hash_files=False) + hash_files=False, + ) out_log = File( - name_source=['in_file'], + name_source=["in_file"], keep_extension=True, - requires=['save_log'], - name_template='%s_flirt.log', - desc='output log') - in_matrix_file = File(argstr='-init %s', desc='input 4x4 affine matrix') + requires=["save_log"], + name_template="%s_flirt.log", + desc="output log", + ) + in_matrix_file = File(argstr="-init %s", desc="input 4x4 affine matrix") apply_xfm = traits.Bool( - argstr='-applyxfm', + argstr="-applyxfm", desc=( - 'apply transformation supplied by in_matrix_file or uses_qform to' - ' use the affine matrix stored in the reference header')) + "apply transformation supplied by in_matrix_file or uses_qform to" + " use the affine matrix stored in the reference header" + ), + ) apply_isoxfm = traits.Float( - argstr='-applyisoxfm %f', - xor=['apply_xfm'], - desc='as applyxfm but forces isotropic resampling') + argstr="-applyisoxfm %f", + xor=["apply_xfm"], + desc="as applyxfm but forces isotropic resampling", + ) datatype = traits.Enum( - 'char', - 'short', - 'int', - 'float', - 'double', - argstr='-datatype %s', - desc='force output data type') + "char", + "short", + "int", + "float", + "double", + argstr="-datatype %s", + desc="force output data type", + ) cost = traits.Enum( - 'mutualinfo', - 'corratio', - 'normcorr', - 'normmi', - 'leastsq', - 'labeldiff', - 'bbr', - argstr='-cost %s', - desc='cost function') + "mutualinfo", + "corratio", + "normcorr", + "normmi", + "leastsq", + "labeldiff", + "bbr", + argstr="-cost %s", + desc="cost function", + ) # XXX What is the difference between 'cost' and 'searchcost'? Are # these both necessary or do they map to the same variable.
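    # As far as FLIRT's command-line help goes, '-cost' selects the similarity
    # measure for the final optimisation while '-searchcost' selects the one
    # used during the initial search phase, so the two options are distinct
    # and may legitimately differ. Illustrative usage sketch only (file names
    # are placeholders):
    #
    #     flt = FLIRT(in_file="structural.nii", reference="mni152.nii.gz",
    #                 cost="corratio", cost_func="normmi")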
cost_func = traits.Enum( - 'mutualinfo', - 'corratio', - 'normcorr', - 'normmi', - 'leastsq', - 'labeldiff', - 'bbr', - argstr='-searchcost %s', - desc='cost function') + "mutualinfo", + "corratio", + "normcorr", + "normmi", + "leastsq", + "labeldiff", + "bbr", + argstr="-searchcost %s", + desc="cost function", + ) uses_qform = traits.Bool( - argstr='-usesqform', desc='initialize using sform or qform') - display_init = traits.Bool( - argstr='-displayinit', desc='display initial matrix') + argstr="-usesqform", desc="initialize using sform or qform" + ) + display_init = traits.Bool(argstr="-displayinit", desc="display initial matrix") angle_rep = traits.Enum( - 'quaternion', - 'euler', - argstr='-anglerep %s', - desc='representation of rotation angles') + "quaternion", + "euler", + argstr="-anglerep %s", + desc="representation of rotation angles", + ) interp = traits.Enum( - 'trilinear', - 'nearestneighbour', - 'sinc', - 'spline', - argstr='-interp %s', - desc='final interpolation method used in reslicing') + "trilinear", + "nearestneighbour", + "sinc", + "spline", + argstr="-interp %s", + desc="final interpolation method used in reslicing", + ) sinc_width = traits.Int( - argstr='-sincwidth %d', units='voxels', desc='full-width in voxels') + argstr="-sincwidth %d", units="voxels", desc="full-width in voxels" + ) sinc_window = traits.Enum( - 'rectangular', - 'hanning', - 'blackman', - argstr='-sincwindow %s', - desc='sinc window') # XXX better doc - bins = traits.Int(argstr='-bins %d', desc='number of histogram bins') - dof = traits.Int( - argstr='-dof %d', desc='number of transform degrees of freedom') - no_resample = traits.Bool( - argstr='-noresample', desc='do not change input sampling') + "rectangular", + "hanning", + "blackman", + argstr="-sincwindow %s", + desc="sinc window", + ) # XXX better doc + bins = traits.Int(argstr="-bins %d", desc="number of histogram bins") + dof = traits.Int(argstr="-dof %d", desc="number of transform degrees of freedom") + no_resample = traits.Bool(argstr="-noresample", desc="do not change input sampling") force_scaling = traits.Bool( - argstr='-forcescaling', desc='force rescaling even for low-res images') + argstr="-forcescaling", desc="force rescaling even for low-res images" + ) min_sampling = traits.Float( - argstr='-minsampling %f', - units='mm', - desc='set minimum voxel dimension for sampling') + argstr="-minsampling %f", + units="mm", + desc="set minimum voxel dimension for sampling", + ) padding_size = traits.Int( - argstr='-paddingsize %d', - units='voxels', - desc='for applyxfm: interpolates outside image ' - 'by size') + argstr="-paddingsize %d", + units="voxels", + desc="for applyxfm: interpolates outside image " "by size", + ) searchr_x = traits.List( traits.Int, minlen=2, maxlen=2, - units='degrees', - argstr='-searchrx %s', - desc='search angles along x-axis, in degrees') + units="degrees", + argstr="-searchrx %s", + desc="search angles along x-axis, in degrees", + ) searchr_y = traits.List( traits.Int, minlen=2, maxlen=2, - units='degrees', - argstr='-searchry %s', - desc='search angles along y-axis, in degrees') + units="degrees", + argstr="-searchry %s", + desc="search angles along y-axis, in degrees", + ) searchr_z = traits.List( traits.Int, minlen=2, maxlen=2, - units='degrees', - argstr='-searchrz %s', - desc='search angles along z-axis, in degrees') + units="degrees", + argstr="-searchrz %s", + desc="search angles along z-axis, in degrees", + ) no_search = traits.Bool( - argstr='-nosearch', desc='set all angular searches to ranges 0 
to 0') + argstr="-nosearch", desc="set all angular searches to ranges 0 to 0" + ) coarse_search = traits.Int( - argstr='-coarsesearch %d', - units='degrees', - desc='coarse search delta angle') + argstr="-coarsesearch %d", units="degrees", desc="coarse search delta angle" + ) fine_search = traits.Int( - argstr='-finesearch %d', - units='degrees', - desc='fine search delta angle') + argstr="-finesearch %d", units="degrees", desc="fine search delta angle" + ) schedule = File( - exists=True, argstr='-schedule %s', desc='replaces default schedule') + exists=True, argstr="-schedule %s", desc="replaces default schedule" + ) ref_weight = File( - exists=True, - argstr='-refweight %s', - desc='File for reference weighting volume') + exists=True, argstr="-refweight %s", desc="File for reference weighting volume" + ) in_weight = File( - exists=True, - argstr='-inweight %s', - desc='File for input weighting volume') - no_clamp = traits.Bool( - argstr='-noclamp', desc='do not use intensity clamping') + exists=True, argstr="-inweight %s", desc="File for input weighting volume" + ) + no_clamp = traits.Bool(argstr="-noclamp", desc="do not use intensity clamping") no_resample_blur = traits.Bool( - argstr='-noresampblur', desc='do not use blurring on downsampling') - rigid2D = traits.Bool( - argstr='-2D', desc='use 2D rigid body mode - ignores dof') - save_log = traits.Bool(desc='save to log file') - verbose = traits.Int(argstr='-verbose %d', desc='verbose mode, 0 is least') + argstr="-noresampblur", desc="do not use blurring on downsampling" + ) + rigid2D = traits.Bool(argstr="-2D", desc="use 2D rigid body mode - ignores dof") + save_log = traits.Bool(desc="save to log file") + verbose = traits.Int(argstr="-verbose %d", desc="verbose mode, 0 is least") bgvalue = traits.Float( 0, - argstr='-setbackground %f', - desc=('use specified background value for points ' - 'outside FOV')) + argstr="-setbackground %f", + desc=("use specified background value for points " "outside FOV"), + ) # BBR options wm_seg = File( - argstr='-wmseg %s', - min_ver='5.0.0', - desc='white matter segmentation volume needed by BBR cost function') + argstr="-wmseg %s", + min_ver="5.0.0", + desc="white matter segmentation volume needed by BBR cost function", + ) wmcoords = File( - argstr='-wmcoords %s', - min_ver='5.0.0', - desc='white matter boundary coordinates for BBR cost function') + argstr="-wmcoords %s", + min_ver="5.0.0", + desc="white matter boundary coordinates for BBR cost function", + ) wmnorms = File( - argstr='-wmnorms %s', - min_ver='5.0.0', - desc='white matter boundary normals for BBR cost function') + argstr="-wmnorms %s", + min_ver="5.0.0", + desc="white matter boundary normals for BBR cost function", + ) fieldmap = File( - argstr='-fieldmap %s', - min_ver='5.0.0', - desc=('fieldmap image in rads/s - must be already registered to the ' - 'reference image')) + argstr="-fieldmap %s", + min_ver="5.0.0", + desc=( + "fieldmap image in rads/s - must be already registered to the " + "reference image" + ), + ) fieldmapmask = File( - argstr='-fieldmapmask %s', - min_ver='5.0.0', - desc='mask for fieldmap image') + argstr="-fieldmapmask %s", min_ver="5.0.0", desc="mask for fieldmap image" + ) pedir = traits.Int( - argstr='-pedir %d', - min_ver='5.0.0', - desc='phase encode direction of EPI - 1/2/3=x/y/z & -1/-2/-3=-x/-y/-z') + argstr="-pedir %d", + min_ver="5.0.0", + desc="phase encode direction of EPI - 1/2/3=x/y/z & -1/-2/-3=-x/-y/-z", + ) echospacing = traits.Float( - argstr='-echospacing %f', - min_ver='5.0.0', - desc='value 
of EPI echo spacing - units of seconds') + argstr="-echospacing %f", + min_ver="5.0.0", + desc="value of EPI echo spacing - units of seconds", + ) bbrtype = traits.Enum( - 'signed', - 'global_abs', - 'local_abs', - argstr='-bbrtype %s', - min_ver='5.0.0', - desc=('type of bbr cost function: signed [default], global_abs, ' - 'local_abs')) + "signed", + "global_abs", + "local_abs", + argstr="-bbrtype %s", + min_ver="5.0.0", + desc=("type of bbr cost function: signed [default], global_abs, " "local_abs"), + ) bbrslope = traits.Float( - argstr='-bbrslope %f', min_ver='5.0.0', desc='value of bbr slope') + argstr="-bbrslope %f", min_ver="5.0.0", desc="value of bbr slope" + ) class FLIRTOutputSpec(TraitedSpec): - out_file = File( - exists=True, desc='path/name of registered file (if generated)') + out_file = File(exists=True, desc="path/name of registered file (if generated)") out_matrix_file = File( - exists=True, - desc='path/name of calculated affine transform ' - '(if generated)') - out_log = File(desc='path/name of output log (if generated)') + exists=True, desc="path/name of calculated affine transform " "(if generated)" + ) + out_log = File(desc="path/name of output log (if generated)") class FLIRT(FSLCommand): @@ -653,17 +715,19 @@ class FLIRT(FSLCommand): >>> res = flt.run() #doctest: +SKIP """ - _cmd = 'flirt' + + _cmd = "flirt" input_spec = FLIRTInputSpec output_spec = FLIRTOutputSpec _log_written = False def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = super(FLIRT, self).aggregate_outputs( - runtime=runtime, needed_outputs=needed_outputs) + runtime=runtime, needed_outputs=needed_outputs + ) if self.inputs.save_log and not self._log_written: with open(outputs.out_log, "a") as text_file: - text_file.write(runtime.stdout + '\n') + text_file.write(runtime.stdout + "\n") self._log_written = True return outputs @@ -672,22 +736,27 @@ def _parse_inputs(self, skip=None): skip = [] if self.inputs.save_log and not self.inputs.verbose: self.inputs.verbose = 1 - if self.inputs.apply_xfm and not (self.inputs.in_matrix_file - or self.inputs.uses_qform): - raise RuntimeError('Argument apply_xfm requires in_matrix_file or ' - 'uses_qform arguments to run') - skip.append('save_log') + if self.inputs.apply_xfm and not ( + self.inputs.in_matrix_file or self.inputs.uses_qform + ): + raise RuntimeError( + "Argument apply_xfm requires in_matrix_file or " + "uses_qform arguments to run" + ) + skip.append("save_log") return super(FLIRT, self)._parse_inputs(skip=skip) class ApplyXFMInputSpec(FLIRTInputSpec): apply_xfm = traits.Bool( True, - argstr='-applyxfm', + argstr="-applyxfm", desc=( - 'apply transformation supplied by in_matrix_file or uses_qform to' - ' use the affine matrix stored in the reference header'), - usedefault=True) + "apply transformation supplied by in_matrix_file or uses_qform to" + " use the affine matrix stored in the reference header" + ), + usedefault=True, + ) class ApplyXFM(FLIRT): @@ -711,6 +780,7 @@ class ApplyXFM(FLIRT): >>> result = applyxfm.run() # doctest: +SKIP """ + input_spec = ApplyXFMInputSpec @@ -720,70 +790,69 @@ class MCFLIRTInputSpec(FSLCommandInputSpec): position=0, argstr="-in %s", mandatory=True, - desc="timeseries to motion-correct") + desc="timeseries to motion-correct", + ) out_file = File( - argstr='-out %s', genfile=True, desc="file to write", hash_files=False) + argstr="-out %s", genfile=True, desc="file to write", hash_files=False + ) cost = traits.Enum( - 'mutualinfo', - 'woods', - 'corratio', - 'normcorr', - 'normmi', - 
'leastsquares', - argstr='-cost %s', - desc="cost function to optimize") - bins = traits.Int(argstr='-bins %d', desc="number of histogram bins") - dof = traits.Int( - argstr='-dof %d', desc="degrees of freedom for the transformation") - ref_vol = traits.Int(argstr='-refvol %d', desc="volume to align frames to") - scaling = traits.Float( - argstr='-scaling %.2f', desc="scaling factor to use") + "mutualinfo", + "woods", + "corratio", + "normcorr", + "normmi", + "leastsquares", + argstr="-cost %s", + desc="cost function to optimize", + ) + bins = traits.Int(argstr="-bins %d", desc="number of histogram bins") + dof = traits.Int(argstr="-dof %d", desc="degrees of freedom for the transformation") + ref_vol = traits.Int(argstr="-refvol %d", desc="volume to align frames to") + scaling = traits.Float(argstr="-scaling %.2f", desc="scaling factor to use") smooth = traits.Float( - argstr='-smooth %.2f', desc="smoothing factor for the cost function") + argstr="-smooth %.2f", desc="smoothing factor for the cost function" + ) rotation = traits.Int( - argstr='-rotation %d', desc="scaling factor for rotation tolerances") + argstr="-rotation %d", desc="scaling factor for rotation tolerances" + ) stages = traits.Int( - argstr='-stages %d', - desc="stages (if 4, perform final search with sinc interpolation") - init = File( - exists=True, argstr='-init %s', desc="inital transformation matrix") + argstr="-stages %d", + desc="stages (if 4, perform final search with sinc interpolation)", + ) + init = File(exists=True, argstr="-init %s", desc="initial transformation matrix") interpolation = traits.Enum( "spline", "nn", "sinc", argstr="-%s_final", - desc="interpolation method for transformation") - use_gradient = traits.Bool( - argstr='-gdt', desc="run search on gradient images") - use_contour = traits.Bool( - argstr='-edge', desc="run search on contour images") - mean_vol = traits.Bool(argstr='-meanvol', desc="register to mean volume") + desc="interpolation method for transformation", + ) + use_gradient = traits.Bool(argstr="-gdt", desc="run search on gradient images") + use_contour = traits.Bool(argstr="-edge", desc="run search on contour images") + mean_vol = traits.Bool(argstr="-meanvol", desc="register to mean volume") stats_imgs = traits.Bool( - argstr='-stats', desc="produce variance and std. dev. images") - save_mats = traits.Bool( - argstr='-mats', desc="save transformation matrices") - save_plots = traits.Bool( - argstr='-plots', desc="save transformation parameters") + argstr="-stats", desc="produce variance and std. dev. 
images" + ) + save_mats = traits.Bool(argstr="-mats", desc="save transformation matrices") + save_plots = traits.Bool(argstr="-plots", desc="save transformation parameters") save_rms = traits.Bool( - argstr='-rmsabs -rmsrel', desc="save rms displacement parameters") + argstr="-rmsabs -rmsrel", desc="save rms displacement parameters" + ) ref_file = File( - exists=True, - argstr='-reffile %s', - desc="target image for motion correction") + exists=True, argstr="-reffile %s", desc="target image for motion correction" + ) class MCFLIRTOutputSpec(TraitedSpec): out_file = File(exists=True, desc="motion-corrected timeseries") variance_img = File(exists=True, desc="variance image") std_img = File(exists=True, desc="standard deviation image") - mean_img = File( - exists=True, desc="mean timeseries image (if mean_vol=True)") + mean_img = File(exists=True, desc="mean timeseries image (if mean_vol=True)") par_file = File(exists=True, desc="text-file with motion parameters") - mat_file = OutputMultiPath( - File(exists=True), desc="transformation matrices") + mat_file = OutputMultiPath(File(exists=True), desc="transformation matrices") rms_files = OutputMultiPath( - File(exists=True), - desc="absolute and relative displacement parameters") + File(exists=True), desc="absolute and relative displacement parameters" + ) class MCFLIRT(FSLCommand): @@ -804,7 +873,8 @@ class MCFLIRT(FSLCommand): >>> res = mcflt.run() # doctest: +SKIP """ - _cmd = 'mcflirt' + + _cmd = "mcflirt" input_spec = MCFLIRTInputSpec output_spec = MCFLIRTOutputSpec @@ -819,21 +889,25 @@ def _format_arg(self, name, spec, value): def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = self._gen_outfilename() - output_dir = os.path.dirname(outputs['out_file']) + outputs["out_file"] = self._gen_outfilename() + output_dir = os.path.dirname(outputs["out_file"]) if isdefined(self.inputs.stats_imgs) and self.inputs.stats_imgs: - if LooseVersion(Info.version()) < LooseVersion('6.0.0'): + if LooseVersion(Info.version()) < LooseVersion("6.0.0"): # FSL <6.0 outputs have .nii.gz_variance.nii.gz as extension - outputs['variance_img'] = self._gen_fname( - outputs['out_file'] + '_variance.ext', cwd=output_dir) - outputs['std_img'] = self._gen_fname( - outputs['out_file'] + '_sigma.ext', cwd=output_dir) + outputs["variance_img"] = self._gen_fname( + outputs["out_file"] + "_variance.ext", cwd=output_dir + ) + outputs["std_img"] = self._gen_fname( + outputs["out_file"] + "_sigma.ext", cwd=output_dir + ) else: - outputs['variance_img'] = self._gen_fname( - outputs['out_file'], suffix='_variance', cwd=output_dir) - outputs['std_img'] = self._gen_fname( - outputs['out_file'], suffix='_sigma', cwd=output_dir) + outputs["variance_img"] = self._gen_fname( + outputs["out_file"], suffix="_variance", cwd=output_dir + ) + outputs["std_img"] = self._gen_fname( + outputs["out_file"], suffix="_sigma", cwd=output_dir + ) # The mean image created if -stats option is specified ('meanvol') # is missing the top and bottom slices. Therefore we only expose the @@ -842,33 +916,34 @@ def _list_outputs(self): # Note that the same problem holds for the std and variance image. 
if isdefined(self.inputs.mean_vol) and self.inputs.mean_vol: - if LooseVersion(Info.version()) < LooseVersion('6.0.0'): + if LooseVersion(Info.version()) < LooseVersion("6.0.0"): # FSL <6.0 outputs have .nii.gz_mean_img.nii.gz as extension - outputs['mean_img'] = self._gen_fname( - outputs['out_file'] + '_mean_reg.ext', cwd=output_dir) + outputs["mean_img"] = self._gen_fname( + outputs["out_file"] + "_mean_reg.ext", cwd=output_dir + ) else: - outputs['mean_img'] = self._gen_fname( - outputs['out_file'], suffix='_mean_reg', cwd=output_dir) + outputs["mean_img"] = self._gen_fname( + outputs["out_file"], suffix="_mean_reg", cwd=output_dir + ) if isdefined(self.inputs.save_mats) and self.inputs.save_mats: - _, filename = os.path.split(outputs['out_file']) - matpathname = os.path.join(output_dir, filename + '.mat') + _, filename = os.path.split(outputs["out_file"]) + matpathname = os.path.join(output_dir, filename + ".mat") _, _, _, timepoints = load(self.inputs.in_file).shape - outputs['mat_file'] = [] + outputs["mat_file"] = [] for t in range(timepoints): - outputs['mat_file'].append( - os.path.join(matpathname, 'MAT_%04d' % t)) + outputs["mat_file"].append(os.path.join(matpathname, "MAT_%04d" % t)) if isdefined(self.inputs.save_plots) and self.inputs.save_plots: # Note - if e.g. out_file has .nii.gz, you get .nii.gz.par, # which is what mcflirt does! - outputs['par_file'] = outputs['out_file'] + '.par' + outputs["par_file"] = outputs["out_file"] + ".par" if isdefined(self.inputs.save_rms) and self.inputs.save_rms: - outfile = outputs['out_file'] - outputs['rms_files'] = [outfile + '_abs.rms', outfile + '_rel.rms'] + outfile = outputs["out_file"] + outputs["rms_files"] = [outfile + "_abs.rms", outfile + "_rel.rms"] return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._gen_outfilename() return None @@ -877,241 +952,284 @@ def _gen_outfilename(self): if isdefined(out_file): out_file = os.path.realpath(out_file) if not isdefined(out_file) and isdefined(self.inputs.in_file): - out_file = self._gen_fname(self.inputs.in_file, suffix='_mcf') + out_file = self._gen_fname(self.inputs.in_file, suffix="_mcf") return os.path.abspath(out_file) class FNIRTInputSpec(FSLCommandInputSpec): ref_file = File( - exists=True, - argstr='--ref=%s', - mandatory=True, - desc='name of reference image') + exists=True, argstr="--ref=%s", mandatory=True, desc="name of reference image" + ) in_file = File( - exists=True, - argstr='--in=%s', - mandatory=True, - desc='name of input image') + exists=True, argstr="--in=%s", mandatory=True, desc="name of input image" + ) affine_file = File( - exists=True, - argstr='--aff=%s', - desc='name of file containing affine transform') + exists=True, argstr="--aff=%s", desc="name of file containing affine transform" + ) inwarp_file = File( exists=True, - argstr='--inwarp=%s', - desc='name of file containing initial non-linear warps') + argstr="--inwarp=%s", + desc="name of file containing initial non-linear warps", + ) in_intensitymap_file = traits.List( File(exists=True), - argstr='--intin=%s', + argstr="--intin=%s", copyfile=False, minlen=1, maxlen=2, - desc=('name of file/files containing ' - 'initial intensity mapping ' - 'usually generated by previous ' - 'fnirt run')) + desc=( + "name of file/files containing " + "initial intensity mapping " + "usually generated by previous " + "fnirt run" + ), + ) fieldcoeff_file = traits.Either( traits.Bool, File, - argstr='--cout=%s', - desc='name of output file with field coefficients or 
true') + argstr="--cout=%s", + desc="name of output file with field coefficients or true", + ) warped_file = File( - argstr='--iout=%s', - desc='name of output image', - genfile=True, - hash_files=False) + argstr="--iout=%s", desc="name of output image", genfile=True, hash_files=False + ) field_file = traits.Either( traits.Bool, File, - argstr='--fout=%s', - desc='name of output file with field or true', - hash_files=False) + argstr="--fout=%s", + desc="name of output file with field or true", + hash_files=False, + ) jacobian_file = traits.Either( traits.Bool, File, - argstr='--jout=%s', - desc=('name of file for writing out the ' - 'Jacobian of the field (for ' - 'diagnostic or VBM purposes)'), - hash_files=False) + argstr="--jout=%s", + desc=( + "name of file for writing out the " + "Jacobian of the field (for " + "diagnostic or VBM purposes)" + ), + hash_files=False, + ) modulatedref_file = traits.Either( traits.Bool, File, - argstr='--refout=%s', - desc=('name of file for writing out ' - 'intensity modulated --ref (for ' - 'diagnostic purposes)'), - hash_files=False) + argstr="--refout=%s", + desc=( + "name of file for writing out " + "intensity modulated --ref (for " + "diagnostic purposes)" + ), + hash_files=False, + ) out_intensitymap_file = traits.Either( traits.Bool, File, - argstr='--intout=%s', - desc=('name of files for writing ' - 'information pertaining to ' - 'intensity mapping'), - hash_files=False) + argstr="--intout=%s", + desc=( + "name of files for writing " + "information pertaining to " + "intensity mapping" + ), + hash_files=False, + ) log_file = File( - argstr='--logout=%s', - desc='Name of log-file', - genfile=True, - hash_files=False) + argstr="--logout=%s", desc="Name of log-file", genfile=True, hash_files=False + ) config_file = traits.Either( traits.Enum("T1_2_MNI152_2mm", "FA_2_FMRIB58_1mm"), File(exists=True), - argstr='--config=%s', - desc='Name of config file specifying command line arguments') + argstr="--config=%s", + desc="Name of config file specifying command line arguments", + ) refmask_file = File( exists=True, - argstr='--refmask=%s', - desc='name of file with mask in reference space') + argstr="--refmask=%s", + desc="name of file with mask in reference space", + ) inmask_file = File( exists=True, - argstr='--inmask=%s', - desc='name of file with mask in input image space') + argstr="--inmask=%s", + desc="name of file with mask in input image space", + ) skip_refmask = traits.Bool( - argstr='--applyrefmask=0', - xor=['apply_refmask'], - desc='Skip specified refmask if set, default false') + argstr="--applyrefmask=0", + xor=["apply_refmask"], + desc="Skip specified refmask if set, default false", + ) skip_inmask = traits.Bool( - argstr='--applyinmask=0', - xor=['apply_inmask'], - desc='skip specified inmask if set, default false') + argstr="--applyinmask=0", + xor=["apply_inmask"], + desc="skip specified inmask if set, default false", + ) apply_refmask = traits.List( traits.Enum(0, 1), - argstr='--applyrefmask=%s', - xor=['skip_refmask'], - desc=('list of iterations to use reference mask on (1 to use, 0 to ' - 'skip)'), - sep=",") + argstr="--applyrefmask=%s", + xor=["skip_refmask"], + desc=("list of iterations to use reference mask on (1 to use, 0 to " "skip)"), + sep=",", + ) apply_inmask = traits.List( traits.Enum(0, 1), - argstr='--applyinmask=%s', - xor=['skip_inmask'], - desc='list of iterations to use input mask on (1 to use, 0 to skip)', - sep=",") + argstr="--applyinmask=%s", + xor=["skip_inmask"], + desc="list of iterations to use input 
mask on (1 to use, 0 to skip)", + sep=",", + ) skip_implicit_ref_masking = traits.Bool( - argstr='--imprefm=0', - desc=('skip implicit masking based on value in --ref image. ' - 'Default = 0')) + argstr="--imprefm=0", + desc=("skip implicit masking based on value in --ref image. " "Default = 0"), + ) skip_implicit_in_masking = traits.Bool( - argstr='--impinm=0', - desc=('skip implicit masking based on value in --in image. ' - 'Default = 0')) + argstr="--impinm=0", + desc=("skip implicit masking based on value in --in image. " "Default = 0"), + ) refmask_val = traits.Float( - argstr='--imprefval=%f', - desc='Value to mask out in --ref image. Default =0.0') + argstr="--imprefval=%f", desc="Value to mask out in --ref image. Default =0.0" + ) inmask_val = traits.Float( - argstr='--impinval=%f', - desc='Value to mask out in --in image. Default =0.0') + argstr="--impinval=%f", desc="Value to mask out in --in image. Default =0.0" + ) max_nonlin_iter = traits.List( traits.Int, - argstr='--miter=%s', - desc='Max # of non-linear iterations list, default [5, 5, 5, 5]', - sep=",") + argstr="--miter=%s", + desc="Max # of non-linear iterations list, default [5, 5, 5, 5]", + sep=",", + ) subsampling_scheme = traits.List( traits.Int, - argstr='--subsamp=%s', - desc='sub-sampling scheme, list, default [4, 2, 1, 1]', - sep=",") + argstr="--subsamp=%s", + desc="sub-sampling scheme, list, default [4, 2, 1, 1]", + sep=",", + ) warp_resolution = traits.Tuple( traits.Int, traits.Int, traits.Int, - argstr='--warpres=%d,%d,%d', - desc=('(approximate) resolution (in mm) of warp basis in x-, y- and ' - 'z-direction, default 10, 10, 10')) + argstr="--warpres=%d,%d,%d", + desc=( + "(approximate) resolution (in mm) of warp basis in x-, y- and " + "z-direction, default 10, 10, 10" + ), + ) spline_order = traits.Int( - argstr='--splineorder=%d', - desc='Order of spline, 2->Qadratic spline, 3->Cubic spline. Default=3') + argstr="--splineorder=%d", + desc="Order of spline, 2->Quadratic spline, 3->Cubic spline. Default=3", + ) in_fwhm = traits.List( traits.Int, - argstr='--infwhm=%s', - desc=('FWHM (in mm) of gaussian smoothing kernel for input volume, ' - 'default [6, 4, 2, 2]'), - sep=",") + argstr="--infwhm=%s", + desc=( + "FWHM (in mm) of gaussian smoothing kernel for input volume, " + "default [6, 4, 2, 2]" + ), + sep=",", + ) ref_fwhm = traits.List( traits.Int, - argstr='--reffwhm=%s', - desc=('FWHM (in mm) of gaussian smoothing kernel for ref volume, ' - 'default [4, 2, 0, 0]'), - sep=",") + argstr="--reffwhm=%s", + desc=( + "FWHM (in mm) of gaussian smoothing kernel for ref volume, " + "default [4, 2, 0, 0]" + ), + sep=",", + ) regularization_model = traits.Enum( - 'membrane_energy', - 'bending_energy', - argstr='--regmod=%s', - desc=('Model for regularisation of warp-field [membrane_energy ' - 'bending_energy], default bending_energy')) + "membrane_energy", + "bending_energy", + argstr="--regmod=%s", + desc=( + "Model for regularisation of warp-field [membrane_energy " + "bending_energy], default bending_energy" + ), + ) regularization_lambda = traits.List( traits.Float, - argstr='--lambda=%s', - desc=('Weight of regularisation, default depending on --ssqlambda and ' - '--regmod switches. See user documetation.'), - sep=",") + argstr="--lambda=%s", + desc=( + "Weight of regularisation, default depending on --ssqlambda and " + "--regmod switches. See user documentation." 
+ ), + sep=",", + ) skip_lambda_ssq = traits.Bool( - argstr='--ssqlambda=0', - desc='If true, lambda is not weighted by current ssq, default false') + argstr="--ssqlambda=0", + desc="If true, lambda is not weighted by current ssq, default false", + ) jacobian_range = traits.Tuple( traits.Float, traits.Float, - argstr='--jacrange=%f,%f', - desc='Allowed range of Jacobian determinants, default 0.01, 100.0') + argstr="--jacrange=%f,%f", + desc="Allowed range of Jacobian determinants, default 0.01, 100.0", + ) derive_from_ref = traits.Bool( - argstr='--refderiv', - desc=('If true, ref image is used to calculate derivatives. ' - 'Default false')) + argstr="--refderiv", + desc=("If true, ref image is used to calculate derivatives. " "Default false"), + ) intensity_mapping_model = traits.Enum( - 'none', - 'global_linear', - 'global_non_linear', - 'local_linear', - 'global_non_linear_with_bias', - 'local_non_linear', - argstr='--intmod=%s', - desc='Model for intensity-mapping') + "none", + "global_linear", + "global_non_linear", + "local_linear", + "global_non_linear_with_bias", + "local_non_linear", + argstr="--intmod=%s", + desc="Model for intensity-mapping", + ) intensity_mapping_order = traits.Int( - argstr='--intorder=%d', - desc='Order of poynomial for mapping intensities, default 5') + argstr="--intorder=%d", + desc="Order of poynomial for mapping intensities, default 5", + ) biasfield_resolution = traits.Tuple( traits.Int, traits.Int, traits.Int, - argstr='--biasres=%d,%d,%d', - desc=('Resolution (in mm) of bias-field modelling local intensities, ' - 'default 50, 50, 50')) + argstr="--biasres=%d,%d,%d", + desc=( + "Resolution (in mm) of bias-field modelling local intensities, " + "default 50, 50, 50" + ), + ) bias_regularization_lambda = traits.Float( - argstr='--biaslambda=%f', - desc='Weight of regularisation for bias-field, default 10000') + argstr="--biaslambda=%f", + desc="Weight of regularisation for bias-field, default 10000", + ) skip_intensity_mapping = traits.Bool( - argstr='--estint=0', - xor=['apply_intensity_mapping'], - desc='Skip estimate intensity-mapping default false') + argstr="--estint=0", + xor=["apply_intensity_mapping"], + desc="Skip estimate intensity-mapping default false", + ) apply_intensity_mapping = traits.List( traits.Enum(0, 1), - argstr='--estint=%s', - xor=['skip_intensity_mapping'], - desc=('List of subsampling levels to apply intensity mapping for ' - '(0 to skip, 1 to apply)'), - sep=",") + argstr="--estint=%s", + xor=["skip_intensity_mapping"], + desc=( + "List of subsampling levels to apply intensity mapping for " + "(0 to skip, 1 to apply)" + ), + sep=",", + ) hessian_precision = traits.Enum( - 'double', - 'float', - argstr='--numprec=%s', - desc=('Precision for representing Hessian, double or float. ' - 'Default double')) + "double", + "float", + argstr="--numprec=%s", + desc=("Precision for representing Hessian, double or float. 
" "Default double"), + ) class FNIRTOutputSpec(TraitedSpec): - fieldcoeff_file = File(exists=True, desc='file with field coefficients') - warped_file = File(exists=True, desc='warped image') - field_file = File(desc='file with warp field') - jacobian_file = File(desc='file containing Jacobian of the field') - modulatedref_file = File(desc='file containing intensity modulated --ref') + fieldcoeff_file = File(exists=True, desc="file with field coefficients") + warped_file = File(exists=True, desc="warped image") + field_file = File(desc="file with warp field") + jacobian_file = File(desc="file containing Jacobian of the field") + modulatedref_file = File(desc="file containing intensity modulated --ref") out_intensitymap_file = traits.List( File, minlen=2, maxlen=2, - desc='files containing info pertaining to intensity mapping') - log_file = File(desc='Name of log-file') + desc="files containing info pertaining to intensity mapping", + ) + log_file = File(desc="Name of log-file") class FNIRT(FSLCommand): @@ -1146,18 +1264,18 @@ class FNIRT(FSLCommand): """ - _cmd = 'fnirt' + _cmd = "fnirt" input_spec = FNIRTInputSpec output_spec = FNIRTOutputSpec filemap = { - 'warped_file': 'warped', - 'field_file': 'field', - 'jacobian_file': 'field_jacobian', - 'modulatedref_file': 'modulated', - 'out_intensitymap_file': 'intmap', - 'log_file': 'log.txt', - 'fieldcoeff_file': 'fieldwarp' + "warped_file": "warped", + "field_file": "field", + "jacobian_file": "field_jacobian", + "modulatedref_file": "modulated", + "out_intensitymap_file": "intmap", + "log_file": "log.txt", + "fieldcoeff_file": "fieldwarp", } def _list_outputs(self): @@ -1165,48 +1283,49 @@ def _list_outputs(self): for key, suffix in list(self.filemap.items()): inval = getattr(self.inputs, key) change_ext = True - if key in ['warped_file', 'log_file']: - if suffix.endswith('.txt'): + if key in ["warped_file", "log_file"]: + if suffix.endswith(".txt"): change_ext = False if isdefined(inval): outputs[key] = os.path.abspath(inval) else: outputs[key] = self._gen_fname( - self.inputs.in_file, - suffix='_' + suffix, - change_ext=change_ext) + self.inputs.in_file, suffix="_" + suffix, change_ext=change_ext + ) elif isdefined(inval): if isinstance(inval, bool): if inval: outputs[key] = self._gen_fname( self.inputs.in_file, - suffix='_' + suffix, - change_ext=change_ext) + suffix="_" + suffix, + change_ext=change_ext, + ) else: outputs[key] = os.path.abspath(inval) - if key == 'out_intensitymap_file' and isdefined(outputs[key]): + if key == "out_intensitymap_file" and isdefined(outputs[key]): basename = FNIRT.intensitymap_file_basename(outputs[key]) outputs[key] = [ outputs[key], - '%s.txt' % basename, + "%s.txt" % basename, ] return outputs def _format_arg(self, name, spec, value): - if name in ('in_intensitymap_file', 'out_intensitymap_file'): - if name == 'out_intensitymap_file': + if name in ("in_intensitymap_file", "out_intensitymap_file"): + if name == "out_intensitymap_file": value = self._list_outputs()[name] value = [FNIRT.intensitymap_file_basename(v) for v in value] - assert len(set(value)) == 1, ( - 'Found different basenames for {}: {}'.format(name, value)) + assert len(set(value)) == 1, "Found different basenames for {}: {}".format( + name, value + ) return spec.argstr % value[0] if name in list(self.filemap.keys()): return spec.argstr % self._list_outputs()[name] return super(FNIRT, self)._format_arg(name, spec, value) def _gen_filename(self, name): - if name in ['warped_file', 'log_file']: + if name in ["warped_file", "log_file"]: 
@@ -1243,74 +1362,86 @@ class ApplyWarpInputSpec(FSLCommandInputSpec):
     in_file = File(
         exists=True,
-        argstr='--in=%s',
+        argstr="--in=%s",
         mandatory=True,
         position=0,
-        desc='image to be warped')
+        desc="image to be warped",
+    )
     out_file = File(
-        argstr='--out=%s',
+        argstr="--out=%s",
         genfile=True,
         position=2,
-        desc='output filename',
-        hash_files=False)
+        desc="output filename",
+        hash_files=False,
+    )
     ref_file = File(
         exists=True,
-        argstr='--ref=%s',
+        argstr="--ref=%s",
         mandatory=True,
         position=1,
-        desc='reference image')
+        desc="reference image",
+    )
     field_file = File(
-        exists=True, argstr='--warp=%s', desc='file containing warp field')
+        exists=True, argstr="--warp=%s", desc="file containing warp field"
+    )
     abswarp = traits.Bool(
-        argstr='--abs',
-        xor=['relwarp'],
-        desc="treat warp field as absolute: x' = w(x)")
+        argstr="--abs", xor=["relwarp"], desc="treat warp field as absolute: x' = w(x)"
+    )
     relwarp = traits.Bool(
-        argstr='--rel',
-        xor=['abswarp'],
+        argstr="--rel",
+        xor=["abswarp"],
         position=-1,
-        desc="treat warp field as relative: x' = x + w(x)")
+        desc="treat warp field as relative: x' = x + w(x)",
+    )
     datatype = traits.Enum(
-        'char',
-        'short',
-        'int',
-        'float',
-        'double',
-        argstr='--datatype=%s',
-        desc='Force output data type [char short int float double].')
+        "char",
+        "short",
+        "int",
+        "float",
+        "double",
+        argstr="--datatype=%s",
+        desc="Force output data type [char short int float double].",
+    )
     supersample = traits.Bool(
-        argstr='--super',
-        desc='intermediary supersampling of output, default is off')
+        argstr="--super", desc="intermediary supersampling of output, default is off"
+    )
     superlevel = traits.Either(
-        traits.Enum('a'),
+        traits.Enum("a"),
         traits.Int,
-        argstr='--superlevel=%s',
-        desc=("level of intermediary supersampling, a for 'automatic' or "
-              "integer level. Default = 2"))
+        argstr="--superlevel=%s",
+        desc=(
+            "level of intermediary supersampling, a for 'automatic' or "
+            "integer level. Default = 2"
+        ),
+    )
     premat = File(
         exists=True,
-        argstr='--premat=%s',
-        desc='filename for pre-transform (affine matrix)')
+        argstr="--premat=%s",
+        desc="filename for pre-transform (affine matrix)",
+    )
     postmat = File(
         exists=True,
-        argstr='--postmat=%s',
-        desc='filename for post-transform (affine matrix)')
+        argstr="--postmat=%s",
+        desc="filename for post-transform (affine matrix)",
+    )
     mask_file = File(
         exists=True,
-        argstr='--mask=%s',
-        desc='filename for mask image (in reference space)')
+        argstr="--mask=%s",
+        desc="filename for mask image (in reference space)",
+    )
     interp = traits.Enum(
-        'nn',
-        'trilinear',
-        'sinc',
-        'spline',
-        argstr='--interp=%s',
+        "nn",
+        "trilinear",
+        "sinc",
+        "spline",
+        argstr="--interp=%s",
         position=-2,
-        desc='interpolation method')
+        desc="interpolation method",
+    )
 
 
 class ApplyWarpOutputSpec(TraitedSpec):
-    out_file = File(exists=True, desc='Warped output file')
+    out_file = File(exists=True, desc="Warped output file")
 
 
 class ApplyWarp(FSLCommand):
@@ -1329,26 +1460,25 @@ class ApplyWarp(FSLCommand):
 
     """
 
-    _cmd = 'applywarp'
+    _cmd = "applywarp"
     input_spec = ApplyWarpInputSpec
     output_spec = ApplyWarpOutputSpec
 
     def _format_arg(self, name, spec, value):
-        if name == 'superlevel':
+        if name == "superlevel":
             return spec.argstr % str(value)
         return super(ApplyWarp, self)._format_arg(name, spec, value)
 
     def _list_outputs(self):
         outputs = self._outputs().get()
         if not isdefined(self.inputs.out_file):
-            outputs['out_file'] = self._gen_fname(
-                self.inputs.in_file, suffix='_warp')
+            outputs["out_file"] = self._gen_fname(self.inputs.in_file, suffix="_warp")
         else:
-            outputs['out_file'] = os.path.abspath(self.inputs.out_file)
+            outputs["out_file"] = os.path.abspath(self.inputs.out_file)
         return outputs
 
     def _gen_filename(self, name):
-        if name == 'out_file':
+        if name == "out_file":
             return self._list_outputs()[name]
         return None
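ApplyWarp likewise keeps its interface. A sketch of typical use, with placeholder file names and assuming a warp field produced earlier by FNIRT, is:

>>> from nipype.interfaces import fsl
>>> aw = fsl.ApplyWarp()
>>> aw.inputs.in_file = 'structural.nii'  # image to resample
>>> aw.inputs.ref_file = 'mni.nii'  # defines the output grid
>>> aw.inputs.field_file = 'struct2mni_warp.nii'  # e.g. FNIRT's field_file output
>>> res = aw.run()  # doctest: +SKIP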
Default = 2" + ), + ) premat = File( exists=True, - argstr='--premat=%s', - desc='filename for pre-transform (affine matrix)') + argstr="--premat=%s", + desc="filename for pre-transform (affine matrix)", + ) postmat = File( exists=True, - argstr='--postmat=%s', - desc='filename for post-transform (affine matrix)') + argstr="--postmat=%s", + desc="filename for post-transform (affine matrix)", + ) mask_file = File( exists=True, - argstr='--mask=%s', - desc='filename for mask image (in reference space)') + argstr="--mask=%s", + desc="filename for mask image (in reference space)", + ) interp = traits.Enum( - 'nn', - 'trilinear', - 'sinc', - 'spline', - argstr='--interp=%s', + "nn", + "trilinear", + "sinc", + "spline", + argstr="--interp=%s", position=-2, - desc='interpolation method') + desc="interpolation method", + ) class ApplyWarpOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='Warped output file') + out_file = File(exists=True, desc="Warped output file") class ApplyWarp(FSLCommand): @@ -1329,26 +1460,25 @@ class ApplyWarp(FSLCommand): """ - _cmd = 'applywarp' + _cmd = "applywarp" input_spec = ApplyWarpInputSpec output_spec = ApplyWarpOutputSpec def _format_arg(self, name, spec, value): - if name == 'superlevel': + if name == "superlevel": return spec.argstr % str(value) return super(ApplyWarp, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() if not isdefined(self.inputs.out_file): - outputs['out_file'] = self._gen_fname( - self.inputs.in_file, suffix='_warp') + outputs["out_file"] = self._gen_fname(self.inputs.in_file, suffix="_warp") else: - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._list_outputs()[name] return None @@ -1356,45 +1486,53 @@ def _gen_filename(self, name): class SliceTimerInputSpec(FSLCommandInputSpec): in_file = File( exists=True, - argstr='--in=%s', + argstr="--in=%s", mandatory=True, position=0, - desc='filename of input timeseries') + desc="filename of input timeseries", + ) out_file = File( - argstr='--out=%s', + argstr="--out=%s", genfile=True, - desc='filename of output timeseries', - hash_files=False) - index_dir = traits.Bool( - argstr='--down', desc='slice indexing from top to bottom') + desc="filename of output timeseries", + hash_files=False, + ) + index_dir = traits.Bool(argstr="--down", desc="slice indexing from top to bottom") time_repetition = traits.Float( - argstr='--repeat=%f', desc='Specify TR of data - default is 3s') + argstr="--repeat=%f", desc="Specify TR of data - default is 3s" + ) slice_direction = traits.Enum( 1, 2, 3, - argstr='--direction=%d', - desc='direction of slice acquisition (x=1, y=2, z=3) - default is z') - interleaved = traits.Bool( - argstr='--odd', desc='use interleaved acquisition') + argstr="--direction=%d", + desc="direction of slice acquisition (x=1, y=2, z=3) - default is z", + ) + interleaved = traits.Bool(argstr="--odd", desc="use interleaved acquisition") custom_timings = File( exists=True, - argstr='--tcustom=%s', - desc=('slice timings, in fractions of TR, range 0:1 (default is 0.5 = ' - 'no shift)')) + argstr="--tcustom=%s", + desc=( + "slice timings, in fractions of TR, range 0:1 (default is 0.5 = " + "no shift)" + ), + ) global_shift = traits.Float( - argstr='--tglobal', - desc='shift in fraction of TR, range 0:1 (default is 0.5 = no shift)') + argstr="--tglobal", + desc="shift 
 
 
 class SUSANInputSpec(FSLCommandInputSpec):
     in_file = File(
         exists=True,
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
         position=1,
-        desc='filename of input timeseries')
+        desc="filename of input timeseries",
+    )
     brightness_threshold = traits.Float(
-        argstr='%.10f',
+        argstr="%.10f",
         position=2,
         mandatory=True,
-        desc=('brightness threshold and should be greater than noise level '
-              'and less than contrast of edges to be preserved.'))
+        desc=(
+            "brightness threshold and should be greater than noise level "
+            "and less than contrast of edges to be preserved."
+        ),
+    )
     fwhm = traits.Float(
-        argstr='%.10f',
+        argstr="%.10f",
         position=3,
         mandatory=True,
-        desc='fwhm of smoothing, in mm, gets converted using sqrt(8*log(2))')
+        desc="fwhm of smoothing, in mm, gets converted using sqrt(8*log(2))",
+    )
     dimension = traits.Enum(
         3,
         2,
-        argstr='%d',
+        argstr="%d",
         position=4,
         usedefault=True,
-        desc='within-plane (2) or fully 3D (3)')
+        desc="within-plane (2) or fully 3D (3)",
+    )
     use_median = traits.Enum(
         1,
         0,
-        argstr='%d',
+        argstr="%d",
         position=5,
         usedefault=True,
-        desc=('whether to use a local median filter in the cases where '
-              'single-point noise is detected'))
+        desc=(
+            "whether to use a local median filter in the cases where "
+            "single-point noise is detected"
+        ),
+    )
     usans = traits.List(
         traits.Tuple(File(exists=True), traits.Float),
         maxlen=2,
-        argstr='',
+        argstr="",
         position=6,
         usedefault=True,
-        desc='determines whether the smoothing area (USAN) is to be '
-        'found from secondary images (0, 1 or 2). A negative '
-        'value for any brightness threshold will auto-set the '
-        'threshold at 10% of the robust range')
+        desc="determines whether the smoothing area (USAN) is to be "
+        "found from secondary images (0, 1 or 2). A negative "
+        "value for any brightness threshold will auto-set the "
+        "threshold at 10% of the robust range",
+    )
     out_file = File(
-        argstr='%s',
+        argstr="%s",
         position=-1,
         genfile=True,
-        desc='output file name',
-        hash_files=False)
+        desc="output file name",
+        hash_files=False,
+    )
 
 
 class SUSANOutputSpec(TraitedSpec):
-    smoothed_file = File(exists=True, desc='smoothed output file')
+    smoothed_file = File(exists=True, desc="smoothed output file")
 
 
 class SUSAN(FSLCommand):
@@ -1504,154 +1653,170 @@ class SUSAN(FSLCommand):
     >>> result = sus.run() # doctest: +SKIP
     """
 
-    _cmd = 'susan'
+    _cmd = "susan"
     input_spec = SUSANInputSpec
     output_spec = SUSANOutputSpec
 
     def _format_arg(self, name, spec, value):
-        if name == 'fwhm':
+        if name == "fwhm":
             return spec.argstr % (float(value) / np.sqrt(8 * np.log(2)))
-        if name == 'usans':
+        if name == "usans":
             if not value:
-                return '0'
+                return "0"
             arglist = [str(len(value))]
             for filename, thresh in value:
-                arglist.extend([filename, '%.10f' % thresh])
-            return ' '.join(arglist)
+                arglist.extend([filename, "%.10f" % thresh])
+            return " ".join(arglist)
         return super(SUSAN, self)._format_arg(name, spec, value)
 
     def _list_outputs(self):
         outputs = self._outputs().get()
         out_file = self.inputs.out_file
         if not isdefined(out_file):
-            out_file = self._gen_fname(self.inputs.in_file, suffix='_smooth')
-        outputs['smoothed_file'] = os.path.abspath(out_file)
+            out_file = self._gen_fname(self.inputs.in_file, suffix="_smooth")
+        outputs["smoothed_file"] = os.path.abspath(out_file)
         return outputs
 
     def _gen_filename(self, name):
-        if name == 'out_file':
-            return self._list_outputs()['smoothed_file']
+        if name == "out_file":
+            return self._list_outputs()["smoothed_file"]
         return None
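The SUSAN docstring already carries a doctest (its tail, ">>> result = sus.run()", is visible in the hunk above); filled out with placeholder values, the example reads roughly:

>>> from nipype.interfaces import fsl
>>> sus = fsl.SUSAN()
>>> sus.inputs.in_file = 'structural.nii'
>>> sus.inputs.brightness_threshold = 2000.0  # above noise, below edge contrast
>>> sus.inputs.fwhm = 8.0  # mm; converted internally via sqrt(8*log(2))
>>> result = sus.run()  # doctest: +SKIP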
 
 
 class FUGUEInputSpec(FSLCommandInputSpec):
-    in_file = File(
-        exists=True, argstr='--in=%s', desc='filename of input volume')
+    in_file = File(exists=True, argstr="--in=%s", desc="filename of input volume")
     shift_in_file = File(
         exists=True,
-        argstr='--loadshift=%s',
-        desc='filename for reading pixel shift volume')
+        argstr="--loadshift=%s",
+        desc="filename for reading pixel shift volume",
+    )
     phasemap_in_file = File(
-        exists=True,
-        argstr='--phasemap=%s',
-        desc='filename for input phase image')
+        exists=True, argstr="--phasemap=%s", desc="filename for input phase image"
+    )
     fmap_in_file = File(
         exists=True,
-        argstr='--loadfmap=%s',
-        desc='filename for loading fieldmap (rad/s)')
+        argstr="--loadfmap=%s",
+        desc="filename for loading fieldmap (rad/s)",
+    )
     unwarped_file = File(
-        argstr='--unwarp=%s',
-        desc='apply unwarping and save as filename',
-        xor=['warped_file'],
-        requires=['in_file'])
+        argstr="--unwarp=%s",
+        desc="apply unwarping and save as filename",
+        xor=["warped_file"],
+        requires=["in_file"],
+    )
     warped_file = File(
-        argstr='--warp=%s',
-        desc='apply forward warping and save as filename',
-        xor=['unwarped_file'],
-        requires=['in_file'])
+        argstr="--warp=%s",
+        desc="apply forward warping and save as filename",
+        xor=["unwarped_file"],
+        requires=["in_file"],
+    )
     forward_warping = traits.Bool(
-        False,
-        usedefault=True,
-        desc='apply forward warping instead of unwarping')
+        False, usedefault=True, desc="apply forward warping instead of unwarping"
+    )
     dwell_to_asym_ratio = traits.Float(
-        argstr='--dwelltoasym=%.10f', desc='set the dwell to asym time ratio')
+        argstr="--dwelltoasym=%.10f", desc="set the dwell to asym time ratio"
+    )
     dwell_time = traits.Float(
-        argstr='--dwell=%.10f',
-        desc=('set the EPI dwell time per phase-encode line - same as echo '
-              'spacing - 
(sec)')) + argstr="--dwell=%.10f", + desc=( + "set the EPI dwell time per phase-encode line - same as echo " + "spacing - (sec)" + ), + ) asym_se_time = traits.Float( - argstr='--asym=%.10f', - desc='set the fieldmap asymmetric spin echo time (sec)') - median_2dfilter = traits.Bool( - argstr='--median', desc='apply 2D median filtering') + argstr="--asym=%.10f", desc="set the fieldmap asymmetric spin echo time (sec)" + ) + median_2dfilter = traits.Bool(argstr="--median", desc="apply 2D median filtering") despike_2dfilter = traits.Bool( - argstr='--despike', desc='apply a 2D de-spiking filter') + argstr="--despike", desc="apply a 2D de-spiking filter" + ) no_gap_fill = traits.Bool( - argstr='--nofill', - desc='do not apply gap-filling measure to the fieldmap') + argstr="--nofill", desc="do not apply gap-filling measure to the fieldmap" + ) no_extend = traits.Bool( - argstr='--noextend', - desc='do not apply rigid-body extrapolation to the fieldmap') + argstr="--noextend", + desc="do not apply rigid-body extrapolation to the fieldmap", + ) smooth2d = traits.Float( - argstr='--smooth2=%.2f', - desc='apply 2D Gaussian smoothing of sigma N (in mm)') + argstr="--smooth2=%.2f", desc="apply 2D Gaussian smoothing of sigma N (in mm)" + ) smooth3d = traits.Float( - argstr='--smooth3=%.2f', - desc='apply 3D Gaussian smoothing of sigma N (in mm)') + argstr="--smooth3=%.2f", desc="apply 3D Gaussian smoothing of sigma N (in mm)" + ) poly_order = traits.Int( - argstr='--poly=%d', desc='apply polynomial fitting of order N') + argstr="--poly=%d", desc="apply polynomial fitting of order N" + ) fourier_order = traits.Int( - argstr='--fourier=%d', - desc='apply Fourier (sinusoidal) fitting of order N') - pava = traits.Bool( - argstr='--pava', desc='apply monotonic enforcement via PAVA') + argstr="--fourier=%d", desc="apply Fourier (sinusoidal) fitting of order N" + ) + pava = traits.Bool(argstr="--pava", desc="apply monotonic enforcement via PAVA") despike_threshold = traits.Float( - argstr='--despikethreshold=%s', - desc='specify the threshold for de-spiking (default=3.0)') + argstr="--despikethreshold=%s", + desc="specify the threshold for de-spiking (default=3.0)", + ) unwarp_direction = traits.Enum( - 'x', - 'y', - 'z', - 'x-', - 'y-', - 'z-', - argstr='--unwarpdir=%s', - desc='specifies direction of warping (default y)') + "x", + "y", + "z", + "x-", + "y-", + "z-", + argstr="--unwarpdir=%s", + desc="specifies direction of warping (default y)", + ) phase_conjugate = traits.Bool( - argstr='--phaseconj', desc='apply phase conjugate method of unwarping') + argstr="--phaseconj", desc="apply phase conjugate method of unwarping" + ) icorr = traits.Bool( - argstr='--icorr', - requires=['shift_in_file'], - desc=('apply intensity correction to unwarping (pixel shift method ' - 'only)')) + argstr="--icorr", + requires=["shift_in_file"], + desc=("apply intensity correction to unwarping (pixel shift method " "only)"), + ) icorr_only = traits.Bool( - argstr='--icorronly', - requires=['unwarped_file'], - desc='apply intensity correction only') + argstr="--icorronly", + requires=["unwarped_file"], + desc="apply intensity correction only", + ) mask_file = File( - exists=True, - argstr='--mask=%s', - desc='filename for loading valid mask') + exists=True, argstr="--mask=%s", desc="filename for loading valid mask" + ) nokspace = traits.Bool( - False, argstr='--nokspace', desc='do not use k-space forward warping') + False, argstr="--nokspace", desc="do not use k-space forward warping" + ) # Special outputs: shift (voxel shift 
map, vsm) save_shift = traits.Bool( - False, xor=['save_unmasked_shift'], desc='write pixel shift volume') + False, xor=["save_unmasked_shift"], desc="write pixel shift volume" + ) shift_out_file = File( - argstr='--saveshift=%s', desc='filename for saving pixel shift volume') + argstr="--saveshift=%s", desc="filename for saving pixel shift volume" + ) save_unmasked_shift = traits.Bool( - argstr='--unmaskshift', - xor=['save_shift'], - desc='saves the unmasked shiftmap when using --saveshift') + argstr="--unmaskshift", + xor=["save_shift"], + desc="saves the unmasked shiftmap when using --saveshift", + ) # Special outputs: fieldmap (fmap) save_fmap = traits.Bool( - False, xor=['save_unmasked_fmap'], desc='write field map volume') + False, xor=["save_unmasked_fmap"], desc="write field map volume" + ) fmap_out_file = File( - argstr='--savefmap=%s', desc='filename for saving fieldmap (rad/s)') + argstr="--savefmap=%s", desc="filename for saving fieldmap (rad/s)" + ) save_unmasked_fmap = traits.Bool( False, - argstr='--unmaskfmap', - xor=['save_fmap'], - desc='saves the unmasked fieldmap when using --savefmap') + argstr="--unmaskfmap", + xor=["save_fmap"], + desc="saves the unmasked fieldmap when using --savefmap", + ) class FUGUEOutputSpec(TraitedSpec): - unwarped_file = File(desc='unwarped file') - warped_file = File(desc='forward warped file') - shift_out_file = File(desc='voxel shift map file') - fmap_out_file = File(desc='fieldmap file') + unwarped_file = File(desc="unwarped file") + warped_file = File(desc="forward warped file") + shift_out_file = File(desc="voxel shift map file") + fmap_out_file = File(desc="fieldmap file") class FUGUE(FSLCommand): @@ -1719,7 +1884,7 @@ class FUGUE(FSLCommand): """ - _cmd = 'fugue' + _cmd = "fugue" input_spec = FUGUEInputSpec output_spec = FUGUEOutputSpec @@ -1733,82 +1898,97 @@ def _parse_inputs(self, skip=None): if not input_phase and not input_vsm and not input_fmap: raise RuntimeError( - ('Either phasemap_in_file, shift_in_file or fmap_in_file must ' - 'be set.')) + ( + "Either phasemap_in_file, shift_in_file or fmap_in_file must " + "be set." 
+ ) + ) if not isdefined(self.inputs.in_file): - skip += ['unwarped_file', 'warped_file'] + skip += ["unwarped_file", "warped_file"] else: if self.inputs.forward_warping: - skip += ['unwarped_file'] - trait_spec = self.inputs.trait('warped_file') + skip += ["unwarped_file"] + trait_spec = self.inputs.trait("warped_file") trait_spec.name_template = "%s_warped" - trait_spec.name_source = 'in_file' - trait_spec.output_name = 'warped_file' + trait_spec.name_source = "in_file" + trait_spec.output_name = "warped_file" else: - skip += ['warped_file'] - trait_spec = self.inputs.trait('unwarped_file') + skip += ["warped_file"] + trait_spec = self.inputs.trait("unwarped_file") trait_spec.name_template = "%s_unwarped" - trait_spec.name_source = 'in_file' - trait_spec.output_name = 'unwarped_file' + trait_spec.name_source = "in_file" + trait_spec.output_name = "unwarped_file" # Handle shift output if not isdefined(self.inputs.shift_out_file): - vsm_save_masked = (isdefined(self.inputs.save_shift) - and self.inputs.save_shift) - vsm_save_unmasked = (isdefined(self.inputs.save_unmasked_shift) - and self.inputs.save_unmasked_shift) - - if (vsm_save_masked or vsm_save_unmasked): - trait_spec = self.inputs.trait('shift_out_file') - trait_spec.output_name = 'shift_out_file' + vsm_save_masked = ( + isdefined(self.inputs.save_shift) and self.inputs.save_shift + ) + vsm_save_unmasked = ( + isdefined(self.inputs.save_unmasked_shift) + and self.inputs.save_unmasked_shift + ) + + if vsm_save_masked or vsm_save_unmasked: + trait_spec = self.inputs.trait("shift_out_file") + trait_spec.output_name = "shift_out_file" if input_fmap: - trait_spec.name_source = 'fmap_in_file' + trait_spec.name_source = "fmap_in_file" elif input_phase: - trait_spec.name_source = 'phasemap_in_file' + trait_spec.name_source = "phasemap_in_file" elif input_vsm: - trait_spec.name_source = 'shift_in_file' + trait_spec.name_source = "shift_in_file" else: raise RuntimeError( - ('Either phasemap_in_file, shift_in_file or ' - 'fmap_in_file must be set.')) + ( + "Either phasemap_in_file, shift_in_file or " + "fmap_in_file must be set." 
+ ) + ) if vsm_save_unmasked: - trait_spec.name_template = '%s_vsm_unmasked' + trait_spec.name_template = "%s_vsm_unmasked" else: - trait_spec.name_template = '%s_vsm' + trait_spec.name_template = "%s_vsm" else: - skip += ['save_shift', 'save_unmasked_shift', 'shift_out_file'] + skip += ["save_shift", "save_unmasked_shift", "shift_out_file"] # Handle fieldmap output if not isdefined(self.inputs.fmap_out_file): - fmap_save_masked = (isdefined(self.inputs.save_fmap) - and self.inputs.save_fmap) - fmap_save_unmasked = (isdefined(self.inputs.save_unmasked_fmap) - and self.inputs.save_unmasked_fmap) - - if (fmap_save_masked or fmap_save_unmasked): - trait_spec = self.inputs.trait('fmap_out_file') - trait_spec.output_name = 'fmap_out_file' + fmap_save_masked = ( + isdefined(self.inputs.save_fmap) and self.inputs.save_fmap + ) + fmap_save_unmasked = ( + isdefined(self.inputs.save_unmasked_fmap) + and self.inputs.save_unmasked_fmap + ) + + if fmap_save_masked or fmap_save_unmasked: + trait_spec = self.inputs.trait("fmap_out_file") + trait_spec.output_name = "fmap_out_file" if input_vsm: - trait_spec.name_source = 'shift_in_file' + trait_spec.name_source = "shift_in_file" elif input_phase: - trait_spec.name_source = 'phasemap_in_file' + trait_spec.name_source = "phasemap_in_file" elif input_fmap: - trait_spec.name_source = 'fmap_in_file' + trait_spec.name_source = "fmap_in_file" else: raise RuntimeError( - ('Either phasemap_in_file, shift_in_file or ' - 'fmap_in_file must be set.')) + ( + "Either phasemap_in_file, shift_in_file or " + "fmap_in_file must be set." + ) + ) if fmap_save_unmasked: - trait_spec.name_template = '%s_fieldmap_unmasked' + trait_spec.name_template = "%s_fieldmap_unmasked" else: - trait_spec.name_template = '%s_fieldmap' + trait_spec.name_template = "%s_fieldmap" else: - skip += ['save_fmap', 'save_unmasked_fmap', 'fmap_out_file'] + skip += ["save_fmap", "save_unmasked_fmap", "fmap_out_file"] return super(FUGUE, self)._parse_inputs(skip=skip) @@ -1816,66 +1996,75 @@ def _parse_inputs(self, skip=None): class PRELUDEInputSpec(FSLCommandInputSpec): complex_phase_file = File( exists=True, - argstr='--complex=%s', + argstr="--complex=%s", mandatory=True, - xor=['magnitude_file', 'phase_file'], - desc='complex phase input volume') + xor=["magnitude_file", "phase_file"], + desc="complex phase input volume", + ) magnitude_file = File( exists=True, - argstr='--abs=%s', + argstr="--abs=%s", mandatory=True, - xor=['complex_phase_file'], - desc='file containing magnitude image') + xor=["complex_phase_file"], + desc="file containing magnitude image", + ) phase_file = File( exists=True, - argstr='--phase=%s', + argstr="--phase=%s", mandatory=True, - xor=['complex_phase_file'], - desc='raw phase file') + xor=["complex_phase_file"], + desc="raw phase file", + ) unwrapped_phase_file = File( genfile=True, - argstr='--unwrap=%s', - desc='file containing unwrapepd phase', - hash_files=False) + argstr="--unwrap=%s", + desc="file containing unwrapepd phase", + hash_files=False, + ) num_partitions = traits.Int( - argstr='--numphasesplit=%d', desc='number of phase partitions to use') + argstr="--numphasesplit=%d", desc="number of phase partitions to use" + ) labelprocess2d = traits.Bool( - argstr='--labelslices', - desc='does label processing in 2D (slice at a time)') + argstr="--labelslices", desc="does label processing in 2D (slice at a time)" + ) process2d = traits.Bool( - argstr='--slices', - xor=['labelprocess2d'], - desc='does all processing in 2D (slice at a time)') + argstr="--slices", + 
xor=["labelprocess2d"], + desc="does all processing in 2D (slice at a time)", + ) process3d = traits.Bool( - argstr='--force3D', - xor=['labelprocess2d', 'process2d'], - desc='forces all processing to be full 3D') + argstr="--force3D", + xor=["labelprocess2d", "process2d"], + desc="forces all processing to be full 3D", + ) threshold = traits.Float( - argstr='--thresh=%.10f', desc='intensity threshold for masking') + argstr="--thresh=%.10f", desc="intensity threshold for masking" + ) mask_file = File( - exists=True, argstr='--mask=%s', desc='filename of mask input volume') + exists=True, argstr="--mask=%s", desc="filename of mask input volume" + ) start = traits.Int( - argstr='--start=%d', desc='first image number to process (default 0)') + argstr="--start=%d", desc="first image number to process (default 0)" + ) end = traits.Int( - argstr='--end=%d', desc='final image number to process (default Inf)') + argstr="--end=%d", desc="final image number to process (default Inf)" + ) savemask_file = File( - argstr='--savemask=%s', - desc='saving the mask volume', - hash_files=False) + argstr="--savemask=%s", desc="saving the mask volume", hash_files=False + ) rawphase_file = File( - argstr='--rawphase=%s', - desc='saving the raw phase output', - hash_files=False) + argstr="--rawphase=%s", desc="saving the raw phase output", hash_files=False + ) label_file = File( - argstr='--labels=%s', - desc='saving the area labels output', - hash_files=False) + argstr="--labels=%s", desc="saving the area labels output", hash_files=False + ) removeramps = traits.Bool( - argstr='--removeramps', desc='remove phase ramps during unwrapping') + argstr="--removeramps", desc="remove phase ramps during unwrapping" + ) class PRELUDEOutputSpec(TraitedSpec): - unwrapped_phase_file = File(exists=True, desc='unwrapped phase file') + unwrapped_phase_file = File(exists=True, desc="unwrapped phase file") class PRELUDE(FSLCommand): @@ -1887,30 +2076,31 @@ class PRELUDE(FSLCommand): Please insert examples for use of this command """ + input_spec = PRELUDEInputSpec output_spec = PRELUDEOutputSpec - _cmd = 'prelude' + _cmd = "prelude" def __init__(self, **kwargs): super(PRELUDE, self).__init__(**kwargs) - warn('This has not been fully tested. Please report any failures.') + warn("This has not been fully tested. 
Please report any failures.") def _list_outputs(self): outputs = self._outputs().get() out_file = self.inputs.unwrapped_phase_file if not isdefined(out_file): if isdefined(self.inputs.phase_file): - out_file = self._gen_fname( - self.inputs.phase_file, suffix='_unwrapped') + out_file = self._gen_fname(self.inputs.phase_file, suffix="_unwrapped") elif isdefined(self.inputs.complex_phase_file): out_file = self._gen_fname( - self.inputs.complex_phase_file, suffix='_phase_unwrapped') - outputs['unwrapped_phase_file'] = os.path.abspath(out_file) + self.inputs.complex_phase_file, suffix="_phase_unwrapped" + ) + outputs["unwrapped_phase_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): - if name == 'unwrapped_phase_file': - return self._list_outputs()['unwrapped_phase_file'] + if name == "unwrapped_phase_file": + return self._list_outputs()["unwrapped_phase_file"] return None @@ -1920,72 +2110,87 @@ class FIRSTInputSpec(FSLCommandInputSpec): mandatory=True, position=-2, copyfile=False, - argstr='-i %s', - desc='input data file') + argstr="-i %s", + desc="input data file", + ) out_file = File( - 'segmented', + "segmented", usedefault=True, mandatory=True, position=-1, - argstr='-o %s', - desc='output data file', - hash_files=False) - verbose = traits.Bool(argstr='-v', position=1, desc="Use verbose logging.") + argstr="-o %s", + desc="output data file", + hash_files=False, + ) + verbose = traits.Bool(argstr="-v", position=1, desc="Use verbose logging.") brain_extracted = traits.Bool( - argstr='-b', + argstr="-b", position=2, - desc="Input structural image is already brain-extracted") + desc="Input structural image is already brain-extracted", + ) no_cleanup = traits.Bool( - argstr='-d', + argstr="-d", position=3, - desc="Input structural image is already brain-extracted") + desc="Input structural image is already brain-extracted", + ) method = traits.Enum( - 'auto', - 'fast', - 'none', - xor=['method_as_numerical_threshold'], - argstr='-m %s', + "auto", + "fast", + "none", + xor=["method_as_numerical_threshold"], + argstr="-m %s", position=4, usedefault=True, - desc=("Method must be one of auto, fast, none, or it can be entered " - "using the 'method_as_numerical_threshold' input")) + desc=( + "Method must be one of auto, fast, none, or it can be entered " + "using the 'method_as_numerical_threshold' input" + ), + ) method_as_numerical_threshold = traits.Float( - argstr='-m %.4f', + argstr="-m %.4f", position=4, - desc=("Specify a numerical threshold value or use the 'method' input " - "to choose auto, fast, or none")) + desc=( + "Specify a numerical threshold value or use the 'method' input " + "to choose auto, fast, or none" + ), + ) list_of_specific_structures = traits.List( traits.Str, - argstr='-s %s', - sep=',', + argstr="-s %s", + sep=",", position=5, minlen=1, - desc='Runs only on the specified structures (e.g. L_Hipp, R_Hipp' - 'L_Accu, R_Accu, L_Amyg, R_Amyg' - 'L_Caud, R_Caud, L_Pall, R_Pall' - 'L_Puta, R_Puta, L_Thal, R_Thal, BrStem') + desc="Runs only on the specified structures (e.g. L_Hipp, R_Hipp" + "L_Accu, R_Accu, L_Amyg, R_Amyg" + "L_Caud, R_Caud, L_Pall, R_Pall" + "L_Puta, R_Puta, L_Thal, R_Thal, BrStem", + ) affine_file = File( exists=True, position=6, - argstr='-a %s', - desc=('Affine matrix to use (e.g. img2std.mat) (does not ' - 're-run registration)')) + argstr="-a %s", + desc=( + "Affine matrix to use (e.g. 
img2std.mat) (does not " "re-run registration)" + ), + ) class FIRSTOutputSpec(TraitedSpec): vtk_surfaces = OutputMultiPath( - File(exists=True), - desc='VTK format meshes for each subcortical region') - bvars = OutputMultiPath( - File(exists=True), desc='bvars for each subcortical region') + File(exists=True), desc="VTK format meshes for each subcortical region" + ) + bvars = OutputMultiPath(File(exists=True), desc="bvars for each subcortical region") original_segmentations = File( exists=True, - desc=('3D image file containing the segmented regions ' - 'as integer values. Uses CMA labelling')) + desc=( + "3D image file containing the segmented regions " + "as integer values. Uses CMA labelling" + ), + ) segmentation_file = File( exists=True, - desc=('4D image file containing a single volume per ' - 'segmented region')) + desc=("4D image file containing a single volume per " "segmented region"), + ) class FIRST(FSLCommand): @@ -2004,7 +2209,7 @@ class FIRST(FSLCommand): """ - _cmd = 'run_first_all' + _cmd = "run_first_all" input_spec = FIRSTInputSpec output_spec = FIRSTOutputSpec @@ -2015,51 +2220,60 @@ def _list_outputs(self): structures = self.inputs.list_of_specific_structures else: structures = [ - 'L_Hipp', 'R_Hipp', 'L_Accu', 'R_Accu', 'L_Amyg', 'R_Amyg', - 'L_Caud', 'R_Caud', 'L_Pall', 'R_Pall', 'L_Puta', 'R_Puta', - 'L_Thal', 'R_Thal', 'BrStem' + "L_Hipp", + "R_Hipp", + "L_Accu", + "R_Accu", + "L_Amyg", + "R_Amyg", + "L_Caud", + "R_Caud", + "L_Pall", + "R_Pall", + "L_Puta", + "R_Puta", + "L_Thal", + "R_Thal", + "BrStem", ] - outputs['original_segmentations'] = \ - self._gen_fname('original_segmentations') - outputs['segmentation_file'] = self._gen_fname('segmentation_file') - outputs['vtk_surfaces'] = self._gen_mesh_names('vtk_surfaces', - structures) - outputs['bvars'] = self._gen_mesh_names('bvars', structures) + outputs["original_segmentations"] = self._gen_fname("original_segmentations") + outputs["segmentation_file"] = self._gen_fname("segmentation_file") + outputs["vtk_surfaces"] = self._gen_mesh_names("vtk_surfaces", structures) + outputs["bvars"] = self._gen_mesh_names("bvars", structures) return outputs def _gen_fname(self, basename): path, outname, ext = split_filename(self.inputs.out_file) - method = 'none' - if isdefined(self.inputs.method) and self.inputs.method != 'none': - method = 'fast' - if (self.inputs.list_of_specific_structures - and self.inputs.method == 'auto'): - method = 'none' + method = "none" + if isdefined(self.inputs.method) and self.inputs.method != "none": + method = "fast" + if self.inputs.list_of_specific_structures and self.inputs.method == "auto": + method = "none" if isdefined(self.inputs.method_as_numerical_threshold): - thres = '%.4f' % self.inputs.method_as_numerical_threshold - method = thres.replace('.', '') + thres = "%.4f" % self.inputs.method_as_numerical_threshold + method = thres.replace(".", "") - if basename == 'original_segmentations': - return op.abspath('%s_all_%s_origsegs.nii.gz' % (outname, method)) - if basename == 'segmentation_file': - return op.abspath('%s_all_%s_firstseg.nii.gz' % (outname, method)) + if basename == "original_segmentations": + return op.abspath("%s_all_%s_origsegs.nii.gz" % (outname, method)) + if basename == "segmentation_file": + return op.abspath("%s_all_%s_firstseg.nii.gz" % (outname, method)) return None def _gen_mesh_names(self, name, structures): path, prefix, ext = split_filename(self.inputs.out_file) - if name == 'vtk_surfaces': + if name == "vtk_surfaces": vtks = list() for struct in structures: 
- vtk = prefix + '-' + struct + '_first.vtk' + vtk = prefix + "-" + struct + "_first.vtk" vtks.append(op.abspath(vtk)) return vtks - if name == 'bvars': + if name == "bvars": bvars = list() for struct in structures: - bvar = prefix + '-' + struct + '_first.bvars' + bvar = prefix + "-" + struct + "_first.bvars" bvars.append(op.abspath(bvar)) return bvars return None diff --git a/nipype/interfaces/fsl/tests/test_FILMGLS.py b/nipype/interfaces/fsl/tests/test_FILMGLS.py index a37fc1b116..b4934f4d7a 100644 --- a/nipype/interfaces/fsl/tests/test_FILMGLS.py +++ b/nipype/interfaces/fsl/tests/test_FILMGLS.py @@ -4,119 +4,144 @@ def test_filmgls(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), autocorr_estimate_only=dict( xor=[ - 'autocorr_estimate_only', 'fit_armodel', 'tukey_window', - 'multitaper_product', 'use_pava', 'autocorr_noestimate' + "autocorr_estimate_only", + "fit_armodel", + "tukey_window", + "multitaper_product", + "use_pava", + "autocorr_noestimate", ], - argstr='-ac', + argstr="-ac", ), autocorr_noestimate=dict( xor=[ - 'autocorr_estimate_only', 'fit_armodel', 'tukey_window', - 'multitaper_product', 'use_pava', 'autocorr_noestimate' + "autocorr_estimate_only", + "fit_armodel", + "tukey_window", + "multitaper_product", + "use_pava", + "autocorr_noestimate", ], - argstr='-noest', + argstr="-noest", ), - brightness_threshold=dict(argstr='-epith %d', ), - design_file=dict(argstr='%s', ), - environ=dict(usedefault=True, ), + brightness_threshold=dict(argstr="-epith %d",), + design_file=dict(argstr="%s",), + environ=dict(usedefault=True,), fit_armodel=dict( xor=[ - 'autocorr_estimate_only', 'fit_armodel', 'tukey_window', - 'multitaper_product', 'use_pava', 'autocorr_noestimate' + "autocorr_estimate_only", + "fit_armodel", + "tukey_window", + "multitaper_product", + "use_pava", + "autocorr_noestimate", ], - argstr='-ar', + argstr="-ar", ), - full_data=dict(argstr='-v', ), - in_file=dict( - mandatory=True, - argstr='%s', - ), - mask_size=dict(argstr='-ms %d', ), + full_data=dict(argstr="-v",), + in_file=dict(mandatory=True, argstr="%s",), + mask_size=dict(argstr="-ms %d",), multitaper_product=dict( xor=[ - 'autocorr_estimate_only', 'fit_armodel', 'tukey_window', - 'multitaper_product', 'use_pava', 'autocorr_noestimate' + "autocorr_estimate_only", + "fit_armodel", + "tukey_window", + "multitaper_product", + "use_pava", + "autocorr_noestimate", ], - argstr='-mt %d', + argstr="-mt %d", ), - output_pwdata=dict(argstr='-output_pwdata', ), + output_pwdata=dict(argstr="-output_pwdata",), output_type=dict(), - results_dir=dict( - usedefault=True, - argstr='-rn %s', - ), - smooth_autocorr=dict(argstr='-sa', ), - threshold=dict(argstr='%f', ), + results_dir=dict(usedefault=True, argstr="-rn %s",), + smooth_autocorr=dict(argstr="-sa",), + threshold=dict(argstr="%f",), tukey_window=dict( xor=[ - 'autocorr_estimate_only', 'fit_armodel', 'tukey_window', - 'multitaper_product', 'use_pava', 'autocorr_noestimate' + "autocorr_estimate_only", + "fit_armodel", + "tukey_window", + "multitaper_product", + "use_pava", + "autocorr_noestimate", ], - argstr='-tukey %d', + argstr="-tukey %d", ), - use_pava=dict(argstr='-pava', ), + use_pava=dict(argstr="-pava",), ) input_map2 = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), autocorr_estimate_only=dict( xor=[ - 'autocorr_estimate_only', 'fit_armodel', 'tukey_window', - 'multitaper_product', 'use_pava', 'autocorr_noestimate' + "autocorr_estimate_only", + "fit_armodel", + "tukey_window", + "multitaper_product", + "use_pava", + 
"autocorr_noestimate", ], - argstr='--ac', + argstr="--ac", ), autocorr_noestimate=dict( xor=[ - 'autocorr_estimate_only', 'fit_armodel', 'tukey_window', - 'multitaper_product', 'use_pava', 'autocorr_noestimate' + "autocorr_estimate_only", + "fit_armodel", + "tukey_window", + "multitaper_product", + "use_pava", + "autocorr_noestimate", ], - argstr='--noest', + argstr="--noest", ), - brightness_threshold=dict(argstr='--epith=%d', ), - design_file=dict(argstr='--pd=%s', ), - environ=dict(usedefault=True, ), + brightness_threshold=dict(argstr="--epith=%d",), + design_file=dict(argstr="--pd=%s",), + environ=dict(usedefault=True,), fit_armodel=dict( xor=[ - 'autocorr_estimate_only', 'fit_armodel', 'tukey_window', - 'multitaper_product', 'use_pava', 'autocorr_noestimate' + "autocorr_estimate_only", + "fit_armodel", + "tukey_window", + "multitaper_product", + "use_pava", + "autocorr_noestimate", ], - argstr='--ar', - ), - full_data=dict(argstr='-v', ), - in_file=dict( - mandatory=True, - argstr='--in=%s', + argstr="--ar", ), - mask_size=dict(argstr='--ms=%d', ), + full_data=dict(argstr="-v",), + in_file=dict(mandatory=True, argstr="--in=%s",), + mask_size=dict(argstr="--ms=%d",), multitaper_product=dict( xor=[ - 'autocorr_estimate_only', 'fit_armodel', 'tukey_window', - 'multitaper_product', 'use_pava', 'autocorr_noestimate' + "autocorr_estimate_only", + "fit_armodel", + "tukey_window", + "multitaper_product", + "use_pava", + "autocorr_noestimate", ], - argstr='--mt=%d', + argstr="--mt=%d", ), - output_pwdata=dict(argstr='--outputPWdata', ), + output_pwdata=dict(argstr="--outputPWdata",), output_type=dict(), - results_dir=dict( - argstr='--rn=%s', - usedefault=True, - ), - smooth_autocorr=dict(argstr='--sa', ), - threshold=dict( - usedefault=True, - argstr='--thr=%f', - ), + results_dir=dict(argstr="--rn=%s", usedefault=True,), + smooth_autocorr=dict(argstr="--sa",), + threshold=dict(usedefault=True, argstr="--thr=%f",), tukey_window=dict( xor=[ - 'autocorr_estimate_only', 'fit_armodel', 'tukey_window', - 'multitaper_product', 'use_pava', 'autocorr_noestimate' + "autocorr_estimate_only", + "fit_armodel", + "tukey_window", + "multitaper_product", + "use_pava", + "autocorr_noestimate", ], - argstr='--tukey=%d', + argstr="--tukey=%d", ), - use_pava=dict(argstr='--pava', ), + use_pava=dict(argstr="--pava",), ) instance = FILMGLS() if isinstance(instance.inputs, FILMGLSInputSpec): diff --git a/nipype/interfaces/fsl/tests/test_Level1Design_functions.py b/nipype/interfaces/fsl/tests/test_Level1Design_functions.py index 44c04d41d5..f13ddfaccf 100644 --- a/nipype/interfaces/fsl/tests/test_Level1Design_functions.py +++ b/nipype/interfaces/fsl/tests/test_Level1Design_functions.py @@ -8,25 +8,31 @@ def test_level1design(tmpdir): old = tmpdir.chdir() l = Level1Design() runinfo = dict( - cond=[{ - 'name': 'test_condition', - 'onset': [0, 10], - 'duration': [10, 10] - }], - regress=[]) + cond=[{"name": "test_condition", "onset": [0, 10], "duration": [10, 10]}], + regress=[], + ) runidx = 0 contrasts = Undefined do_tempfilter = False orthogonalization = {} - basic_ev_parameters = {'temporalderiv': False} - convolution_variants = [('custom', 7, { - 'temporalderiv': False, - 'bfcustompath': '/some/path' - }), ('hrf', 3, basic_ev_parameters), ('dgamma', 3, basic_ev_parameters), - ('gamma', 2, basic_ev_parameters), - ('none', 0, basic_ev_parameters)] + basic_ev_parameters = {"temporalderiv": False} + convolution_variants = [ + ("custom", 7, {"temporalderiv": False, "bfcustompath": "/some/path"}), + ("hrf", 3, 
basic_ev_parameters), + ("dgamma", 3, basic_ev_parameters), + ("gamma", 2, basic_ev_parameters), + ("none", 0, basic_ev_parameters), + ] for key, val, ev_parameters in convolution_variants: output_num, output_txt = Level1Design._create_ev_files( - l, os.getcwd(), runinfo, runidx, ev_parameters, orthogonalization, - contrasts, do_tempfilter, key) + l, + os.getcwd(), + runinfo, + runidx, + ev_parameters, + orthogonalization, + contrasts, + do_tempfilter, + key, + ) assert "set fmri(convolve1) {0}".format(val) in output_txt diff --git a/nipype/interfaces/fsl/tests/test_auto_AR1Image.py b/nipype/interfaces/fsl/tests/test_auto_AR1Image.py index e631c31d70..23e6a19b6c 100644 --- a/nipype/interfaces/fsl/tests/test_auto_AR1Image.py +++ b/nipype/interfaces/fsl/tests/test_auto_AR1Image.py @@ -4,41 +4,16 @@ def test_AR1Image_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict( - argstr='-%sar1', - position=4, - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - internal_datatype=dict( - argstr='-dt %s', - position=1, - ), - nan2zeros=dict( - argstr='-nan', - position=3, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="-%sar1", position=4, usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + internal_datatype=dict(argstr="-dt %s", position=1,), + nan2zeros=dict(argstr="-nan", position=3,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-2, - ), - output_datatype=dict( - argstr='-odt %s', - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), + output_datatype=dict(argstr="-odt %s", position=-1,), output_type=dict(), ) inputs = AR1Image.input_spec() @@ -46,8 +21,10 @@ def test_AR1Image_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AR1Image_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = AR1Image.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_AccuracyTester.py b/nipype/interfaces/fsl/tests/test_auto_AccuracyTester.py index bc1e962fa1..6faf6d5d27 100644 --- a/nipype/interfaces/fsl/tests/test_auto_AccuracyTester.py +++ b/nipype/interfaces/fsl/tests/test_auto_AccuracyTester.py @@ -4,27 +4,12 @@ def test_AccuracyTester_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - mel_icas=dict( - argstr='%s', - copyfile=False, - mandatory=True, - position=3, - ), - output_directory=dict( - argstr='%s', - mandatory=True, - position=2, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + mel_icas=dict(argstr="%s", copyfile=False, mandatory=True, position=3,), + output_directory=dict(argstr="%s", mandatory=True, position=2,), trained_wts_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=1, + argstr="%s", extensions=None, mandatory=True, position=1, ), ) inputs = AccuracyTester.input_spec() @@ -32,11 +17,10 @@ def test_AccuracyTester_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AccuracyTester_outputs(): 
- output_map = dict(output_directory=dict( - argstr='%s', - position=1, - ), ) + output_map = dict(output_directory=dict(argstr="%s", position=1,),) outputs = AccuracyTester.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_ApplyMask.py b/nipype/interfaces/fsl/tests/test_auto_ApplyMask.py index dfc98c5a02..dcd7fc6081 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ApplyMask.py +++ b/nipype/interfaces/fsl/tests/test_auto_ApplyMask.py @@ -4,42 +4,16 @@ def test_ApplyMask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - internal_datatype=dict( - argstr='-dt %s', - position=1, - ), - mask_file=dict( - argstr='-mas %s', - extensions=None, - mandatory=True, - position=4, - ), - nan2zeros=dict( - argstr='-nan', - position=3, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + internal_datatype=dict(argstr="-dt %s", position=1,), + mask_file=dict(argstr="-mas %s", extensions=None, mandatory=True, position=4,), + nan2zeros=dict(argstr="-nan", position=3,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-2, - ), - output_datatype=dict( - argstr='-odt %s', - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), + output_datatype=dict(argstr="-odt %s", position=-1,), output_type=dict(), ) inputs = ApplyMask.input_spec() @@ -47,8 +21,10 @@ def test_ApplyMask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyMask_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ApplyMask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_ApplyTOPUP.py b/nipype/interfaces/fsl/tests/test_auto_ApplyTOPUP.py index 0af0b43b0a..a600c425e6 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ApplyTOPUP.py +++ b/nipype/interfaces/fsl/tests/test_auto_ApplyTOPUP.py @@ -4,44 +4,28 @@ def test_ApplyTOPUP_inputs(): input_map = dict( - args=dict(argstr='%s', ), - datatype=dict(argstr='-d=%s', ), - encoding_file=dict( - argstr='--datain=%s', - extensions=None, - mandatory=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_files=dict( - argstr='--imain=%s', - mandatory=True, - sep=',', - ), - in_index=dict( - argstr='--inindex=%s', - sep=',', - ), + args=dict(argstr="%s",), + datatype=dict(argstr="-d=%s",), + encoding_file=dict(argstr="--datain=%s", extensions=None, mandatory=True,), + environ=dict(nohash=True, usedefault=True,), + in_files=dict(argstr="--imain=%s", mandatory=True, sep=",",), + in_index=dict(argstr="--inindex=%s", sep=",",), in_topup_fieldcoef=dict( - argstr='--topup=%s', + argstr="--topup=%s", copyfile=False, extensions=None, - requires=['in_topup_movpar'], + requires=["in_topup_movpar"], ), in_topup_movpar=dict( - copyfile=False, - extensions=None, - requires=['in_topup_fieldcoef'], + copyfile=False, extensions=None, requires=["in_topup_fieldcoef"], ), - interp=dict(argstr='--interp=%s', ), - method=dict(argstr='--method=%s', ), + interp=dict(argstr="--interp=%s",), + method=dict(argstr="--method=%s",), out_corrected=dict( - 
argstr='--out=%s', + argstr="--out=%s", extensions=None, - name_source=['in_files'], - name_template='%s_corrected', + name_source=["in_files"], + name_template="%s_corrected", ), output_type=dict(), ) @@ -50,8 +34,10 @@ def test_ApplyTOPUP_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyTOPUP_outputs(): - output_map = dict(out_corrected=dict(extensions=None, ), ) + output_map = dict(out_corrected=dict(extensions=None,),) outputs = ApplyTOPUP.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_ApplyWarp.py b/nipype/interfaces/fsl/tests/test_auto_ApplyWarp.py index a6d924e825..8deaf747c2 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ApplyWarp.py +++ b/nipype/interfaces/fsl/tests/test_auto_ApplyWarp.py @@ -4,71 +4,38 @@ def test_ApplyWarp_inputs(): input_map = dict( - abswarp=dict( - argstr='--abs', - xor=['relwarp'], - ), - args=dict(argstr='%s', ), - datatype=dict(argstr='--datatype=%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - field_file=dict( - argstr='--warp=%s', - extensions=None, - ), - in_file=dict( - argstr='--in=%s', - extensions=None, - mandatory=True, - position=0, - ), - interp=dict( - argstr='--interp=%s', - position=-2, - ), - mask_file=dict( - argstr='--mask=%s', - extensions=None, - ), + abswarp=dict(argstr="--abs", xor=["relwarp"],), + args=dict(argstr="%s",), + datatype=dict(argstr="--datatype=%s",), + environ=dict(nohash=True, usedefault=True,), + field_file=dict(argstr="--warp=%s", extensions=None,), + in_file=dict(argstr="--in=%s", extensions=None, mandatory=True, position=0,), + interp=dict(argstr="--interp=%s", position=-2,), + mask_file=dict(argstr="--mask=%s", extensions=None,), out_file=dict( - argstr='--out=%s', + argstr="--out=%s", extensions=None, genfile=True, hash_files=False, position=2, ), output_type=dict(), - postmat=dict( - argstr='--postmat=%s', - extensions=None, - ), - premat=dict( - argstr='--premat=%s', - extensions=None, - ), - ref_file=dict( - argstr='--ref=%s', - extensions=None, - mandatory=True, - position=1, - ), - relwarp=dict( - argstr='--rel', - position=-1, - xor=['abswarp'], - ), - superlevel=dict(argstr='--superlevel=%s', ), - supersample=dict(argstr='--super', ), + postmat=dict(argstr="--postmat=%s", extensions=None,), + premat=dict(argstr="--premat=%s", extensions=None,), + ref_file=dict(argstr="--ref=%s", extensions=None, mandatory=True, position=1,), + relwarp=dict(argstr="--rel", position=-1, xor=["abswarp"],), + superlevel=dict(argstr="--superlevel=%s",), + supersample=dict(argstr="--super",), ) inputs = ApplyWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyWarp_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ApplyWarp.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_ApplyXFM.py b/nipype/interfaces/fsl/tests/test_auto_ApplyXFM.py index b7264213b9..116748d4c9 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ApplyXFM.py +++ b/nipype/interfaces/fsl/tests/test_auto_ApplyXFM.py @@ -4,175 +4,90 @@ def test_ApplyXFM_inputs(): input_map = dict( - angle_rep=dict(argstr='-anglerep %s', ), - apply_isoxfm=dict( - argstr='-applyisoxfm %f', - xor=['apply_xfm'], - 
), - apply_xfm=dict( - argstr='-applyxfm', - usedefault=True, - ), - args=dict(argstr='%s', ), - bbrslope=dict( - argstr='-bbrslope %f', - min_ver='5.0.0', - ), - bbrtype=dict( - argstr='-bbrtype %s', - min_ver='5.0.0', - ), - bgvalue=dict(argstr='-setbackground %f', ), - bins=dict(argstr='-bins %d', ), - coarse_search=dict( - argstr='-coarsesearch %d', - units='degrees', - ), - cost=dict(argstr='-cost %s', ), - cost_func=dict(argstr='-searchcost %s', ), - datatype=dict(argstr='-datatype %s', ), - display_init=dict(argstr='-displayinit', ), - dof=dict(argstr='-dof %d', ), - echospacing=dict( - argstr='-echospacing %f', - min_ver='5.0.0', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fieldmap=dict( - argstr='-fieldmap %s', - extensions=None, - min_ver='5.0.0', - ), - fieldmapmask=dict( - argstr='-fieldmapmask %s', - extensions=None, - min_ver='5.0.0', - ), - fine_search=dict( - argstr='-finesearch %d', - units='degrees', - ), - force_scaling=dict(argstr='-forcescaling', ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - position=0, - ), - in_matrix_file=dict( - argstr='-init %s', - extensions=None, - ), - in_weight=dict( - argstr='-inweight %s', - extensions=None, - ), - interp=dict(argstr='-interp %s', ), - min_sampling=dict( - argstr='-minsampling %f', - units='mm', - ), - no_clamp=dict(argstr='-noclamp', ), - no_resample=dict(argstr='-noresample', ), - no_resample_blur=dict(argstr='-noresampblur', ), - no_search=dict(argstr='-nosearch', ), + angle_rep=dict(argstr="-anglerep %s",), + apply_isoxfm=dict(argstr="-applyisoxfm %f", xor=["apply_xfm"],), + apply_xfm=dict(argstr="-applyxfm", usedefault=True,), + args=dict(argstr="%s",), + bbrslope=dict(argstr="-bbrslope %f", min_ver="5.0.0",), + bbrtype=dict(argstr="-bbrtype %s", min_ver="5.0.0",), + bgvalue=dict(argstr="-setbackground %f",), + bins=dict(argstr="-bins %d",), + coarse_search=dict(argstr="-coarsesearch %d", units="degrees",), + cost=dict(argstr="-cost %s",), + cost_func=dict(argstr="-searchcost %s",), + datatype=dict(argstr="-datatype %s",), + display_init=dict(argstr="-displayinit",), + dof=dict(argstr="-dof %d",), + echospacing=dict(argstr="-echospacing %f", min_ver="5.0.0",), + environ=dict(nohash=True, usedefault=True,), + fieldmap=dict(argstr="-fieldmap %s", extensions=None, min_ver="5.0.0",), + fieldmapmask=dict(argstr="-fieldmapmask %s", extensions=None, min_ver="5.0.0",), + fine_search=dict(argstr="-finesearch %d", units="degrees",), + force_scaling=dict(argstr="-forcescaling",), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True, position=0,), + in_matrix_file=dict(argstr="-init %s", extensions=None,), + in_weight=dict(argstr="-inweight %s", extensions=None,), + interp=dict(argstr="-interp %s",), + min_sampling=dict(argstr="-minsampling %f", units="mm",), + no_clamp=dict(argstr="-noclamp",), + no_resample=dict(argstr="-noresample",), + no_resample_blur=dict(argstr="-noresampblur",), + no_search=dict(argstr="-nosearch",), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, hash_files=False, - name_source=['in_file'], - name_template='%s_flirt', + name_source=["in_file"], + name_template="%s_flirt", position=2, ), out_log=dict( extensions=None, keep_extension=True, - name_source=['in_file'], - name_template='%s_flirt.log', - requires=['save_log'], + name_source=["in_file"], + name_template="%s_flirt.log", + requires=["save_log"], ), out_matrix_file=dict( - argstr='-omat %s', + argstr="-omat %s", extensions=None, hash_files=False, keep_extension=True, 
- name_source=['in_file'], - name_template='%s_flirt.mat', + name_source=["in_file"], + name_template="%s_flirt.mat", position=3, ), output_type=dict(), - padding_size=dict( - argstr='-paddingsize %d', - units='voxels', - ), - pedir=dict( - argstr='-pedir %d', - min_ver='5.0.0', - ), - ref_weight=dict( - argstr='-refweight %s', - extensions=None, - ), - reference=dict( - argstr='-ref %s', - extensions=None, - mandatory=True, - position=1, - ), - rigid2D=dict(argstr='-2D', ), + padding_size=dict(argstr="-paddingsize %d", units="voxels",), + pedir=dict(argstr="-pedir %d", min_ver="5.0.0",), + ref_weight=dict(argstr="-refweight %s", extensions=None,), + reference=dict(argstr="-ref %s", extensions=None, mandatory=True, position=1,), + rigid2D=dict(argstr="-2D",), save_log=dict(), - schedule=dict( - argstr='-schedule %s', - extensions=None, - ), - searchr_x=dict( - argstr='-searchrx %s', - units='degrees', - ), - searchr_y=dict( - argstr='-searchry %s', - units='degrees', - ), - searchr_z=dict( - argstr='-searchrz %s', - units='degrees', - ), - sinc_width=dict( - argstr='-sincwidth %d', - units='voxels', - ), - sinc_window=dict(argstr='-sincwindow %s', ), - uses_qform=dict(argstr='-usesqform', ), - verbose=dict(argstr='-verbose %d', ), - wm_seg=dict( - argstr='-wmseg %s', - extensions=None, - min_ver='5.0.0', - ), - wmcoords=dict( - argstr='-wmcoords %s', - extensions=None, - min_ver='5.0.0', - ), - wmnorms=dict( - argstr='-wmnorms %s', - extensions=None, - min_ver='5.0.0', - ), + schedule=dict(argstr="-schedule %s", extensions=None,), + searchr_x=dict(argstr="-searchrx %s", units="degrees",), + searchr_y=dict(argstr="-searchry %s", units="degrees",), + searchr_z=dict(argstr="-searchrz %s", units="degrees",), + sinc_width=dict(argstr="-sincwidth %d", units="voxels",), + sinc_window=dict(argstr="-sincwindow %s",), + uses_qform=dict(argstr="-usesqform",), + verbose=dict(argstr="-verbose %d",), + wm_seg=dict(argstr="-wmseg %s", extensions=None, min_ver="5.0.0",), + wmcoords=dict(argstr="-wmcoords %s", extensions=None, min_ver="5.0.0",), + wmnorms=dict(argstr="-wmnorms %s", extensions=None, min_ver="5.0.0",), ) inputs = ApplyXFM.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyXFM_outputs(): output_map = dict( - out_file=dict(extensions=None, ), - out_log=dict(extensions=None, ), - out_matrix_file=dict(extensions=None, ), + out_file=dict(extensions=None,), + out_log=dict(extensions=None,), + out_matrix_file=dict(extensions=None,), ) outputs = ApplyXFM.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_AvScale.py b/nipype/interfaces/fsl/tests/test_auto_AvScale.py index 0b803fd558..5f636ec453 100644 --- a/nipype/interfaces/fsl/tests/test_auto_AvScale.py +++ b/nipype/interfaces/fsl/tests/test_auto_AvScale.py @@ -4,28 +4,19 @@ def test_AvScale_inputs(): input_map = dict( - all_param=dict(argstr='--allparams', ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - mat_file=dict( - argstr='%s', - extensions=None, - position=-2, - ), - ref_file=dict( - argstr='%s', - extensions=None, - position=-1, - ), + all_param=dict(argstr="--allparams",), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + mat_file=dict(argstr="%s", extensions=None, position=-2,), + ref_file=dict(argstr="%s", extensions=None, position=-1,), ) inputs = AvScale.input_spec() for key, metadata in list(input_map.items()): for metakey, value 
in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AvScale_outputs(): output_map = dict( average_scaling=dict(), diff --git a/nipype/interfaces/fsl/tests/test_auto_B0Calc.py b/nipype/interfaces/fsl/tests/test_auto_B0Calc.py index 532780c8cb..85175fd428 100644 --- a/nipype/interfaces/fsl/tests/test_auto_B0Calc.py +++ b/nipype/interfaces/fsl/tests/test_auto_B0Calc.py @@ -4,85 +4,42 @@ def test_B0Calc_inputs(): input_map = dict( - args=dict(argstr='%s', ), - chi_air=dict( - argstr='--chi0=%e', - usedefault=True, - ), - compute_xyz=dict( - argstr='--xyz', - usedefault=True, - ), - delta=dict( - argstr='-d %e', - usedefault=True, - ), - directconv=dict( - argstr='--directconv', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - extendboundary=dict( - argstr='--extendboundary=%0.2f', - usedefault=True, - ), - in_file=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - position=0, - ), + args=dict(argstr="%s",), + chi_air=dict(argstr="--chi0=%e", usedefault=True,), + compute_xyz=dict(argstr="--xyz", usedefault=True,), + delta=dict(argstr="-d %e", usedefault=True,), + directconv=dict(argstr="--directconv", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + extendboundary=dict(argstr="--extendboundary=%0.2f", usedefault=True,), + in_file=dict(argstr="-i %s", extensions=None, mandatory=True, position=0,), out_file=dict( - argstr='-o %s', + argstr="-o %s", extensions=None, - name_source=['in_file'], - name_template='%s_b0field', - output_name='out_file', + name_source=["in_file"], + name_template="%s_b0field", + output_name="out_file", position=1, ), output_type=dict(), - x_b0=dict( - argstr='--b0x=%0.2f', - usedefault=True, - xor=['xyz_b0'], - ), - x_grad=dict( - argstr='--gx=%0.4f', - usedefault=True, - ), + x_b0=dict(argstr="--b0x=%0.2f", usedefault=True, xor=["xyz_b0"],), + x_grad=dict(argstr="--gx=%0.4f", usedefault=True,), xyz_b0=dict( - argstr='--b0x=%0.2f --b0y=%0.2f --b0=%0.2f', - xor=['x_b0', 'y_b0', 'z_b0'], - ), - y_b0=dict( - argstr='--b0y=%0.2f', - usedefault=True, - xor=['xyz_b0'], - ), - y_grad=dict( - argstr='--gy=%0.4f', - usedefault=True, - ), - z_b0=dict( - argstr='--b0=%0.2f', - usedefault=True, - xor=['xyz_b0'], - ), - z_grad=dict( - argstr='--gz=%0.4f', - usedefault=True, + argstr="--b0x=%0.2f --b0y=%0.2f --b0=%0.2f", xor=["x_b0", "y_b0", "z_b0"], ), + y_b0=dict(argstr="--b0y=%0.2f", usedefault=True, xor=["xyz_b0"],), + y_grad=dict(argstr="--gy=%0.4f", usedefault=True,), + z_b0=dict(argstr="--b0=%0.2f", usedefault=True, xor=["xyz_b0"],), + z_grad=dict(argstr="--gz=%0.4f", usedefault=True,), ) inputs = B0Calc.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_B0Calc_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = B0Calc.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_BEDPOSTX5.py b/nipype/interfaces/fsl/tests/test_auto_BEDPOSTX5.py index 54cbcf0410..8941e775c6 100644 --- a/nipype/interfaces/fsl/tests/test_auto_BEDPOSTX5.py +++ b/nipype/interfaces/fsl/tests/test_auto_BEDPOSTX5.py @@ -4,98 +4,39 @@ def test_BEDPOSTX5_inputs(): input_map = dict( - all_ard=dict( - argstr='--allard', - xor=('no_ard', 'all_ard'), - ), - args=dict(argstr='%s', ), - burn_in=dict( - argstr='-b %d', - usedefault=True, - ), - 
burn_in_no_ard=dict( - argstr='--burnin_noard=%d', - usedefault=True, - ), - bvals=dict( - extensions=None, - mandatory=True, - ), - bvecs=dict( - extensions=None, - mandatory=True, - ), + all_ard=dict(argstr="--allard", xor=("no_ard", "all_ard"),), + args=dict(argstr="%s",), + burn_in=dict(argstr="-b %d", usedefault=True,), + burn_in_no_ard=dict(argstr="--burnin_noard=%d", usedefault=True,), + bvals=dict(extensions=None, mandatory=True,), + bvecs=dict(extensions=None, mandatory=True,), cnlinear=dict( - argstr='--cnonlinear', - xor=('no_spat', 'non_linear', 'cnlinear'), - ), - dwi=dict( - extensions=None, - mandatory=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - f0_ard=dict( - argstr='--f0 --ardf0', - xor=['f0_noard', 'f0_ard', 'all_ard'], - ), - f0_noard=dict( - argstr='--f0', - xor=['f0_noard', 'f0_ard'], - ), - force_dir=dict( - argstr='--forcedir', - usedefault=True, - ), - fudge=dict(argstr='-w %d', ), - grad_dev=dict(extensions=None, ), - gradnonlin=dict(argstr='-g', ), - logdir=dict(argstr='--logdir=%s', ), - mask=dict( - extensions=None, - mandatory=True, - ), - model=dict(argstr='-model %d', ), - n_fibres=dict( - argstr='-n %d', - mandatory=True, - usedefault=True, - ), - n_jumps=dict( - argstr='-j %d', - usedefault=True, - ), - no_ard=dict( - argstr='--noard', - xor=('no_ard', 'all_ard'), - ), - no_spat=dict( - argstr='--nospat', - xor=('no_spat', 'non_linear', 'cnlinear'), - ), + argstr="--cnonlinear", xor=("no_spat", "non_linear", "cnlinear"), + ), + dwi=dict(extensions=None, mandatory=True,), + environ=dict(nohash=True, usedefault=True,), + f0_ard=dict(argstr="--f0 --ardf0", xor=["f0_noard", "f0_ard", "all_ard"],), + f0_noard=dict(argstr="--f0", xor=["f0_noard", "f0_ard"],), + force_dir=dict(argstr="--forcedir", usedefault=True,), + fudge=dict(argstr="-w %d",), + grad_dev=dict(extensions=None,), + gradnonlin=dict(argstr="-g",), + logdir=dict(argstr="--logdir=%s",), + mask=dict(extensions=None, mandatory=True,), + model=dict(argstr="-model %d",), + n_fibres=dict(argstr="-n %d", mandatory=True, usedefault=True,), + n_jumps=dict(argstr="-j %d", usedefault=True,), + no_ard=dict(argstr="--noard", xor=("no_ard", "all_ard"),), + no_spat=dict(argstr="--nospat", xor=("no_spat", "non_linear", "cnlinear"),), non_linear=dict( - argstr='--nonlinear', - xor=('no_spat', 'non_linear', 'cnlinear'), - ), - out_dir=dict( - argstr='%s', - mandatory=True, - position=1, - usedefault=True, + argstr="--nonlinear", xor=("no_spat", "non_linear", "cnlinear"), ), + out_dir=dict(argstr="%s", mandatory=True, position=1, usedefault=True,), output_type=dict(), - rician=dict(argstr='--rician', ), - sample_every=dict( - argstr='-s %d', - usedefault=True, - ), - seed=dict(argstr='--seed=%d', ), - update_proposal_every=dict( - argstr='--updateproposalevery=%d', - usedefault=True, - ), + rician=dict(argstr="--rician",), + sample_every=dict(argstr="-s %d", usedefault=True,), + seed=dict(argstr="--seed=%d",), + update_proposal_every=dict(argstr="--updateproposalevery=%d", usedefault=True,), use_gpu=dict(), ) inputs = BEDPOSTX5.input_spec() @@ -103,12 +44,14 @@ def test_BEDPOSTX5_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BEDPOSTX5_outputs(): output_map = dict( dyads=dict(), dyads_dispersion=dict(), - mean_S0samples=dict(extensions=None, ), - mean_dsamples=dict(extensions=None, ), + mean_S0samples=dict(extensions=None,), + mean_dsamples=dict(extensions=None,), 
mean_fsamples=dict(), mean_phsamples=dict(), mean_thsamples=dict(), diff --git a/nipype/interfaces/fsl/tests/test_auto_BET.py b/nipype/interfaces/fsl/tests/test_auto_BET.py index 33968d9ac9..538a92b720 100644 --- a/nipype/interfaces/fsl/tests/test_auto_BET.py +++ b/nipype/interfaces/fsl/tests/test_auto_BET.py @@ -4,96 +4,129 @@ def test_BET_inputs(): input_map = dict( - args=dict(argstr='%s', ), - center=dict( - argstr='-c %s', - units='voxels', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - frac=dict(argstr='-f %.2f', ), + args=dict(argstr="%s",), + center=dict(argstr="-c %s", units="voxels",), + environ=dict(nohash=True, usedefault=True,), + frac=dict(argstr="-f %.2f",), functional=dict( - argstr='-F', - xor=('functional', 'reduce_bias', 'robust', 'padding', - 'remove_eyes', 'surfaces', 't2_guided'), - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=0, + argstr="-F", + xor=( + "functional", + "reduce_bias", + "robust", + "padding", + "remove_eyes", + "surfaces", + "t2_guided", + ), ), - mask=dict(argstr='-m', ), - mesh=dict(argstr='-e', ), - no_output=dict(argstr='-n', ), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=0,), + mask=dict(argstr="-m",), + mesh=dict(argstr="-e",), + no_output=dict(argstr="-n",), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=1, ), - outline=dict(argstr='-o', ), + outline=dict(argstr="-o",), output_type=dict(), padding=dict( - argstr='-Z', - xor=('functional', 'reduce_bias', 'robust', 'padding', - 'remove_eyes', 'surfaces', 't2_guided'), - ), - radius=dict( - argstr='-r %d', - units='mm', + argstr="-Z", + xor=( + "functional", + "reduce_bias", + "robust", + "padding", + "remove_eyes", + "surfaces", + "t2_guided", + ), ), + radius=dict(argstr="-r %d", units="mm",), reduce_bias=dict( - argstr='-B', - xor=('functional', 'reduce_bias', 'robust', 'padding', - 'remove_eyes', 'surfaces', 't2_guided'), + argstr="-B", + xor=( + "functional", + "reduce_bias", + "robust", + "padding", + "remove_eyes", + "surfaces", + "t2_guided", + ), ), remove_eyes=dict( - argstr='-S', - xor=('functional', 'reduce_bias', 'robust', 'padding', - 'remove_eyes', 'surfaces', 't2_guided'), + argstr="-S", + xor=( + "functional", + "reduce_bias", + "robust", + "padding", + "remove_eyes", + "surfaces", + "t2_guided", + ), ), robust=dict( - argstr='-R', - xor=('functional', 'reduce_bias', 'robust', 'padding', - 'remove_eyes', 'surfaces', 't2_guided'), + argstr="-R", + xor=( + "functional", + "reduce_bias", + "robust", + "padding", + "remove_eyes", + "surfaces", + "t2_guided", + ), ), - skull=dict(argstr='-s', ), + skull=dict(argstr="-s",), surfaces=dict( - argstr='-A', - xor=('functional', 'reduce_bias', 'robust', 'padding', - 'remove_eyes', 'surfaces', 't2_guided'), + argstr="-A", + xor=( + "functional", + "reduce_bias", + "robust", + "padding", + "remove_eyes", + "surfaces", + "t2_guided", + ), ), t2_guided=dict( - argstr='-A2 %s', + argstr="-A2 %s", extensions=None, - xor=('functional', 'reduce_bias', 'robust', 'padding', - 'remove_eyes', 'surfaces', 't2_guided'), + xor=( + "functional", + "reduce_bias", + "robust", + "padding", + "remove_eyes", + "surfaces", + "t2_guided", + ), ), - threshold=dict(argstr='-t', ), - vertical_gradient=dict(argstr='-g %.2f', ), + threshold=dict(argstr="-t",), + vertical_gradient=dict(argstr="-g %.2f",), ) inputs = BET.input_spec() for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BET_outputs(): output_map = dict( - inskull_mask_file=dict(extensions=None, ), - inskull_mesh_file=dict(extensions=None, ), - mask_file=dict(extensions=None, ), - meshfile=dict(extensions=None, ), - out_file=dict(extensions=None, ), - outline_file=dict(extensions=None, ), - outskin_mask_file=dict(extensions=None, ), - outskin_mesh_file=dict(extensions=None, ), - outskull_mask_file=dict(extensions=None, ), - outskull_mesh_file=dict(extensions=None, ), - skull_mask_file=dict(extensions=None, ), + inskull_mask_file=dict(extensions=None,), + inskull_mesh_file=dict(extensions=None,), + mask_file=dict(extensions=None,), + meshfile=dict(extensions=None,), + out_file=dict(extensions=None,), + outline_file=dict(extensions=None,), + outskin_mask_file=dict(extensions=None,), + outskin_mesh_file=dict(extensions=None,), + outskull_mask_file=dict(extensions=None,), + outskull_mesh_file=dict(extensions=None,), + skull_mask_file=dict(extensions=None,), ) outputs = BET.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_BinaryMaths.py b/nipype/interfaces/fsl/tests/test_auto_BinaryMaths.py index 055f37e35f..685058f2d4 100644 --- a/nipype/interfaces/fsl/tests/test_auto_BinaryMaths.py +++ b/nipype/interfaces/fsl/tests/test_auto_BinaryMaths.py @@ -4,54 +4,26 @@ def test_BinaryMaths_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - internal_datatype=dict( - argstr='-dt %s', - position=1, - ), - nan2zeros=dict( - argstr='-nan', - position=3, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + internal_datatype=dict(argstr="-dt %s", position=1,), + nan2zeros=dict(argstr="-nan", position=3,), operand_file=dict( - argstr='%s', + argstr="%s", extensions=None, mandatory=True, position=5, - xor=['operand_value'], + xor=["operand_value"], ), operand_value=dict( - argstr='%.8f', - mandatory=True, - position=5, - xor=['operand_file'], - ), - operation=dict( - argstr='-%s', - mandatory=True, - position=4, + argstr="%.8f", mandatory=True, position=5, xor=["operand_file"], ), + operation=dict(argstr="-%s", mandatory=True, position=4,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-2, - ), - output_datatype=dict( - argstr='-odt %s', - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), + output_datatype=dict(argstr="-odt %s", position=-1,), output_type=dict(), ) inputs = BinaryMaths.input_spec() @@ -59,8 +31,10 @@ def test_BinaryMaths_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BinaryMaths_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = BinaryMaths.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_ChangeDataType.py b/nipype/interfaces/fsl/tests/test_auto_ChangeDataType.py index a4428446e7..9dfe5ed83f 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ChangeDataType.py +++ b/nipype/interfaces/fsl/tests/test_auto_ChangeDataType.py @@ -4,37 +4,15 @@ def 
test_ChangeDataType_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - internal_datatype=dict( - argstr='-dt %s', - position=1, - ), - nan2zeros=dict( - argstr='-nan', - position=3, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + internal_datatype=dict(argstr="-dt %s", position=1,), + nan2zeros=dict(argstr="-nan", position=3,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-2, - ), - output_datatype=dict( - argstr='-odt %s', - mandatory=True, - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), + output_datatype=dict(argstr="-odt %s", mandatory=True, position=-1,), output_type=dict(), ) inputs = ChangeDataType.input_spec() @@ -42,8 +20,10 @@ def test_ChangeDataType_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ChangeDataType_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ChangeDataType.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_Classifier.py b/nipype/interfaces/fsl/tests/test_auto_Classifier.py index dbd0f129a2..e81bb72096 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Classifier.py +++ b/nipype/interfaces/fsl/tests/test_auto_Classifier.py @@ -4,28 +4,13 @@ def test_Classifier_inputs(): input_map = dict( - args=dict(argstr='%s', ), - artifacts_list_file=dict(extensions=None, ), - environ=dict( - nohash=True, - usedefault=True, - ), - mel_ica=dict( - argstr='%s', - copyfile=False, - position=1, - ), - thresh=dict( - argstr='%d', - mandatory=True, - position=-1, - ), + args=dict(argstr="%s",), + artifacts_list_file=dict(extensions=None,), + environ=dict(nohash=True, usedefault=True,), + mel_ica=dict(argstr="%s", copyfile=False, position=1,), + thresh=dict(argstr="%d", mandatory=True, position=-1,), trained_wts_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=2, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=2, ), ) inputs = Classifier.input_spec() @@ -33,8 +18,10 @@ def test_Classifier_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Classifier_outputs(): - output_map = dict(artifacts_list_file=dict(extensions=None, ), ) + output_map = dict(artifacts_list_file=dict(extensions=None,),) outputs = Classifier.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_Cleaner.py b/nipype/interfaces/fsl/tests/test_auto_Cleaner.py index 8e587fde99..d5462a23d8 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Cleaner.py +++ b/nipype/interfaces/fsl/tests/test_auto_Cleaner.py @@ -4,53 +4,27 @@ def test_Cleaner_inputs(): input_map = dict( - aggressive=dict( - argstr='-A', - position=3, - ), - args=dict(argstr='%s', ), + aggressive=dict(argstr="-A", position=3,), + args=dict(argstr="%s",), artifacts_list_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=1, - ), - cleanup_motion=dict( - argstr='-m', - position=2, - ), - 
confound_file=dict( - argstr='-x %s', - extensions=None, - position=4, - ), - confound_file_1=dict( - argstr='-x %s', - extensions=None, - position=5, - ), - confound_file_2=dict( - argstr='-x %s', - extensions=None, - position=6, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - highpass=dict( - argstr='-m -h %f', - position=2, - usedefault=True, - ), + argstr="%s", extensions=None, mandatory=True, position=1, + ), + cleanup_motion=dict(argstr="-m", position=2,), + confound_file=dict(argstr="-x %s", extensions=None, position=4,), + confound_file_1=dict(argstr="-x %s", extensions=None, position=5,), + confound_file_2=dict(argstr="-x %s", extensions=None, position=6,), + environ=dict(nohash=True, usedefault=True,), + highpass=dict(argstr="-m -h %f", position=2, usedefault=True,), ) inputs = Cleaner.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Cleaner_outputs(): - output_map = dict(cleaned_functional_file=dict(extensions=None, ), ) + output_map = dict(cleaned_functional_file=dict(extensions=None,),) outputs = Cleaner.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_Cluster.py b/nipype/interfaces/fsl/tests/test_auto_Cluster.py index 10d8af2706..dee38d4875 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Cluster.py +++ b/nipype/interfaces/fsl/tests/test_auto_Cluster.py @@ -4,114 +4,52 @@ def test_Cluster_inputs(): input_map = dict( - args=dict(argstr='%s', ), - connectivity=dict(argstr='--connectivity=%d', ), - cope_file=dict( - argstr='--cope=%s', - extensions=None, - ), - dlh=dict(argstr='--dlh=%.10f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - find_min=dict( - argstr='--min', - usedefault=True, - ), - fractional=dict( - argstr='--fractional', - usedefault=True, - ), - in_file=dict( - argstr='--in=%s', - extensions=None, - mandatory=True, - ), - minclustersize=dict( - argstr='--minclustersize', - usedefault=True, - ), - no_table=dict( - argstr='--no_table', - usedefault=True, - ), - num_maxima=dict(argstr='--num=%d', ), - out_index_file=dict( - argstr='--oindex=%s', - hash_files=False, - ), - out_localmax_txt_file=dict( - argstr='--olmax=%s', - hash_files=False, - ), - out_localmax_vol_file=dict( - argstr='--olmaxim=%s', - hash_files=False, - ), - out_max_file=dict( - argstr='--omax=%s', - hash_files=False, - ), - out_mean_file=dict( - argstr='--omean=%s', - hash_files=False, - ), - out_pval_file=dict( - argstr='--opvals=%s', - hash_files=False, - ), - out_size_file=dict( - argstr='--osize=%s', - hash_files=False, - ), - out_threshold_file=dict( - argstr='--othresh=%s', - hash_files=False, - ), + args=dict(argstr="%s",), + connectivity=dict(argstr="--connectivity=%d",), + cope_file=dict(argstr="--cope=%s", extensions=None,), + dlh=dict(argstr="--dlh=%.10f",), + environ=dict(nohash=True, usedefault=True,), + find_min=dict(argstr="--min", usedefault=True,), + fractional=dict(argstr="--fractional", usedefault=True,), + in_file=dict(argstr="--in=%s", extensions=None, mandatory=True,), + minclustersize=dict(argstr="--minclustersize", usedefault=True,), + no_table=dict(argstr="--no_table", usedefault=True,), + num_maxima=dict(argstr="--num=%d",), + out_index_file=dict(argstr="--oindex=%s", hash_files=False,), + out_localmax_txt_file=dict(argstr="--olmax=%s", hash_files=False,), + out_localmax_vol_file=dict(argstr="--olmaxim=%s", hash_files=False,), + 
out_max_file=dict(argstr="--omax=%s", hash_files=False,), + out_mean_file=dict(argstr="--omean=%s", hash_files=False,), + out_pval_file=dict(argstr="--opvals=%s", hash_files=False,), + out_size_file=dict(argstr="--osize=%s", hash_files=False,), + out_threshold_file=dict(argstr="--othresh=%s", hash_files=False,), output_type=dict(), - peak_distance=dict(argstr='--peakdist=%.10f', ), - pthreshold=dict( - argstr='--pthresh=%.10f', - requires=['dlh', 'volume'], - ), - std_space_file=dict( - argstr='--stdvol=%s', - extensions=None, - ), - threshold=dict( - argstr='--thresh=%.10f', - mandatory=True, - ), - use_mm=dict( - argstr='--mm', - usedefault=True, - ), - volume=dict(argstr='--volume=%d', ), - warpfield_file=dict( - argstr='--warpvol=%s', - extensions=None, - ), - xfm_file=dict( - argstr='--xfm=%s', - extensions=None, - ), + peak_distance=dict(argstr="--peakdist=%.10f",), + pthreshold=dict(argstr="--pthresh=%.10f", requires=["dlh", "volume"],), + std_space_file=dict(argstr="--stdvol=%s", extensions=None,), + threshold=dict(argstr="--thresh=%.10f", mandatory=True,), + use_mm=dict(argstr="--mm", usedefault=True,), + volume=dict(argstr="--volume=%d",), + warpfield_file=dict(argstr="--warpvol=%s", extensions=None,), + xfm_file=dict(argstr="--xfm=%s", extensions=None,), ) inputs = Cluster.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Cluster_outputs(): output_map = dict( - index_file=dict(extensions=None, ), - localmax_txt_file=dict(extensions=None, ), - localmax_vol_file=dict(extensions=None, ), - max_file=dict(extensions=None, ), - mean_file=dict(extensions=None, ), - pval_file=dict(extensions=None, ), - size_file=dict(extensions=None, ), - threshold_file=dict(extensions=None, ), + index_file=dict(extensions=None,), + localmax_txt_file=dict(extensions=None,), + localmax_vol_file=dict(extensions=None,), + max_file=dict(extensions=None,), + mean_file=dict(extensions=None,), + pval_file=dict(extensions=None,), + size_file=dict(extensions=None,), + threshold_file=dict(extensions=None,), ) outputs = Cluster.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_Complex.py b/nipype/interfaces/fsl/tests/test_auto_Complex.py index 2f068e7a1d..ac631cde80 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Complex.py +++ b/nipype/interfaces/fsl/tests/test_auto_Complex.py @@ -4,167 +4,185 @@ def test_Complex_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), complex_cartesian=dict( - argstr='-complex', + argstr="-complex", position=1, xor=[ - 'real_polar', 'real_cartesian', 'complex_cartesian', - 'complex_polar', 'complex_split', 'complex_merge' + "real_polar", + "real_cartesian", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", ], ), - complex_in_file=dict( - argstr='%s', - extensions=None, - position=2, - ), - complex_in_file2=dict( - argstr='%s', - extensions=None, - position=3, - ), + complex_in_file=dict(argstr="%s", extensions=None, position=2,), + complex_in_file2=dict(argstr="%s", extensions=None, position=3,), complex_merge=dict( - argstr='-complexmerge', + argstr="-complexmerge", position=1, xor=[ - 'real_polar', 'real_cartesian', 'complex_cartesian', - 'complex_polar', 'complex_split', 'complex_merge', 'start_vol', - 'end_vol' + "real_polar", + "real_cartesian", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", + "start_vol", + "end_vol", ], ), complex_out_file=dict( 
- argstr='%s', + argstr="%s", extensions=None, genfile=True, position=-3, xor=[ - 'complex_out_file', 'magnitude_out_file', 'phase_out_file', - 'real_out_file', 'imaginary_out_file', 'real_polar', - 'real_cartesian' + "complex_out_file", + "magnitude_out_file", + "phase_out_file", + "real_out_file", + "imaginary_out_file", + "real_polar", + "real_cartesian", ], ), complex_polar=dict( - argstr='-complexpolar', + argstr="-complexpolar", position=1, xor=[ - 'real_polar', 'real_cartesian', 'complex_cartesian', - 'complex_polar', 'complex_split', 'complex_merge' + "real_polar", + "real_cartesian", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", ], ), complex_split=dict( - argstr='-complexsplit', + argstr="-complexsplit", position=1, xor=[ - 'real_polar', 'real_cartesian', 'complex_cartesian', - 'complex_polar', 'complex_split', 'complex_merge' + "real_polar", + "real_cartesian", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", ], ), - end_vol=dict( - argstr='%d', - position=-1, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - imaginary_in_file=dict( - argstr='%s', - extensions=None, - position=3, - ), + end_vol=dict(argstr="%d", position=-1,), + environ=dict(nohash=True, usedefault=True,), + imaginary_in_file=dict(argstr="%s", extensions=None, position=3,), imaginary_out_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, position=-3, xor=[ - 'complex_out_file', 'magnitude_out_file', 'phase_out_file', - 'real_polar', 'complex_cartesian', 'complex_polar', - 'complex_split', 'complex_merge' + "complex_out_file", + "magnitude_out_file", + "phase_out_file", + "real_polar", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", ], ), - magnitude_in_file=dict( - argstr='%s', - extensions=None, - position=2, - ), + magnitude_in_file=dict(argstr="%s", extensions=None, position=2,), magnitude_out_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, position=-4, xor=[ - 'complex_out_file', 'real_out_file', 'imaginary_out_file', - 'real_cartesian', 'complex_cartesian', 'complex_polar', - 'complex_split', 'complex_merge' + "complex_out_file", + "real_out_file", + "imaginary_out_file", + "real_cartesian", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", ], ), output_type=dict(), - phase_in_file=dict( - argstr='%s', - extensions=None, - position=3, - ), + phase_in_file=dict(argstr="%s", extensions=None, position=3,), phase_out_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, position=-3, xor=[ - 'complex_out_file', 'real_out_file', 'imaginary_out_file', - 'real_cartesian', 'complex_cartesian', 'complex_polar', - 'complex_split', 'complex_merge' + "complex_out_file", + "real_out_file", + "imaginary_out_file", + "real_cartesian", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", ], ), real_cartesian=dict( - argstr='-realcartesian', + argstr="-realcartesian", position=1, xor=[ - 'real_polar', 'real_cartesian', 'complex_cartesian', - 'complex_polar', 'complex_split', 'complex_merge' + "real_polar", + "real_cartesian", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", ], ), - real_in_file=dict( - argstr='%s', - extensions=None, - position=2, - ), + real_in_file=dict(argstr="%s", extensions=None, position=2,), real_out_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, position=-4, xor=[ - 'complex_out_file', 'magnitude_out_file', 
'phase_out_file', - 'real_polar', 'complex_cartesian', 'complex_polar', - 'complex_split', 'complex_merge' + "complex_out_file", + "magnitude_out_file", + "phase_out_file", + "real_polar", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", ], ), real_polar=dict( - argstr='-realpolar', + argstr="-realpolar", position=1, xor=[ - 'real_polar', 'real_cartesian', 'complex_cartesian', - 'complex_polar', 'complex_split', 'complex_merge' + "real_polar", + "real_cartesian", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", ], ), - start_vol=dict( - argstr='%d', - position=-2, - ), + start_vol=dict(argstr="%d", position=-2,), ) inputs = Complex.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Complex_outputs(): output_map = dict( - complex_out_file=dict(extensions=None, ), - imaginary_out_file=dict(extensions=None, ), - magnitude_out_file=dict(extensions=None, ), - phase_out_file=dict(extensions=None, ), - real_out_file=dict(extensions=None, ), + complex_out_file=dict(extensions=None,), + imaginary_out_file=dict(extensions=None,), + magnitude_out_file=dict(extensions=None,), + phase_out_file=dict(extensions=None,), + real_out_file=dict(extensions=None,), ) outputs = Complex.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_ContrastMgr.py b/nipype/interfaces/fsl/tests/test_auto_ContrastMgr.py index 0a58eac8e3..185c089889 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ContrastMgr.py +++ b/nipype/interfaces/fsl/tests/test_auto_ContrastMgr.py @@ -4,53 +4,27 @@ def test_ContrastMgr_inputs(): input_map = dict( - args=dict(argstr='%s', ), - contrast_num=dict(argstr='-cope', ), - corrections=dict( - copyfile=False, - extensions=None, - mandatory=True, - ), - dof_file=dict( - argstr='', - copyfile=False, - extensions=None, - mandatory=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fcon_file=dict( - argstr='-f %s', - extensions=None, - ), + args=dict(argstr="%s",), + contrast_num=dict(argstr="-cope",), + corrections=dict(copyfile=False, extensions=None, mandatory=True,), + dof_file=dict(argstr="", copyfile=False, extensions=None, mandatory=True,), + environ=dict(nohash=True, usedefault=True,), + fcon_file=dict(argstr="-f %s", extensions=None,), output_type=dict(), - param_estimates=dict( - argstr='', - copyfile=False, - mandatory=True, - ), + param_estimates=dict(argstr="", copyfile=False, mandatory=True,), sigmasquareds=dict( - argstr='', - copyfile=False, - extensions=None, - mandatory=True, - position=-2, - ), - suffix=dict(argstr='-suffix %s', ), - tcon_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, + argstr="", copyfile=False, extensions=None, mandatory=True, position=-2, ), + suffix=dict(argstr="-suffix %s",), + tcon_file=dict(argstr="%s", extensions=None, mandatory=True, position=-1,), ) inputs = ContrastMgr.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ContrastMgr_outputs(): output_map = dict( copes=dict(), diff --git a/nipype/interfaces/fsl/tests/test_auto_ConvertWarp.py b/nipype/interfaces/fsl/tests/test_auto_ConvertWarp.py index 5b321f761f..dcfc562258 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ConvertWarp.py +++ b/nipype/interfaces/fsl/tests/test_auto_ConvertWarp.py @@ -4,81 +4,42 @@ def test_ConvertWarp_inputs(): 
input_map = dict( - abswarp=dict( - argstr='--abs', - xor=['relwarp'], - ), - args=dict(argstr='%s', ), - cons_jacobian=dict(argstr='--constrainj', ), - environ=dict( - nohash=True, - usedefault=True, - ), - jacobian_max=dict(argstr='--jmax=%f', ), - jacobian_min=dict(argstr='--jmin=%f', ), - midmat=dict( - argstr='--midmat=%s', - extensions=None, - ), - out_abswarp=dict( - argstr='--absout', - xor=['out_relwarp'], - ), + abswarp=dict(argstr="--abs", xor=["relwarp"],), + args=dict(argstr="%s",), + cons_jacobian=dict(argstr="--constrainj",), + environ=dict(nohash=True, usedefault=True,), + jacobian_max=dict(argstr="--jmax=%f",), + jacobian_min=dict(argstr="--jmin=%f",), + midmat=dict(argstr="--midmat=%s", extensions=None,), + out_abswarp=dict(argstr="--absout", xor=["out_relwarp"],), out_file=dict( - argstr='--out=%s', + argstr="--out=%s", extensions=None, - name_source=['reference'], - name_template='%s_concatwarp', - output_name='out_file', + name_source=["reference"], + name_template="%s_concatwarp", + output_name="out_file", position=-1, ), - out_relwarp=dict( - argstr='--relout', - xor=['out_abswarp'], - ), + out_relwarp=dict(argstr="--relout", xor=["out_abswarp"],), output_type=dict(), - postmat=dict( - argstr='--postmat=%s', - extensions=None, - ), - premat=dict( - argstr='--premat=%s', - extensions=None, - ), - reference=dict( - argstr='--ref=%s', - extensions=None, - mandatory=True, - position=1, - ), - relwarp=dict( - argstr='--rel', - xor=['abswarp'], - ), - shift_direction=dict( - argstr='--shiftdir=%s', - requires=['shift_in_file'], - ), - shift_in_file=dict( - argstr='--shiftmap=%s', - extensions=None, - ), - warp1=dict( - argstr='--warp1=%s', - extensions=None, - ), - warp2=dict( - argstr='--warp2=%s', - extensions=None, - ), + postmat=dict(argstr="--postmat=%s", extensions=None,), + premat=dict(argstr="--premat=%s", extensions=None,), + reference=dict(argstr="--ref=%s", extensions=None, mandatory=True, position=1,), + relwarp=dict(argstr="--rel", xor=["abswarp"],), + shift_direction=dict(argstr="--shiftdir=%s", requires=["shift_in_file"],), + shift_in_file=dict(argstr="--shiftmap=%s", extensions=None,), + warp1=dict(argstr="--warp1=%s", extensions=None,), + warp2=dict(argstr="--warp2=%s", extensions=None,), ) inputs = ConvertWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ConvertWarp_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ConvertWarp.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_ConvertXFM.py b/nipype/interfaces/fsl/tests/test_auto_ConvertXFM.py index f4bf1b24f9..b4311d0d37 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ConvertXFM.py +++ b/nipype/interfaces/fsl/tests/test_auto_ConvertXFM.py @@ -4,41 +4,29 @@ def test_ConvertXFM_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), concat_xfm=dict( - argstr='-concat', + argstr="-concat", position=-3, - requires=['in_file2'], - xor=['invert_xfm', 'concat_xfm', 'fix_scale_skew'], - ), - environ=dict( - nohash=True, - usedefault=True, + requires=["in_file2"], + xor=["invert_xfm", "concat_xfm", "fix_scale_skew"], ), + environ=dict(nohash=True, usedefault=True,), fix_scale_skew=dict( - argstr='-fixscaleskew', + argstr="-fixscaleskew", position=-3, - requires=['in_file2'], - xor=['invert_xfm', 'concat_xfm', 
'fix_scale_skew'], - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - ), - in_file2=dict( - argstr='%s', - extensions=None, - position=-2, + requires=["in_file2"], + xor=["invert_xfm", "concat_xfm", "fix_scale_skew"], ), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-1,), + in_file2=dict(argstr="%s", extensions=None, position=-2,), invert_xfm=dict( - argstr='-inverse', + argstr="-inverse", position=-3, - xor=['invert_xfm', 'concat_xfm', 'fix_scale_skew'], + xor=["invert_xfm", "concat_xfm", "fix_scale_skew"], ), out_file=dict( - argstr='-omat %s', + argstr="-omat %s", extensions=None, genfile=True, hash_files=False, @@ -51,8 +39,10 @@ def test_ConvertXFM_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ConvertXFM_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ConvertXFM.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_CopyGeom.py b/nipype/interfaces/fsl/tests/test_auto_CopyGeom.py index 9a402237cd..ade301a00e 100644 --- a/nipype/interfaces/fsl/tests/test_auto_CopyGeom.py +++ b/nipype/interfaces/fsl/tests/test_auto_CopyGeom.py @@ -4,31 +4,20 @@ def test_CopyGeom_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), dest_file=dict( - argstr='%s', + argstr="%s", copyfile=True, extensions=None, mandatory=True, - name_source='dest_file', - name_template='%s', - output_name='out_file', + name_source="dest_file", + name_template="%s", + output_name="out_file", position=1, ), - environ=dict( - nohash=True, - usedefault=True, - ), - ignore_dims=dict( - argstr='-d', - position='-1', - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=0, - ), + environ=dict(nohash=True, usedefault=True,), + ignore_dims=dict(argstr="-d", position="-1",), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=0,), output_type=dict(), ) inputs = CopyGeom.input_spec() @@ -36,8 +25,10 @@ def test_CopyGeom_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CopyGeom_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = CopyGeom.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_DTIFit.py b/nipype/interfaces/fsl/tests/test_auto_DTIFit.py index 83dfc512b6..a0cf704d87 100644 --- a/nipype/interfaces/fsl/tests/test_auto_DTIFit.py +++ b/nipype/interfaces/fsl/tests/test_auto_DTIFit.py @@ -4,78 +4,47 @@ def test_DTIFit_inputs(): input_map = dict( - args=dict(argstr='%s', ), - base_name=dict( - argstr='-o %s', - position=1, - usedefault=True, - ), - bvals=dict( - argstr='-b %s', - extensions=None, - mandatory=True, - position=4, - ), - bvecs=dict( - argstr='-r %s', - extensions=None, - mandatory=True, - position=3, - ), - cni=dict( - argstr='--cni=%s', - extensions=None, - ), - dwi=dict( - argstr='-k %s', - extensions=None, - mandatory=True, - position=0, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - gradnonlin=dict( - argstr='--gradnonlin=%s', - extensions=None, - ), - little_bit=dict(argstr='--littlebit', ), - mask=dict( - argstr='-m %s', - extensions=None, - 
mandatory=True, - position=2, - ), - max_x=dict(argstr='-X %d', ), - max_y=dict(argstr='-Y %d', ), - max_z=dict(argstr='-Z %d', ), - min_x=dict(argstr='-x %d', ), - min_y=dict(argstr='-y %d', ), - min_z=dict(argstr='-z %d', ), + args=dict(argstr="%s",), + base_name=dict(argstr="-o %s", position=1, usedefault=True,), + bvals=dict(argstr="-b %s", extensions=None, mandatory=True, position=4,), + bvecs=dict(argstr="-r %s", extensions=None, mandatory=True, position=3,), + cni=dict(argstr="--cni=%s", extensions=None,), + dwi=dict(argstr="-k %s", extensions=None, mandatory=True, position=0,), + environ=dict(nohash=True, usedefault=True,), + gradnonlin=dict(argstr="--gradnonlin=%s", extensions=None,), + little_bit=dict(argstr="--littlebit",), + mask=dict(argstr="-m %s", extensions=None, mandatory=True, position=2,), + max_x=dict(argstr="-X %d",), + max_y=dict(argstr="-Y %d",), + max_z=dict(argstr="-Z %d",), + min_x=dict(argstr="-x %d",), + min_y=dict(argstr="-y %d",), + min_z=dict(argstr="-z %d",), output_type=dict(), - save_tensor=dict(argstr='--save_tensor', ), - sse=dict(argstr='--sse', ), + save_tensor=dict(argstr="--save_tensor",), + sse=dict(argstr="--sse",), ) inputs = DTIFit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DTIFit_outputs(): output_map = dict( - FA=dict(extensions=None, ), - L1=dict(extensions=None, ), - L2=dict(extensions=None, ), - L3=dict(extensions=None, ), - MD=dict(extensions=None, ), - MO=dict(extensions=None, ), - S0=dict(extensions=None, ), - V1=dict(extensions=None, ), - V2=dict(extensions=None, ), - V3=dict(extensions=None, ), - sse=dict(extensions=None, ), - tensor=dict(extensions=None, ), + FA=dict(extensions=None,), + L1=dict(extensions=None,), + L2=dict(extensions=None,), + L3=dict(extensions=None,), + MD=dict(extensions=None,), + MO=dict(extensions=None,), + S0=dict(extensions=None,), + V1=dict(extensions=None,), + V2=dict(extensions=None,), + V3=dict(extensions=None,), + sse=dict(extensions=None,), + tensor=dict(extensions=None,), ) outputs = DTIFit.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_DilateImage.py b/nipype/interfaces/fsl/tests/test_auto_DilateImage.py index 0bd0fbd869..4a9a49eb45 100644 --- a/nipype/interfaces/fsl/tests/test_auto_DilateImage.py +++ b/nipype/interfaces/fsl/tests/test_auto_DilateImage.py @@ -4,56 +4,21 @@ def test_DilateImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - internal_datatype=dict( - argstr='-dt %s', - position=1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + internal_datatype=dict(argstr="-dt %s", position=1,), kernel_file=dict( - argstr='%s', - extensions=None, - position=5, - xor=['kernel_size'], - ), - kernel_shape=dict( - argstr='-kernel %s', - position=4, - ), - kernel_size=dict( - argstr='%.4f', - position=5, - xor=['kernel_file'], - ), - nan2zeros=dict( - argstr='-nan', - position=3, - ), - operation=dict( - argstr='-dil%s', - mandatory=True, - position=6, + argstr="%s", extensions=None, position=5, xor=["kernel_size"], ), + kernel_shape=dict(argstr="-kernel %s", position=4,), + kernel_size=dict(argstr="%.4f", position=5, xor=["kernel_file"],), + nan2zeros=dict(argstr="-nan", position=3,), + 
operation=dict(argstr="-dil%s", mandatory=True, position=6,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-2, - ), - output_datatype=dict( - argstr='-odt %s', - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), + output_datatype=dict(argstr="-odt %s", position=-1,), output_type=dict(), ) inputs = DilateImage.input_spec() @@ -61,8 +26,10 @@ def test_DilateImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DilateImage_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = DilateImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_DistanceMap.py b/nipype/interfaces/fsl/tests/test_auto_DistanceMap.py index 6e67f07c4d..bd2134ec5d 100644 --- a/nipype/interfaces/fsl/tests/test_auto_DistanceMap.py +++ b/nipype/interfaces/fsl/tests/test_auto_DistanceMap.py @@ -4,31 +4,15 @@ def test_DistanceMap_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), distance_map=dict( - argstr='--out=%s', - extensions=None, - genfile=True, - hash_files=False, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='--in=%s', - extensions=None, - mandatory=True, - ), - invert_input=dict(argstr='--invert', ), - local_max_file=dict( - argstr='--localmax=%s', - hash_files=False, - ), - mask_file=dict( - argstr='--mask=%s', - extensions=None, + argstr="--out=%s", extensions=None, genfile=True, hash_files=False, ), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="--in=%s", extensions=None, mandatory=True,), + invert_input=dict(argstr="--invert",), + local_max_file=dict(argstr="--localmax=%s", hash_files=False,), + mask_file=dict(argstr="--mask=%s", extensions=None,), output_type=dict(), ) inputs = DistanceMap.input_spec() @@ -36,10 +20,11 @@ def test_DistanceMap_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DistanceMap_outputs(): output_map = dict( - distance_map=dict(extensions=None, ), - local_max_file=dict(extensions=None, ), + distance_map=dict(extensions=None,), local_max_file=dict(extensions=None,), ) outputs = DistanceMap.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_DualRegression.py b/nipype/interfaces/fsl/tests/test_auto_DualRegression.py index 2a9477da6c..99e6a0f23d 100644 --- a/nipype/interfaces/fsl/tests/test_auto_DualRegression.py +++ b/nipype/interfaces/fsl/tests/test_auto_DualRegression.py @@ -4,53 +4,18 @@ def test_DualRegression_inputs(): input_map = dict( - args=dict(argstr='%s', ), - con_file=dict( - argstr='%s', - extensions=None, - position=4, - ), - des_norm=dict( - argstr='%i', - position=2, - usedefault=True, - ), - design_file=dict( - argstr='%s', - extensions=None, - position=3, - ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + con_file=dict(argstr="%s", extensions=None, position=4,), + des_norm=dict(argstr="%i", position=2, usedefault=True,), + design_file=dict(argstr="%s", extensions=None, position=3,), + environ=dict(nohash=True, usedefault=True,), group_IC_maps_4D=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=1, - ), - in_files=dict( - argstr='%s', - 
mandatory=True, - position=-1, - sep=' ', - ), - n_perm=dict( - argstr='%i', - mandatory=True, - position=5, - ), - one_sample_group_mean=dict( - argstr='-1', - position=3, - ), - out_dir=dict( - argstr='%s', - genfile=True, - position=6, - usedefault=True, + argstr="%s", extensions=None, mandatory=True, position=1, ), + in_files=dict(argstr="%s", mandatory=True, position=-1, sep=" ",), + n_perm=dict(argstr="%i", mandatory=True, position=5,), + one_sample_group_mean=dict(argstr="-1", position=3,), + out_dir=dict(argstr="%s", genfile=True, position=6, usedefault=True,), output_type=dict(), ) inputs = DualRegression.input_spec() @@ -58,8 +23,10 @@ def test_DualRegression_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DualRegression_outputs(): - output_map = dict(out_dir=dict(), ) + output_map = dict(out_dir=dict(),) outputs = DualRegression.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_EPIDeWarp.py b/nipype/interfaces/fsl/tests/test_auto_EPIDeWarp.py index 3f5396ea8e..39476ab324 100644 --- a/nipype/interfaces/fsl/tests/test_auto_EPIDeWarp.py +++ b/nipype/interfaces/fsl/tests/test_auto_EPIDeWarp.py @@ -4,76 +4,36 @@ def test_EPIDeWarp_inputs(): input_map = dict( - args=dict(argstr='%s', ), - cleanup=dict(argstr='--cleanup', ), - dph_file=dict( - argstr='--dph %s', - extensions=None, - mandatory=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - epi_file=dict( - argstr='--epi %s', - extensions=None, - ), - epidw=dict( - argstr='--epidw %s', - genfile=False, - ), - esp=dict( - argstr='--esp %s', - usedefault=True, - ), - exf_file=dict( - argstr='--exf %s', - extensions=None, - ), - exfdw=dict( - argstr='--exfdw %s', - genfile=True, - ), - mag_file=dict( - argstr='--mag %s', - extensions=None, - mandatory=True, - position=0, - ), - nocleanup=dict( - argstr='--nocleanup', - usedefault=True, - ), + args=dict(argstr="%s",), + cleanup=dict(argstr="--cleanup",), + dph_file=dict(argstr="--dph %s", extensions=None, mandatory=True,), + environ=dict(nohash=True, usedefault=True,), + epi_file=dict(argstr="--epi %s", extensions=None,), + epidw=dict(argstr="--epidw %s", genfile=False,), + esp=dict(argstr="--esp %s", usedefault=True,), + exf_file=dict(argstr="--exf %s", extensions=None,), + exfdw=dict(argstr="--exfdw %s", genfile=True,), + mag_file=dict(argstr="--mag %s", extensions=None, mandatory=True, position=0,), + nocleanup=dict(argstr="--nocleanup", usedefault=True,), output_type=dict(), - sigma=dict( - argstr='--sigma %s', - usedefault=True, - ), - tediff=dict( - argstr='--tediff %s', - usedefault=True, - ), - tmpdir=dict( - argstr='--tmpdir %s', - genfile=True, - ), - vsm=dict( - argstr='--vsm %s', - genfile=True, - ), + sigma=dict(argstr="--sigma %s", usedefault=True,), + tediff=dict(argstr="--tediff %s", usedefault=True,), + tmpdir=dict(argstr="--tmpdir %s", genfile=True,), + vsm=dict(argstr="--vsm %s", genfile=True,), ) inputs = EPIDeWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EPIDeWarp_outputs(): output_map = dict( - exf_mask=dict(extensions=None, ), - exfdw=dict(extensions=None, ), - unwarped_file=dict(extensions=None, ), - vsm_file=dict(extensions=None, ), + exf_mask=dict(extensions=None,), + exfdw=dict(extensions=None,), + 
unwarped_file=dict(extensions=None,), + vsm_file=dict(extensions=None,), ) outputs = EPIDeWarp.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_Eddy.py b/nipype/interfaces/fsl/tests/test_auto_Eddy.py index 065fbb1889..cc7eff7a27 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Eddy.py +++ b/nipype/interfaces/fsl/tests/test_auto_Eddy.py @@ -4,98 +4,39 @@ def test_Eddy_inputs(): input_map = dict( - args=dict(argstr='%s', ), - cnr_maps=dict( - argstr='--cnr_maps', - min_ver='5.0.10', - ), - dont_peas=dict(argstr='--dont_peas', ), - dont_sep_offs_move=dict(argstr='--dont_sep_offs_move', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fep=dict(argstr='--fep', ), - field=dict(argstr='--field=%s', ), - field_mat=dict( - argstr='--field_mat=%s', - extensions=None, - ), - flm=dict(argstr='--flm=%s', ), - fudge_factor=dict( - argstr='--ff=%s', - usedefault=True, - ), - fwhm=dict(argstr='--fwhm=%s', ), - in_acqp=dict( - argstr='--acqp=%s', - extensions=None, - mandatory=True, - ), - in_bval=dict( - argstr='--bvals=%s', - extensions=None, - mandatory=True, - ), - in_bvec=dict( - argstr='--bvecs=%s', - extensions=None, - mandatory=True, - ), - in_file=dict( - argstr='--imain=%s', - extensions=None, - mandatory=True, - ), - in_index=dict( - argstr='--index=%s', - extensions=None, - mandatory=True, - ), - in_mask=dict( - argstr='--mask=%s', - extensions=None, - mandatory=True, - ), + args=dict(argstr="%s",), + cnr_maps=dict(argstr="--cnr_maps", min_ver="5.0.10",), + dont_peas=dict(argstr="--dont_peas",), + dont_sep_offs_move=dict(argstr="--dont_sep_offs_move",), + environ=dict(nohash=True, usedefault=True,), + fep=dict(argstr="--fep",), + field=dict(argstr="--field=%s",), + field_mat=dict(argstr="--field_mat=%s", extensions=None,), + flm=dict(argstr="--flm=%s",), + fudge_factor=dict(argstr="--ff=%s", usedefault=True,), + fwhm=dict(argstr="--fwhm=%s",), + in_acqp=dict(argstr="--acqp=%s", extensions=None, mandatory=True,), + in_bval=dict(argstr="--bvals=%s", extensions=None, mandatory=True,), + in_bvec=dict(argstr="--bvecs=%s", extensions=None, mandatory=True,), + in_file=dict(argstr="--imain=%s", extensions=None, mandatory=True,), + in_index=dict(argstr="--index=%s", extensions=None, mandatory=True,), + in_mask=dict(argstr="--mask=%s", extensions=None, mandatory=True,), in_topup_fieldcoef=dict( - argstr='--topup=%s', - extensions=None, - requires=['in_topup_movpar'], - ), - in_topup_movpar=dict( - extensions=None, - requires=['in_topup_fieldcoef'], - ), - interp=dict(argstr='--interp=%s', ), - is_shelled=dict(argstr='--data_is_shelled', ), - method=dict(argstr='--resamp=%s', ), - niter=dict( - argstr='--niter=%s', - usedefault=True, - ), - num_threads=dict( - nohash=True, - usedefault=True, - ), - nvoxhp=dict( - argstr='--nvoxhp=%s', - usedefault=True, - ), - out_base=dict( - argstr='--out=%s', - usedefault=True, - ), + argstr="--topup=%s", extensions=None, requires=["in_topup_movpar"], + ), + in_topup_movpar=dict(extensions=None, requires=["in_topup_fieldcoef"],), + interp=dict(argstr="--interp=%s",), + is_shelled=dict(argstr="--data_is_shelled",), + method=dict(argstr="--resamp=%s",), + niter=dict(argstr="--niter=%s", usedefault=True,), + num_threads=dict(nohash=True, usedefault=True,), + nvoxhp=dict(argstr="--nvoxhp=%s", usedefault=True,), + out_base=dict(argstr="--out=%s", usedefault=True,), output_type=dict(), - repol=dict(argstr='--repol', ), - residuals=dict( - argstr='--residuals', - min_ver='5.0.10', - ), - session=dict( - argstr='--session=%s', - extensions=None, - 
), - slm=dict(argstr='--slm=%s', ), + repol=dict(argstr="--repol",), + residuals=dict(argstr="--residuals", min_ver="5.0.10",), + session=dict(argstr="--session=%s", extensions=None,), + slm=dict(argstr="--slm=%s",), use_cuda=dict(), ) inputs = Eddy.input_spec() @@ -103,17 +44,19 @@ def test_Eddy_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Eddy_outputs(): output_map = dict( - out_cnr_maps=dict(extensions=None, ), - out_corrected=dict(extensions=None, ), - out_movement_rms=dict(extensions=None, ), - out_outlier_report=dict(extensions=None, ), - out_parameter=dict(extensions=None, ), - out_residuals=dict(extensions=None, ), - out_restricted_movement_rms=dict(extensions=None, ), - out_rotated_bvecs=dict(extensions=None, ), - out_shell_alignment_parameters=dict(extensions=None, ), + out_cnr_maps=dict(extensions=None,), + out_corrected=dict(extensions=None,), + out_movement_rms=dict(extensions=None,), + out_outlier_report=dict(extensions=None,), + out_parameter=dict(extensions=None,), + out_residuals=dict(extensions=None,), + out_restricted_movement_rms=dict(extensions=None,), + out_rotated_bvecs=dict(extensions=None,), + out_shell_alignment_parameters=dict(extensions=None,), ) outputs = Eddy.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_EddyCorrect.py b/nipype/interfaces/fsl/tests/test_auto_EddyCorrect.py index f1d5113661..fbd2af30cb 100644 --- a/nipype/interfaces/fsl/tests/test_auto_EddyCorrect.py +++ b/nipype/interfaces/fsl/tests/test_auto_EddyCorrect.py @@ -4,40 +4,29 @@ def test_EddyCorrect_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=0, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=0,), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, - name_source=['in_file'], - name_template='%s_edc', - output_name='eddy_corrected', + name_source=["in_file"], + name_template="%s_edc", + output_name="eddy_corrected", position=1, ), output_type=dict(), - ref_num=dict( - argstr='%d', - mandatory=True, - position=2, - usedefault=True, - ), + ref_num=dict(argstr="%d", mandatory=True, position=2, usedefault=True,), ) inputs = EddyCorrect.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EddyCorrect_outputs(): - output_map = dict(eddy_corrected=dict(extensions=None, ), ) + output_map = dict(eddy_corrected=dict(extensions=None,),) outputs = EddyCorrect.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_EddyQuad.py b/nipype/interfaces/fsl/tests/test_auto_EddyQuad.py index edfe407ae4..14b6ca3a28 100644 --- a/nipype/interfaces/fsl/tests/test_auto_EddyQuad.py +++ b/nipype/interfaces/fsl/tests/test_auto_EddyQuad.py @@ -4,71 +4,39 @@ def test_EddyQuad_inputs(): input_map = dict( - args=dict(argstr='%s', ), - base_name=dict( - argstr='%s', - position=0, - usedefault=True, - ), - bval_file=dict( - argstr='--bvals %s', - extensions=None, - mandatory=True, - ), - bvec_file=dict( - argstr='--bvecs %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - field=dict( - argstr='--field %s', - extensions=None, - ), - 
idx_file=dict( - argstr='--eddyIdx %s', - extensions=None, - mandatory=True, - ), - mask_file=dict( - argstr='--mask %s', - extensions=None, - mandatory=True, - ), + args=dict(argstr="%s",), + base_name=dict(argstr="%s", position=0, usedefault=True,), + bval_file=dict(argstr="--bvals %s", extensions=None, mandatory=True,), + bvec_file=dict(argstr="--bvecs %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), + field=dict(argstr="--field %s", extensions=None,), + idx_file=dict(argstr="--eddyIdx %s", extensions=None, mandatory=True,), + mask_file=dict(argstr="--mask %s", extensions=None, mandatory=True,), output_dir=dict( - argstr='--output-dir %s', - name_source=['base_name'], - name_template='%s.qc', + argstr="--output-dir %s", name_source=["base_name"], name_template="%s.qc", ), output_type=dict(), - param_file=dict( - argstr='--eddyParams %s', - extensions=None, - mandatory=True, - ), - slice_spec=dict( - argstr='--slspec %s', - extensions=None, - ), - verbose=dict(argstr='--verbose', ), + param_file=dict(argstr="--eddyParams %s", extensions=None, mandatory=True,), + slice_spec=dict(argstr="--slspec %s", extensions=None,), + verbose=dict(argstr="--verbose",), ) inputs = EddyQuad.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EddyQuad_outputs(): output_map = dict( avg_b0_pe_png=dict(), avg_b_png=dict(), - clean_volumes=dict(extensions=None, ), + clean_volumes=dict(extensions=None,), cnr_png=dict(), - qc_json=dict(extensions=None, ), - qc_pdf=dict(extensions=None, ), - residuals=dict(extensions=None, ), - vdm_png=dict(extensions=None, ), + qc_json=dict(extensions=None,), + qc_pdf=dict(extensions=None,), + residuals=dict(extensions=None,), + vdm_png=dict(extensions=None,), ) outputs = EddyQuad.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_EpiReg.py b/nipype/interfaces/fsl/tests/test_auto_EpiReg.py index 8f7cadfbb5..f1f1482260 100644 --- a/nipype/interfaces/fsl/tests/test_auto_EpiReg.py +++ b/nipype/interfaces/fsl/tests/test_auto_EpiReg.py @@ -4,84 +4,48 @@ def test_EpiReg_inputs(): input_map = dict( - args=dict(argstr='%s', ), - echospacing=dict(argstr='--echospacing=%f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - epi=dict( - argstr='--epi=%s', - extensions=None, - mandatory=True, - position=-4, - ), - fmap=dict( - argstr='--fmap=%s', - extensions=None, - ), - fmapmag=dict( - argstr='--fmapmag=%s', - extensions=None, - ), - fmapmagbrain=dict( - argstr='--fmapmagbrain=%s', - extensions=None, - ), - no_clean=dict( - argstr='--noclean', - usedefault=True, - ), - no_fmapreg=dict(argstr='--nofmapreg', ), - out_base=dict( - argstr='--out=%s', - position=-1, - usedefault=True, - ), + args=dict(argstr="%s",), + echospacing=dict(argstr="--echospacing=%f",), + environ=dict(nohash=True, usedefault=True,), + epi=dict(argstr="--epi=%s", extensions=None, mandatory=True, position=-4,), + fmap=dict(argstr="--fmap=%s", extensions=None,), + fmapmag=dict(argstr="--fmapmag=%s", extensions=None,), + fmapmagbrain=dict(argstr="--fmapmagbrain=%s", extensions=None,), + no_clean=dict(argstr="--noclean", usedefault=True,), + no_fmapreg=dict(argstr="--nofmapreg",), + out_base=dict(argstr="--out=%s", position=-1, usedefault=True,), output_type=dict(), - pedir=dict(argstr='--pedir=%s', ), + pedir=dict(argstr="--pedir=%s",), t1_brain=dict( - argstr='--t1brain=%s', - extensions=None, - mandatory=True, - position=-2, - ), - t1_head=dict( - 
argstr='--t1=%s', - extensions=None, - mandatory=True, - position=-3, - ), - weight_image=dict( - argstr='--weight=%s', - extensions=None, - ), - wmseg=dict( - argstr='--wmseg=%s', - extensions=None, + argstr="--t1brain=%s", extensions=None, mandatory=True, position=-2, ), + t1_head=dict(argstr="--t1=%s", extensions=None, mandatory=True, position=-3,), + weight_image=dict(argstr="--weight=%s", extensions=None,), + wmseg=dict(argstr="--wmseg=%s", extensions=None,), ) inputs = EpiReg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EpiReg_outputs(): output_map = dict( - epi2str_inv=dict(extensions=None, ), - epi2str_mat=dict(extensions=None, ), - fmap2epi_mat=dict(extensions=None, ), - fmap2str_mat=dict(extensions=None, ), - fmap_epi=dict(extensions=None, ), - fmap_str=dict(extensions=None, ), - fmapmag_str=dict(extensions=None, ), - fullwarp=dict(extensions=None, ), - out_1vol=dict(extensions=None, ), - out_file=dict(extensions=None, ), - seg=dict(extensions=None, ), - shiftmap=dict(extensions=None, ), - wmedge=dict(extensions=None, ), - wmseg=dict(extensions=None, ), + epi2str_inv=dict(extensions=None,), + epi2str_mat=dict(extensions=None,), + fmap2epi_mat=dict(extensions=None,), + fmap2str_mat=dict(extensions=None,), + fmap_epi=dict(extensions=None,), + fmap_str=dict(extensions=None,), + fmapmag_str=dict(extensions=None,), + fullwarp=dict(extensions=None,), + out_1vol=dict(extensions=None,), + out_file=dict(extensions=None,), + seg=dict(extensions=None,), + shiftmap=dict(extensions=None,), + wmedge=dict(extensions=None,), + wmseg=dict(extensions=None,), ) outputs = EpiReg.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_ErodeImage.py b/nipype/interfaces/fsl/tests/test_auto_ErodeImage.py index 09612d176a..1b813812c3 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ErodeImage.py +++ b/nipype/interfaces/fsl/tests/test_auto_ErodeImage.py @@ -4,56 +4,21 @@ def test_ErodeImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - internal_datatype=dict( - argstr='-dt %s', - position=1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + internal_datatype=dict(argstr="-dt %s", position=1,), kernel_file=dict( - argstr='%s', - extensions=None, - position=5, - xor=['kernel_size'], - ), - kernel_shape=dict( - argstr='-kernel %s', - position=4, - ), - kernel_size=dict( - argstr='%.4f', - position=5, - xor=['kernel_file'], - ), - minimum_filter=dict( - argstr='%s', - position=6, - usedefault=True, - ), - nan2zeros=dict( - argstr='-nan', - position=3, + argstr="%s", extensions=None, position=5, xor=["kernel_size"], ), + kernel_shape=dict(argstr="-kernel %s", position=4,), + kernel_size=dict(argstr="%.4f", position=5, xor=["kernel_file"],), + minimum_filter=dict(argstr="%s", position=6, usedefault=True,), + nan2zeros=dict(argstr="-nan", position=3,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-2, - ), - output_datatype=dict( - argstr='-odt %s', - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), + output_datatype=dict(argstr="-odt %s", position=-1,), output_type=dict(), ) inputs = ErodeImage.input_spec() @@ -61,8 +26,10 @@ 
def test_ErodeImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ErodeImage_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ErodeImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_ExtractROI.py b/nipype/interfaces/fsl/tests/test_auto_ExtractROI.py index 04bcc217bc..4039eb50a2 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ExtractROI.py +++ b/nipype/interfaces/fsl/tests/test_auto_ExtractROI.py @@ -4,73 +4,45 @@ def test_ExtractROI_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), crop_list=dict( - argstr='%s', + argstr="%s", position=2, xor=[ - 'x_min', 'x_size', 'y_min', 'y_size', 'z_min', 'z_size', - 't_min', 't_size' + "x_min", + "x_size", + "y_min", + "y_size", + "z_min", + "z_size", + "t_min", + "t_size", ], ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=0, - ), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=0,), output_type=dict(), roi_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=1, - ), - t_min=dict( - argstr='%d', - position=8, - ), - t_size=dict( - argstr='%d', - position=9, - ), - x_min=dict( - argstr='%d', - position=2, - ), - x_size=dict( - argstr='%d', - position=3, - ), - y_min=dict( - argstr='%d', - position=4, - ), - y_size=dict( - argstr='%d', - position=5, - ), - z_min=dict( - argstr='%d', - position=6, - ), - z_size=dict( - argstr='%d', - position=7, - ), + argstr="%s", extensions=None, genfile=True, hash_files=False, position=1, + ), + t_min=dict(argstr="%d", position=8,), + t_size=dict(argstr="%d", position=9,), + x_min=dict(argstr="%d", position=2,), + x_size=dict(argstr="%d", position=3,), + y_min=dict(argstr="%d", position=4,), + y_size=dict(argstr="%d", position=5,), + z_min=dict(argstr="%d", position=6,), + z_size=dict(argstr="%d", position=7,), ) inputs = ExtractROI.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ExtractROI_outputs(): - output_map = dict(roi_file=dict(extensions=None, ), ) + output_map = dict(roi_file=dict(extensions=None,),) outputs = ExtractROI.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_FAST.py b/nipype/interfaces/fsl/tests/test_auto_FAST.py index aae90cd3a3..3c245682a6 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FAST.py +++ b/nipype/interfaces/fsl/tests/test_auto_FAST.py @@ -4,67 +4,49 @@ def test_FAST_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bias_iters=dict(argstr='-I %d', ), - bias_lowpass=dict( - argstr='-l %d', - units='mm', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - hyper=dict(argstr='-H %.2f', ), - img_type=dict(argstr='-t %d', ), - in_files=dict( - argstr='%s', - copyfile=False, - mandatory=True, - position=-1, - ), - init_seg_smooth=dict(argstr='-f %.3f', ), - init_transform=dict( - argstr='-a %s', - extensions=None, - ), - iters_afterbias=dict(argstr='-O %d', ), - manual_seg=dict( - argstr='-s %s', - extensions=None, - ), - mixel_smooth=dict(argstr='-R %.2f', ), - no_bias=dict(argstr='-N', ), - 
no_pve=dict(argstr='--nopve', ), - number_classes=dict(argstr='-n %d', ), - other_priors=dict(argstr='-A %s', ), - out_basename=dict( - argstr='-o %s', - extensions=None, - ), - output_biascorrected=dict(argstr='-B', ), - output_biasfield=dict(argstr='-b', ), + args=dict(argstr="%s",), + bias_iters=dict(argstr="-I %d",), + bias_lowpass=dict(argstr="-l %d", units="mm",), + environ=dict(nohash=True, usedefault=True,), + hyper=dict(argstr="-H %.2f",), + img_type=dict(argstr="-t %d",), + in_files=dict(argstr="%s", copyfile=False, mandatory=True, position=-1,), + init_seg_smooth=dict(argstr="-f %.3f",), + init_transform=dict(argstr="-a %s", extensions=None,), + iters_afterbias=dict(argstr="-O %d",), + manual_seg=dict(argstr="-s %s", extensions=None,), + mixel_smooth=dict(argstr="-R %.2f",), + no_bias=dict(argstr="-N",), + no_pve=dict(argstr="--nopve",), + number_classes=dict(argstr="-n %d",), + other_priors=dict(argstr="-A %s",), + out_basename=dict(argstr="-o %s", extensions=None,), + output_biascorrected=dict(argstr="-B",), + output_biasfield=dict(argstr="-b",), output_type=dict(), - probability_maps=dict(argstr='-p', ), - segment_iters=dict(argstr='-W %d', ), - segments=dict(argstr='-g', ), - use_priors=dict(argstr='-P', ), - verbose=dict(argstr='-v', ), + probability_maps=dict(argstr="-p",), + segment_iters=dict(argstr="-W %d",), + segments=dict(argstr="-g",), + use_priors=dict(argstr="-P",), + verbose=dict(argstr="-v",), ) inputs = FAST.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FAST_outputs(): output_map = dict( bias_field=dict(), - mixeltype=dict(extensions=None, ), + mixeltype=dict(extensions=None,), partial_volume_files=dict(), - partial_volume_map=dict(extensions=None, ), + partial_volume_map=dict(extensions=None,), probability_maps=dict(), restored_image=dict(), tissue_class_files=dict(), - tissue_class_map=dict(extensions=None, ), + tissue_class_map=dict(extensions=None,), ) outputs = FAST.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_FEAT.py b/nipype/interfaces/fsl/tests/test_auto_FEAT.py index 3b86062923..f2d4e1a90d 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FEAT.py +++ b/nipype/interfaces/fsl/tests/test_auto_FEAT.py @@ -4,17 +4,9 @@ def test_FEAT_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fsf_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=0, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + fsf_file=dict(argstr="%s", extensions=None, mandatory=True, position=0,), output_type=dict(), ) inputs = FEAT.input_spec() @@ -22,8 +14,10 @@ def test_FEAT_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FEAT_outputs(): - output_map = dict(feat_dir=dict(), ) + output_map = dict(feat_dir=dict(),) outputs = FEAT.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_FEATModel.py b/nipype/interfaces/fsl/tests/test_auto_FEATModel.py index 7f1b98c8c4..46e959a2a0 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FEATModel.py +++ b/nipype/interfaces/fsl/tests/test_auto_FEATModel.py @@ -4,23 +4,11 @@ def test_FEATModel_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - ev_files=dict( 
- argstr='%s', - copyfile=False, - mandatory=True, - position=1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + ev_files=dict(argstr="%s", copyfile=False, mandatory=True, position=1,), fsf_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=0, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=0, ), output_type=dict(), ) @@ -29,13 +17,15 @@ def test_FEATModel_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FEATModel_outputs(): output_map = dict( - con_file=dict(extensions=None, ), - design_cov=dict(extensions=None, ), - design_file=dict(extensions=None, ), - design_image=dict(extensions=None, ), - fcon_file=dict(extensions=None, ), + con_file=dict(extensions=None,), + design_cov=dict(extensions=None,), + design_file=dict(extensions=None,), + design_image=dict(extensions=None,), + fcon_file=dict(extensions=None,), ) outputs = FEATModel.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_FEATRegister.py b/nipype/interfaces/fsl/tests/test_auto_FEATRegister.py index 56a3e2ba01..bd7ae5f7c3 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FEATRegister.py +++ b/nipype/interfaces/fsl/tests/test_auto_FEATRegister.py @@ -4,20 +4,19 @@ def test_FEATRegister_inputs(): input_map = dict( - feat_dirs=dict(mandatory=True, ), - reg_dof=dict(usedefault=True, ), - reg_image=dict( - extensions=None, - mandatory=True, - ), + feat_dirs=dict(mandatory=True,), + reg_dof=dict(usedefault=True,), + reg_image=dict(extensions=None, mandatory=True,), ) inputs = FEATRegister.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FEATRegister_outputs(): - output_map = dict(fsf_file=dict(extensions=None, ), ) + output_map = dict(fsf_file=dict(extensions=None,),) outputs = FEATRegister.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_FIRST.py b/nipype/interfaces/fsl/tests/test_auto_FIRST.py index 4b9cb5928b..c34f1737d2 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FIRST.py +++ b/nipype/interfaces/fsl/tests/test_auto_FIRST.py @@ -4,48 +4,28 @@ def test_FIRST_inputs(): input_map = dict( - affine_file=dict( - argstr='-a %s', - extensions=None, - position=6, - ), - args=dict(argstr='%s', ), - brain_extracted=dict( - argstr='-b', - position=2, - ), - environ=dict( - nohash=True, - usedefault=True, - ), + affine_file=dict(argstr="-a %s", extensions=None, position=6,), + args=dict(argstr="%s",), + brain_extracted=dict(argstr="-b", position=2,), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='-i %s', + argstr="-i %s", copyfile=False, extensions=None, mandatory=True, position=-2, ), - list_of_specific_structures=dict( - argstr='-s %s', - position=5, - sep=',', - ), + list_of_specific_structures=dict(argstr="-s %s", position=5, sep=",",), method=dict( - argstr='-m %s', + argstr="-m %s", position=4, usedefault=True, - xor=['method_as_numerical_threshold'], - ), - method_as_numerical_threshold=dict( - argstr='-m %.4f', - position=4, - ), - no_cleanup=dict( - argstr='-d', - position=3, + xor=["method_as_numerical_threshold"], ), + method_as_numerical_threshold=dict(argstr="-m %.4f", position=4,), + no_cleanup=dict(argstr="-d", position=3,), out_file=dict( - argstr='-o %s', + argstr="-o %s", 
extensions=None, hash_files=False, mandatory=True, @@ -53,21 +33,20 @@ def test_FIRST_inputs(): usedefault=True, ), output_type=dict(), - verbose=dict( - argstr='-v', - position=1, - ), + verbose=dict(argstr="-v", position=1,), ) inputs = FIRST.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FIRST_outputs(): output_map = dict( bvars=dict(), - original_segmentations=dict(extensions=None, ), - segmentation_file=dict(extensions=None, ), + original_segmentations=dict(extensions=None,), + segmentation_file=dict(extensions=None,), vtk_surfaces=dict(), ) outputs = FIRST.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_FLAMEO.py b/nipype/interfaces/fsl/tests/test_auto_FLAMEO.py index e3dfb7f933..13690485a5 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FLAMEO.py +++ b/nipype/interfaces/fsl/tests/test_auto_FLAMEO.py @@ -4,71 +4,37 @@ def test_FLAMEO_inputs(): input_map = dict( - args=dict(argstr='%s', ), - burnin=dict(argstr='--burnin=%d', ), - cope_file=dict( - argstr='--copefile=%s', - extensions=None, - mandatory=True, - ), + args=dict(argstr="%s",), + burnin=dict(argstr="--burnin=%d",), + cope_file=dict(argstr="--copefile=%s", extensions=None, mandatory=True,), cov_split_file=dict( - argstr='--covsplitfile=%s', - extensions=None, - mandatory=True, - ), - design_file=dict( - argstr='--designfile=%s', - extensions=None, - mandatory=True, - ), - dof_var_cope_file=dict( - argstr='--dofvarcopefile=%s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - f_con_file=dict( - argstr='--fcontrastsfile=%s', - extensions=None, - ), - fix_mean=dict(argstr='--fixmean', ), - infer_outliers=dict(argstr='--inferoutliers', ), - log_dir=dict( - argstr='--ld=%s', - usedefault=True, - ), - mask_file=dict( - argstr='--maskfile=%s', - extensions=None, - mandatory=True, - ), - n_jumps=dict(argstr='--njumps=%d', ), - no_pe_outputs=dict(argstr='--nopeoutput', ), - outlier_iter=dict(argstr='--ioni=%d', ), + argstr="--covsplitfile=%s", extensions=None, mandatory=True, + ), + design_file=dict(argstr="--designfile=%s", extensions=None, mandatory=True,), + dof_var_cope_file=dict(argstr="--dofvarcopefile=%s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), + f_con_file=dict(argstr="--fcontrastsfile=%s", extensions=None,), + fix_mean=dict(argstr="--fixmean",), + infer_outliers=dict(argstr="--inferoutliers",), + log_dir=dict(argstr="--ld=%s", usedefault=True,), + mask_file=dict(argstr="--maskfile=%s", extensions=None, mandatory=True,), + n_jumps=dict(argstr="--njumps=%d",), + no_pe_outputs=dict(argstr="--nopeoutput",), + outlier_iter=dict(argstr="--ioni=%d",), output_type=dict(), - run_mode=dict( - argstr='--runmode=%s', - mandatory=True, - ), - sample_every=dict(argstr='--sampleevery=%d', ), - sigma_dofs=dict(argstr='--sigma_dofs=%d', ), - t_con_file=dict( - argstr='--tcontrastsfile=%s', - extensions=None, - mandatory=True, - ), - var_cope_file=dict( - argstr='--varcopefile=%s', - extensions=None, - ), + run_mode=dict(argstr="--runmode=%s", mandatory=True,), + sample_every=dict(argstr="--sampleevery=%d",), + sigma_dofs=dict(argstr="--sigma_dofs=%d",), + t_con_file=dict(argstr="--tcontrastsfile=%s", extensions=None, mandatory=True,), + var_cope_file=dict(argstr="--varcopefile=%s", extensions=None,), ) inputs = FLAMEO.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value + + def test_FLAMEO_outputs(): output_map = dict( copes=dict(), diff --git a/nipype/interfaces/fsl/tests/test_auto_FLIRT.py b/nipype/interfaces/fsl/tests/test_auto_FLIRT.py index 88cb684c01..ce2fca2486 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FLIRT.py +++ b/nipype/interfaces/fsl/tests/test_auto_FLIRT.py @@ -4,172 +4,90 @@ def test_FLIRT_inputs(): input_map = dict( - angle_rep=dict(argstr='-anglerep %s', ), - apply_isoxfm=dict( - argstr='-applyisoxfm %f', - xor=['apply_xfm'], - ), - apply_xfm=dict(argstr='-applyxfm', ), - args=dict(argstr='%s', ), - bbrslope=dict( - argstr='-bbrslope %f', - min_ver='5.0.0', - ), - bbrtype=dict( - argstr='-bbrtype %s', - min_ver='5.0.0', - ), - bgvalue=dict(argstr='-setbackground %f', ), - bins=dict(argstr='-bins %d', ), - coarse_search=dict( - argstr='-coarsesearch %d', - units='degrees', - ), - cost=dict(argstr='-cost %s', ), - cost_func=dict(argstr='-searchcost %s', ), - datatype=dict(argstr='-datatype %s', ), - display_init=dict(argstr='-displayinit', ), - dof=dict(argstr='-dof %d', ), - echospacing=dict( - argstr='-echospacing %f', - min_ver='5.0.0', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fieldmap=dict( - argstr='-fieldmap %s', - extensions=None, - min_ver='5.0.0', - ), - fieldmapmask=dict( - argstr='-fieldmapmask %s', - extensions=None, - min_ver='5.0.0', - ), - fine_search=dict( - argstr='-finesearch %d', - units='degrees', - ), - force_scaling=dict(argstr='-forcescaling', ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - position=0, - ), - in_matrix_file=dict( - argstr='-init %s', - extensions=None, - ), - in_weight=dict( - argstr='-inweight %s', - extensions=None, - ), - interp=dict(argstr='-interp %s', ), - min_sampling=dict( - argstr='-minsampling %f', - units='mm', - ), - no_clamp=dict(argstr='-noclamp', ), - no_resample=dict(argstr='-noresample', ), - no_resample_blur=dict(argstr='-noresampblur', ), - no_search=dict(argstr='-nosearch', ), + angle_rep=dict(argstr="-anglerep %s",), + apply_isoxfm=dict(argstr="-applyisoxfm %f", xor=["apply_xfm"],), + apply_xfm=dict(argstr="-applyxfm",), + args=dict(argstr="%s",), + bbrslope=dict(argstr="-bbrslope %f", min_ver="5.0.0",), + bbrtype=dict(argstr="-bbrtype %s", min_ver="5.0.0",), + bgvalue=dict(argstr="-setbackground %f",), + bins=dict(argstr="-bins %d",), + coarse_search=dict(argstr="-coarsesearch %d", units="degrees",), + cost=dict(argstr="-cost %s",), + cost_func=dict(argstr="-searchcost %s",), + datatype=dict(argstr="-datatype %s",), + display_init=dict(argstr="-displayinit",), + dof=dict(argstr="-dof %d",), + echospacing=dict(argstr="-echospacing %f", min_ver="5.0.0",), + environ=dict(nohash=True, usedefault=True,), + fieldmap=dict(argstr="-fieldmap %s", extensions=None, min_ver="5.0.0",), + fieldmapmask=dict(argstr="-fieldmapmask %s", extensions=None, min_ver="5.0.0",), + fine_search=dict(argstr="-finesearch %d", units="degrees",), + force_scaling=dict(argstr="-forcescaling",), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True, position=0,), + in_matrix_file=dict(argstr="-init %s", extensions=None,), + in_weight=dict(argstr="-inweight %s", extensions=None,), + interp=dict(argstr="-interp %s",), + min_sampling=dict(argstr="-minsampling %f", units="mm",), + no_clamp=dict(argstr="-noclamp",), + no_resample=dict(argstr="-noresample",), + no_resample_blur=dict(argstr="-noresampblur",), + no_search=dict(argstr="-nosearch",), out_file=dict( - argstr='-out %s', + argstr="-out %s", 
extensions=None, hash_files=False, - name_source=['in_file'], - name_template='%s_flirt', + name_source=["in_file"], + name_template="%s_flirt", position=2, ), out_log=dict( extensions=None, keep_extension=True, - name_source=['in_file'], - name_template='%s_flirt.log', - requires=['save_log'], + name_source=["in_file"], + name_template="%s_flirt.log", + requires=["save_log"], ), out_matrix_file=dict( - argstr='-omat %s', + argstr="-omat %s", extensions=None, hash_files=False, keep_extension=True, - name_source=['in_file'], - name_template='%s_flirt.mat', + name_source=["in_file"], + name_template="%s_flirt.mat", position=3, ), output_type=dict(), - padding_size=dict( - argstr='-paddingsize %d', - units='voxels', - ), - pedir=dict( - argstr='-pedir %d', - min_ver='5.0.0', - ), - ref_weight=dict( - argstr='-refweight %s', - extensions=None, - ), - reference=dict( - argstr='-ref %s', - extensions=None, - mandatory=True, - position=1, - ), - rigid2D=dict(argstr='-2D', ), + padding_size=dict(argstr="-paddingsize %d", units="voxels",), + pedir=dict(argstr="-pedir %d", min_ver="5.0.0",), + ref_weight=dict(argstr="-refweight %s", extensions=None,), + reference=dict(argstr="-ref %s", extensions=None, mandatory=True, position=1,), + rigid2D=dict(argstr="-2D",), save_log=dict(), - schedule=dict( - argstr='-schedule %s', - extensions=None, - ), - searchr_x=dict( - argstr='-searchrx %s', - units='degrees', - ), - searchr_y=dict( - argstr='-searchry %s', - units='degrees', - ), - searchr_z=dict( - argstr='-searchrz %s', - units='degrees', - ), - sinc_width=dict( - argstr='-sincwidth %d', - units='voxels', - ), - sinc_window=dict(argstr='-sincwindow %s', ), - uses_qform=dict(argstr='-usesqform', ), - verbose=dict(argstr='-verbose %d', ), - wm_seg=dict( - argstr='-wmseg %s', - extensions=None, - min_ver='5.0.0', - ), - wmcoords=dict( - argstr='-wmcoords %s', - extensions=None, - min_ver='5.0.0', - ), - wmnorms=dict( - argstr='-wmnorms %s', - extensions=None, - min_ver='5.0.0', - ), + schedule=dict(argstr="-schedule %s", extensions=None,), + searchr_x=dict(argstr="-searchrx %s", units="degrees",), + searchr_y=dict(argstr="-searchry %s", units="degrees",), + searchr_z=dict(argstr="-searchrz %s", units="degrees",), + sinc_width=dict(argstr="-sincwidth %d", units="voxels",), + sinc_window=dict(argstr="-sincwindow %s",), + uses_qform=dict(argstr="-usesqform",), + verbose=dict(argstr="-verbose %d",), + wm_seg=dict(argstr="-wmseg %s", extensions=None, min_ver="5.0.0",), + wmcoords=dict(argstr="-wmcoords %s", extensions=None, min_ver="5.0.0",), + wmnorms=dict(argstr="-wmnorms %s", extensions=None, min_ver="5.0.0",), ) inputs = FLIRT.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FLIRT_outputs(): output_map = dict( - out_file=dict(extensions=None, ), - out_log=dict(extensions=None, ), - out_matrix_file=dict(extensions=None, ), + out_file=dict(extensions=None,), + out_log=dict(extensions=None,), + out_matrix_file=dict(extensions=None,), ) outputs = FLIRT.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_FNIRT.py b/nipype/interfaces/fsl/tests/test_auto_FNIRT.py index c54a2d2955..794ae7d5f0 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FNIRT.py +++ b/nipype/interfaces/fsl/tests/test_auto_FNIRT.py @@ -4,133 +4,57 @@ def test_FNIRT_inputs(): input_map = dict( - affine_file=dict( - argstr='--aff=%s', - extensions=None, - ), - apply_inmask=dict( - argstr='--applyinmask=%s', - 
sep=',', - xor=['skip_inmask'], - ), + affine_file=dict(argstr="--aff=%s", extensions=None,), + apply_inmask=dict(argstr="--applyinmask=%s", sep=",", xor=["skip_inmask"],), apply_intensity_mapping=dict( - argstr='--estint=%s', - sep=',', - xor=['skip_intensity_mapping'], - ), - apply_refmask=dict( - argstr='--applyrefmask=%s', - sep=',', - xor=['skip_refmask'], - ), - args=dict(argstr='%s', ), - bias_regularization_lambda=dict(argstr='--biaslambda=%f', ), - biasfield_resolution=dict(argstr='--biasres=%d,%d,%d', ), - config_file=dict(argstr='--config=%s', ), - derive_from_ref=dict(argstr='--refderiv', ), - environ=dict( - nohash=True, - usedefault=True, - ), - field_file=dict( - argstr='--fout=%s', - hash_files=False, - ), - fieldcoeff_file=dict(argstr='--cout=%s', ), - hessian_precision=dict(argstr='--numprec=%s', ), - in_file=dict( - argstr='--in=%s', - extensions=None, - mandatory=True, - ), - in_fwhm=dict( - argstr='--infwhm=%s', - sep=',', - ), - in_intensitymap_file=dict( - argstr='--intin=%s', - copyfile=False, - ), - inmask_file=dict( - argstr='--inmask=%s', - extensions=None, - ), - inmask_val=dict(argstr='--impinval=%f', ), - intensity_mapping_model=dict(argstr='--intmod=%s', ), - intensity_mapping_order=dict(argstr='--intorder=%d', ), - inwarp_file=dict( - argstr='--inwarp=%s', - extensions=None, - ), - jacobian_file=dict( - argstr='--jout=%s', - hash_files=False, - ), - jacobian_range=dict(argstr='--jacrange=%f,%f', ), + argstr="--estint=%s", sep=",", xor=["skip_intensity_mapping"], + ), + apply_refmask=dict(argstr="--applyrefmask=%s", sep=",", xor=["skip_refmask"],), + args=dict(argstr="%s",), + bias_regularization_lambda=dict(argstr="--biaslambda=%f",), + biasfield_resolution=dict(argstr="--biasres=%d,%d,%d",), + config_file=dict(argstr="--config=%s",), + derive_from_ref=dict(argstr="--refderiv",), + environ=dict(nohash=True, usedefault=True,), + field_file=dict(argstr="--fout=%s", hash_files=False,), + fieldcoeff_file=dict(argstr="--cout=%s",), + hessian_precision=dict(argstr="--numprec=%s",), + in_file=dict(argstr="--in=%s", extensions=None, mandatory=True,), + in_fwhm=dict(argstr="--infwhm=%s", sep=",",), + in_intensitymap_file=dict(argstr="--intin=%s", copyfile=False,), + inmask_file=dict(argstr="--inmask=%s", extensions=None,), + inmask_val=dict(argstr="--impinval=%f",), + intensity_mapping_model=dict(argstr="--intmod=%s",), + intensity_mapping_order=dict(argstr="--intorder=%d",), + inwarp_file=dict(argstr="--inwarp=%s", extensions=None,), + jacobian_file=dict(argstr="--jout=%s", hash_files=False,), + jacobian_range=dict(argstr="--jacrange=%f,%f",), log_file=dict( - argstr='--logout=%s', - extensions=None, - genfile=True, - hash_files=False, - ), - max_nonlin_iter=dict( - argstr='--miter=%s', - sep=',', - ), - modulatedref_file=dict( - argstr='--refout=%s', - hash_files=False, - ), - out_intensitymap_file=dict( - argstr='--intout=%s', - hash_files=False, + argstr="--logout=%s", extensions=None, genfile=True, hash_files=False, ), + max_nonlin_iter=dict(argstr="--miter=%s", sep=",",), + modulatedref_file=dict(argstr="--refout=%s", hash_files=False,), + out_intensitymap_file=dict(argstr="--intout=%s", hash_files=False,), output_type=dict(), - ref_file=dict( - argstr='--ref=%s', - extensions=None, - mandatory=True, - ), - ref_fwhm=dict( - argstr='--reffwhm=%s', - sep=',', - ), - refmask_file=dict( - argstr='--refmask=%s', - extensions=None, - ), - refmask_val=dict(argstr='--imprefval=%f', ), - regularization_lambda=dict( - argstr='--lambda=%s', - sep=',', - ), - 
regularization_model=dict(argstr='--regmod=%s', ), - skip_implicit_in_masking=dict(argstr='--impinm=0', ), - skip_implicit_ref_masking=dict(argstr='--imprefm=0', ), - skip_inmask=dict( - argstr='--applyinmask=0', - xor=['apply_inmask'], - ), + ref_file=dict(argstr="--ref=%s", extensions=None, mandatory=True,), + ref_fwhm=dict(argstr="--reffwhm=%s", sep=",",), + refmask_file=dict(argstr="--refmask=%s", extensions=None,), + refmask_val=dict(argstr="--imprefval=%f",), + regularization_lambda=dict(argstr="--lambda=%s", sep=",",), + regularization_model=dict(argstr="--regmod=%s",), + skip_implicit_in_masking=dict(argstr="--impinm=0",), + skip_implicit_ref_masking=dict(argstr="--imprefm=0",), + skip_inmask=dict(argstr="--applyinmask=0", xor=["apply_inmask"],), skip_intensity_mapping=dict( - argstr='--estint=0', - xor=['apply_intensity_mapping'], - ), - skip_lambda_ssq=dict(argstr='--ssqlambda=0', ), - skip_refmask=dict( - argstr='--applyrefmask=0', - xor=['apply_refmask'], + argstr="--estint=0", xor=["apply_intensity_mapping"], ), - spline_order=dict(argstr='--splineorder=%d', ), - subsampling_scheme=dict( - argstr='--subsamp=%s', - sep=',', - ), - warp_resolution=dict(argstr='--warpres=%d,%d,%d', ), + skip_lambda_ssq=dict(argstr="--ssqlambda=0",), + skip_refmask=dict(argstr="--applyrefmask=0", xor=["apply_refmask"],), + spline_order=dict(argstr="--splineorder=%d",), + subsampling_scheme=dict(argstr="--subsamp=%s", sep=",",), + warp_resolution=dict(argstr="--warpres=%d,%d,%d",), warped_file=dict( - argstr='--iout=%s', - extensions=None, - genfile=True, - hash_files=False, + argstr="--iout=%s", extensions=None, genfile=True, hash_files=False, ), ) inputs = FNIRT.input_spec() @@ -138,15 +62,17 @@ def test_FNIRT_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FNIRT_outputs(): output_map = dict( - field_file=dict(extensions=None, ), - fieldcoeff_file=dict(extensions=None, ), - jacobian_file=dict(extensions=None, ), - log_file=dict(extensions=None, ), - modulatedref_file=dict(extensions=None, ), + field_file=dict(extensions=None,), + fieldcoeff_file=dict(extensions=None,), + jacobian_file=dict(extensions=None,), + log_file=dict(extensions=None,), + modulatedref_file=dict(extensions=None,), out_intensitymap_file=dict(), - warped_file=dict(extensions=None, ), + warped_file=dict(extensions=None,), ) outputs = FNIRT.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_FSLCommand.py b/nipype/interfaces/fsl/tests/test_auto_FSLCommand.py index 5398e4532d..bc4c0443ee 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FSLCommand.py +++ b/nipype/interfaces/fsl/tests/test_auto_FSLCommand.py @@ -4,11 +4,8 @@ def test_FSLCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), output_type=dict(), ) inputs = FSLCommand.input_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_FSLXCommand.py b/nipype/interfaces/fsl/tests/test_auto_FSLXCommand.py index 4ca61eebf1..d9c30cd262 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FSLXCommand.py +++ b/nipype/interfaces/fsl/tests/test_auto_FSLXCommand.py @@ -4,111 +4,52 @@ def test_FSLXCommand_inputs(): input_map = dict( - all_ard=dict( - argstr='--allard', - xor=('no_ard', 'all_ard'), - ), - args=dict(argstr='%s', ), - burn_in=dict( - argstr='--burnin=%d', - usedefault=True, - ), - 
burn_in_no_ard=dict( - argstr='--burnin_noard=%d', - usedefault=True, - ), - bvals=dict( - argstr='--bvals=%s', - extensions=None, - mandatory=True, - ), - bvecs=dict( - argstr='--bvecs=%s', - extensions=None, - mandatory=True, - ), + all_ard=dict(argstr="--allard", xor=("no_ard", "all_ard"),), + args=dict(argstr="%s",), + burn_in=dict(argstr="--burnin=%d", usedefault=True,), + burn_in_no_ard=dict(argstr="--burnin_noard=%d", usedefault=True,), + bvals=dict(argstr="--bvals=%s", extensions=None, mandatory=True,), + bvecs=dict(argstr="--bvecs=%s", extensions=None, mandatory=True,), cnlinear=dict( - argstr='--cnonlinear', - xor=('no_spat', 'non_linear', 'cnlinear'), - ), - dwi=dict( - argstr='--data=%s', - extensions=None, - mandatory=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - f0_ard=dict( - argstr='--f0 --ardf0', - xor=['f0_noard', 'f0_ard', 'all_ard'], - ), - f0_noard=dict( - argstr='--f0', - xor=['f0_noard', 'f0_ard'], - ), - force_dir=dict( - argstr='--forcedir', - usedefault=True, - ), - fudge=dict(argstr='--fudge=%d', ), - logdir=dict( - argstr='--logdir=%s', - usedefault=True, - ), - mask=dict( - argstr='--mask=%s', - extensions=None, - mandatory=True, - ), - model=dict(argstr='--model=%d', ), - n_fibres=dict( - argstr='--nfibres=%d', - mandatory=True, - usedefault=True, - ), - n_jumps=dict( - argstr='--njumps=%d', - usedefault=True, - ), - no_ard=dict( - argstr='--noard', - xor=('no_ard', 'all_ard'), - ), - no_spat=dict( - argstr='--nospat', - xor=('no_spat', 'non_linear', 'cnlinear'), - ), + argstr="--cnonlinear", xor=("no_spat", "non_linear", "cnlinear"), + ), + dwi=dict(argstr="--data=%s", extensions=None, mandatory=True,), + environ=dict(nohash=True, usedefault=True,), + f0_ard=dict(argstr="--f0 --ardf0", xor=["f0_noard", "f0_ard", "all_ard"],), + f0_noard=dict(argstr="--f0", xor=["f0_noard", "f0_ard"],), + force_dir=dict(argstr="--forcedir", usedefault=True,), + fudge=dict(argstr="--fudge=%d",), + logdir=dict(argstr="--logdir=%s", usedefault=True,), + mask=dict(argstr="--mask=%s", extensions=None, mandatory=True,), + model=dict(argstr="--model=%d",), + n_fibres=dict(argstr="--nfibres=%d", mandatory=True, usedefault=True,), + n_jumps=dict(argstr="--njumps=%d", usedefault=True,), + no_ard=dict(argstr="--noard", xor=("no_ard", "all_ard"),), + no_spat=dict(argstr="--nospat", xor=("no_spat", "non_linear", "cnlinear"),), non_linear=dict( - argstr='--nonlinear', - xor=('no_spat', 'non_linear', 'cnlinear'), + argstr="--nonlinear", xor=("no_spat", "non_linear", "cnlinear"), ), output_type=dict(), - rician=dict(argstr='--rician', ), - sample_every=dict( - argstr='--sampleevery=%d', - usedefault=True, - ), - seed=dict(argstr='--seed=%d', ), - update_proposal_every=dict( - argstr='--updateproposalevery=%d', - usedefault=True, - ), + rician=dict(argstr="--rician",), + sample_every=dict(argstr="--sampleevery=%d", usedefault=True,), + seed=dict(argstr="--seed=%d",), + update_proposal_every=dict(argstr="--updateproposalevery=%d", usedefault=True,), ) inputs = FSLXCommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FSLXCommand_outputs(): output_map = dict( dyads=dict(), fsamples=dict(), - mean_S0samples=dict(extensions=None, ), - mean_dsamples=dict(extensions=None, ), + mean_S0samples=dict(extensions=None,), + mean_dsamples=dict(extensions=None,), mean_fsamples=dict(), - mean_tausamples=dict(extensions=None, ), + 
mean_tausamples=dict(extensions=None,), phsamples=dict(), thsamples=dict(), ) diff --git a/nipype/interfaces/fsl/tests/test_auto_FUGUE.py b/nipype/interfaces/fsl/tests/test_auto_FUGUE.py index d1fcb85640..a1f6873658 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FUGUE.py +++ b/nipype/interfaces/fsl/tests/test_auto_FUGUE.py @@ -4,86 +4,50 @@ def test_FUGUE_inputs(): input_map = dict( - args=dict(argstr='%s', ), - asym_se_time=dict(argstr='--asym=%.10f', ), - despike_2dfilter=dict(argstr='--despike', ), - despike_threshold=dict(argstr='--despikethreshold=%s', ), - dwell_time=dict(argstr='--dwell=%.10f', ), - dwell_to_asym_ratio=dict(argstr='--dwelltoasym=%.10f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fmap_in_file=dict( - argstr='--loadfmap=%s', - extensions=None, - ), - fmap_out_file=dict( - argstr='--savefmap=%s', - extensions=None, - ), - forward_warping=dict(usedefault=True, ), - fourier_order=dict(argstr='--fourier=%d', ), - icorr=dict( - argstr='--icorr', - requires=['shift_in_file'], - ), - icorr_only=dict( - argstr='--icorronly', - requires=['unwarped_file'], - ), - in_file=dict( - argstr='--in=%s', - extensions=None, - ), - mask_file=dict( - argstr='--mask=%s', - extensions=None, - ), - median_2dfilter=dict(argstr='--median', ), - no_extend=dict(argstr='--noextend', ), - no_gap_fill=dict(argstr='--nofill', ), - nokspace=dict(argstr='--nokspace', ), + args=dict(argstr="%s",), + asym_se_time=dict(argstr="--asym=%.10f",), + despike_2dfilter=dict(argstr="--despike",), + despike_threshold=dict(argstr="--despikethreshold=%s",), + dwell_time=dict(argstr="--dwell=%.10f",), + dwell_to_asym_ratio=dict(argstr="--dwelltoasym=%.10f",), + environ=dict(nohash=True, usedefault=True,), + fmap_in_file=dict(argstr="--loadfmap=%s", extensions=None,), + fmap_out_file=dict(argstr="--savefmap=%s", extensions=None,), + forward_warping=dict(usedefault=True,), + fourier_order=dict(argstr="--fourier=%d",), + icorr=dict(argstr="--icorr", requires=["shift_in_file"],), + icorr_only=dict(argstr="--icorronly", requires=["unwarped_file"],), + in_file=dict(argstr="--in=%s", extensions=None,), + mask_file=dict(argstr="--mask=%s", extensions=None,), + median_2dfilter=dict(argstr="--median",), + no_extend=dict(argstr="--noextend",), + no_gap_fill=dict(argstr="--nofill",), + nokspace=dict(argstr="--nokspace",), output_type=dict(), - pava=dict(argstr='--pava', ), - phase_conjugate=dict(argstr='--phaseconj', ), - phasemap_in_file=dict( - argstr='--phasemap=%s', - extensions=None, - ), - poly_order=dict(argstr='--poly=%d', ), - save_fmap=dict(xor=['save_unmasked_fmap'], ), - save_shift=dict(xor=['save_unmasked_shift'], ), - save_unmasked_fmap=dict( - argstr='--unmaskfmap', - xor=['save_fmap'], - ), - save_unmasked_shift=dict( - argstr='--unmaskshift', - xor=['save_shift'], - ), - shift_in_file=dict( - argstr='--loadshift=%s', - extensions=None, - ), - shift_out_file=dict( - argstr='--saveshift=%s', - extensions=None, - ), - smooth2d=dict(argstr='--smooth2=%.2f', ), - smooth3d=dict(argstr='--smooth3=%.2f', ), - unwarp_direction=dict(argstr='--unwarpdir=%s', ), + pava=dict(argstr="--pava",), + phase_conjugate=dict(argstr="--phaseconj",), + phasemap_in_file=dict(argstr="--phasemap=%s", extensions=None,), + poly_order=dict(argstr="--poly=%d",), + save_fmap=dict(xor=["save_unmasked_fmap"],), + save_shift=dict(xor=["save_unmasked_shift"],), + save_unmasked_fmap=dict(argstr="--unmaskfmap", xor=["save_fmap"],), + save_unmasked_shift=dict(argstr="--unmaskshift", xor=["save_shift"],), + 
shift_in_file=dict(argstr="--loadshift=%s", extensions=None,), + shift_out_file=dict(argstr="--saveshift=%s", extensions=None,), + smooth2d=dict(argstr="--smooth2=%.2f",), + smooth3d=dict(argstr="--smooth3=%.2f",), + unwarp_direction=dict(argstr="--unwarpdir=%s",), unwarped_file=dict( - argstr='--unwarp=%s', + argstr="--unwarp=%s", extensions=None, - requires=['in_file'], - xor=['warped_file'], + requires=["in_file"], + xor=["warped_file"], ), warped_file=dict( - argstr='--warp=%s', + argstr="--warp=%s", extensions=None, - requires=['in_file'], - xor=['unwarped_file'], + requires=["in_file"], + xor=["unwarped_file"], ), ) inputs = FUGUE.input_spec() @@ -91,12 +55,14 @@ def test_FUGUE_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FUGUE_outputs(): output_map = dict( - fmap_out_file=dict(extensions=None, ), - shift_out_file=dict(extensions=None, ), - unwarped_file=dict(extensions=None, ), - warped_file=dict(extensions=None, ), + fmap_out_file=dict(extensions=None,), + shift_out_file=dict(extensions=None,), + unwarped_file=dict(extensions=None,), + warped_file=dict(extensions=None,), ) outputs = FUGUE.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_FeatureExtractor.py b/nipype/interfaces/fsl/tests/test_auto_FeatureExtractor.py index 20fc8061f3..03d627a0bf 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FeatureExtractor.py +++ b/nipype/interfaces/fsl/tests/test_auto_FeatureExtractor.py @@ -4,28 +4,19 @@ def test_FeatureExtractor_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - mel_ica=dict( - argstr='%s', - copyfile=False, - position=-1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + mel_ica=dict(argstr="%s", copyfile=False, position=-1,), ) inputs = FeatureExtractor.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FeatureExtractor_outputs(): - output_map = dict(mel_ica=dict( - argstr='%s', - copyfile=False, - position=-1, - ), ) + output_map = dict(mel_ica=dict(argstr="%s", copyfile=False, position=-1,),) outputs = FeatureExtractor.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_FilterRegressor.py b/nipype/interfaces/fsl/tests/test_auto_FilterRegressor.py index 47470f5209..d63a61ea1c 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FilterRegressor.py +++ b/nipype/interfaces/fsl/tests/test_auto_FilterRegressor.py @@ -4,57 +4,33 @@ def test_FilterRegressor_inputs(): input_map = dict( - args=dict(argstr='%s', ), - design_file=dict( - argstr='-d %s', - extensions=None, - mandatory=True, - position=3, - ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + design_file=dict(argstr="-d %s", extensions=None, mandatory=True, position=3,), + environ=dict(nohash=True, usedefault=True,), filter_all=dict( - argstr="-f '%s'", - mandatory=True, - position=4, - xor=['filter_columns'], + argstr="-f '%s'", mandatory=True, position=4, xor=["filter_columns"], ), filter_columns=dict( - argstr="-f '%s'", - mandatory=True, - position=4, - xor=['filter_all'], - ), - in_file=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - position=1, - ), - mask=dict( - argstr='-m %s', - extensions=None, + argstr="-f '%s'", mandatory=True, position=4, 
xor=["filter_all"], ), + in_file=dict(argstr="-i %s", extensions=None, mandatory=True, position=1,), + mask=dict(argstr="-m %s", extensions=None,), out_file=dict( - argstr='-o %s', - extensions=None, - genfile=True, - hash_files=False, - position=2, + argstr="-o %s", extensions=None, genfile=True, hash_files=False, position=2, ), - out_vnscales=dict(argstr='--out_vnscales', ), + out_vnscales=dict(argstr="--out_vnscales",), output_type=dict(), - var_norm=dict(argstr='--vn', ), + var_norm=dict(argstr="--vn",), ) inputs = FilterRegressor.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FilterRegressor_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = FilterRegressor.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_FindTheBiggest.py b/nipype/interfaces/fsl/tests/test_auto_FindTheBiggest.py index a6ed3974e1..0152e34ed0 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FindTheBiggest.py +++ b/nipype/interfaces/fsl/tests/test_auto_FindTheBiggest.py @@ -4,22 +4,11 @@ def test_FindTheBiggest_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_files=dict( - argstr='%s', - mandatory=True, - position=0, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_files=dict(argstr="%s", mandatory=True, position=0,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=2, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=2, ), output_type=dict(), ) @@ -28,11 +17,10 @@ def test_FindTheBiggest_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FindTheBiggest_outputs(): - output_map = dict(out_file=dict( - argstr='%s', - extensions=None, - ), ) + output_map = dict(out_file=dict(argstr="%s", extensions=None,),) outputs = FindTheBiggest.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_GLM.py b/nipype/interfaces/fsl/tests/test_auto_GLM.py index 887e5fcee5..61a550884d 100644 --- a/nipype/interfaces/fsl/tests/test_auto_GLM.py +++ b/nipype/interfaces/fsl/tests/test_auto_GLM.py @@ -4,101 +4,51 @@ def test_GLM_inputs(): input_map = dict( - args=dict(argstr='%s', ), - contrasts=dict( - argstr='-c %s', - extensions=None, - ), - dat_norm=dict(argstr='--dat_norm', ), - demean=dict(argstr='--demean', ), - des_norm=dict(argstr='--des_norm', ), - design=dict( - argstr='-d %s', - extensions=None, - mandatory=True, - position=2, - ), - dof=dict(argstr='--dof=%d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - position=1, - ), - mask=dict( - argstr='-m %s', - extensions=None, - ), - out_cope=dict( - argstr='--out_cope=%s', - extensions=None, - ), - out_data_name=dict( - argstr='--out_data=%s', - extensions=None, - ), - out_f_name=dict( - argstr='--out_f=%s', - extensions=None, - ), + args=dict(argstr="%s",), + contrasts=dict(argstr="-c %s", extensions=None,), + dat_norm=dict(argstr="--dat_norm",), + demean=dict(argstr="--demean",), + des_norm=dict(argstr="--des_norm",), + design=dict(argstr="-d %s", extensions=None, mandatory=True, position=2,), + 
dof=dict(argstr="--dof=%d",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-i %s", extensions=None, mandatory=True, position=1,), + mask=dict(argstr="-m %s", extensions=None,), + out_cope=dict(argstr="--out_cope=%s", extensions=None,), + out_data_name=dict(argstr="--out_data=%s", extensions=None,), + out_f_name=dict(argstr="--out_f=%s", extensions=None,), out_file=dict( - argstr='-o %s', + argstr="-o %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_glm', + name_source="in_file", + name_template="%s_glm", position=3, ), - out_p_name=dict( - argstr='--out_p=%s', - extensions=None, - ), - out_pf_name=dict( - argstr='--out_pf=%s', - extensions=None, - ), - out_res_name=dict( - argstr='--out_res=%s', - extensions=None, - ), - out_sigsq_name=dict( - argstr='--out_sigsq=%s', - extensions=None, - ), - out_t_name=dict( - argstr='--out_t=%s', - extensions=None, - ), - out_varcb_name=dict( - argstr='--out_varcb=%s', - extensions=None, - ), - out_vnscales_name=dict( - argstr='--out_vnscales=%s', - extensions=None, - ), - out_z_name=dict( - argstr='--out_z=%s', - extensions=None, - ), + out_p_name=dict(argstr="--out_p=%s", extensions=None,), + out_pf_name=dict(argstr="--out_pf=%s", extensions=None,), + out_res_name=dict(argstr="--out_res=%s", extensions=None,), + out_sigsq_name=dict(argstr="--out_sigsq=%s", extensions=None,), + out_t_name=dict(argstr="--out_t=%s", extensions=None,), + out_varcb_name=dict(argstr="--out_varcb=%s", extensions=None,), + out_vnscales_name=dict(argstr="--out_vnscales=%s", extensions=None,), + out_z_name=dict(argstr="--out_z=%s", extensions=None,), output_type=dict(), - var_norm=dict(argstr='--vn', ), + var_norm=dict(argstr="--vn",), ) inputs = GLM.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GLM_outputs(): output_map = dict( out_cope=dict(), out_data=dict(), out_f=dict(), - out_file=dict(extensions=None, ), + out_file=dict(extensions=None,), out_p=dict(), out_pf=dict(), out_res=dict(), diff --git a/nipype/interfaces/fsl/tests/test_auto_ICA_AROMA.py b/nipype/interfaces/fsl/tests/test_auto_ICA_AROMA.py index cb55f36e28..51975c5bef 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ICA_AROMA.py +++ b/nipype/interfaces/fsl/tests/test_auto_ICA_AROMA.py @@ -4,68 +4,39 @@ def test_ICA_AROMA_inputs(): input_map = dict( - TR=dict(argstr='-tr %.3f', ), - args=dict(argstr='%s', ), - denoise_type=dict( - argstr='-den %s', - mandatory=True, - usedefault=True, - ), - dim=dict(argstr='-dim %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), + TR=dict(argstr="-tr %.3f",), + args=dict(argstr="%s",), + denoise_type=dict(argstr="-den %s", mandatory=True, usedefault=True,), + dim=dict(argstr="-dim %d",), + environ=dict(nohash=True, usedefault=True,), feat_dir=dict( - argstr='-feat %s', + argstr="-feat %s", mandatory=True, - xor=[ - 'in_file', 'mat_file', 'fnirt_warp_file', 'motion_parameters' - ], - ), - fnirt_warp_file=dict( - argstr='-warp %s', - extensions=None, - xor=['feat_dir'], + xor=["in_file", "mat_file", "fnirt_warp_file", "motion_parameters"], ), + fnirt_warp_file=dict(argstr="-warp %s", extensions=None, xor=["feat_dir"],), in_file=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - xor=['feat_dir'], - ), - mask=dict( - argstr='-m %s', - extensions=None, - xor=['feat_dir'], + argstr="-i %s", extensions=None, mandatory=True, xor=["feat_dir"], ), - mat_file=dict( - 
argstr='-affmat %s', - extensions=None, - xor=['feat_dir'], - ), - melodic_dir=dict(argstr='-meldir %s', ), + mask=dict(argstr="-m %s", extensions=None, xor=["feat_dir"],), + mat_file=dict(argstr="-affmat %s", extensions=None, xor=["feat_dir"],), + melodic_dir=dict(argstr="-meldir %s",), motion_parameters=dict( - argstr='-mc %s', - extensions=None, - mandatory=True, - xor=['feat_dir'], - ), - out_dir=dict( - argstr='-o %s', - mandatory=True, - usedefault=True, + argstr="-mc %s", extensions=None, mandatory=True, xor=["feat_dir"], ), + out_dir=dict(argstr="-o %s", mandatory=True, usedefault=True,), ) inputs = ICA_AROMA.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ICA_AROMA_outputs(): output_map = dict( - aggr_denoised_file=dict(extensions=None, ), - nonaggr_denoised_file=dict(extensions=None, ), + aggr_denoised_file=dict(extensions=None,), + nonaggr_denoised_file=dict(extensions=None,), out_dir=dict(), ) outputs = ICA_AROMA.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_ImageMaths.py b/nipype/interfaces/fsl/tests/test_auto_ImageMaths.py index 9c3c254fea..02a73d2662 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ImageMaths.py +++ b/nipype/interfaces/fsl/tests/test_auto_ImageMaths.py @@ -4,40 +4,15 @@ def test_ImageMaths_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=1, - ), - in_file2=dict( - argstr='%s', - extensions=None, - position=3, - ), - mask_file=dict( - argstr='-mas %s', - extensions=None, - ), - op_string=dict( - argstr='%s', - position=2, - ), - out_data_type=dict( - argstr='-odt %s', - position=-1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=1,), + in_file2=dict(argstr="%s", extensions=None, position=3,), + mask_file=dict(argstr="-mas %s", extensions=None,), + op_string=dict(argstr="%s", position=2,), + out_data_type=dict(argstr="-odt %s", position=-1,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-2, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_type=dict(), suffix=dict(), @@ -47,8 +22,10 @@ def test_ImageMaths_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ImageMaths_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ImageMaths.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_ImageMeants.py b/nipype/interfaces/fsl/tests/test_auto_ImageMeants.py index 2565d8536d..2d53d25c1f 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ImageMeants.py +++ b/nipype/interfaces/fsl/tests/test_auto_ImageMeants.py @@ -4,46 +4,29 @@ def test_ImageMeants_inputs(): input_map = dict( - args=dict(argstr='%s', ), - eig=dict(argstr='--eig', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - position=0, - ), - mask=dict( - argstr='-m %s', - extensions=None, - ), - nobin=dict(argstr='--no_bin', ), - order=dict( - argstr='--order=%d', - usedefault=True, - ), - out_file=dict( - 
argstr='-o %s', - extensions=None, - genfile=True, - hash_files=False, - ), + args=dict(argstr="%s",), + eig=dict(argstr="--eig",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-i %s", extensions=None, mandatory=True, position=0,), + mask=dict(argstr="-m %s", extensions=None,), + nobin=dict(argstr="--no_bin",), + order=dict(argstr="--order=%d", usedefault=True,), + out_file=dict(argstr="-o %s", extensions=None, genfile=True, hash_files=False,), output_type=dict(), - show_all=dict(argstr='--showall', ), - spatial_coord=dict(argstr='-c %s', ), - transpose=dict(argstr='--transpose', ), - use_mm=dict(argstr='--usemm', ), + show_all=dict(argstr="--showall",), + spatial_coord=dict(argstr="-c %s",), + transpose=dict(argstr="--transpose",), + use_mm=dict(argstr="--usemm",), ) inputs = ImageMeants.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ImageMeants_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ImageMeants.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_ImageStats.py b/nipype/interfaces/fsl/tests/test_auto_ImageStats.py index f1ad146797..0c309880bb 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ImageStats.py +++ b/nipype/interfaces/fsl/tests/test_auto_ImageStats.py @@ -4,44 +4,24 @@ def test_ImageStats_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=3, - ), - index_mask_file=dict( - argstr='-K %s', - extensions=None, - position=2, - ), - mask_file=dict( - argstr='', - extensions=None, - ), - op_string=dict( - argstr='%s', - mandatory=True, - position=4, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=3,), + index_mask_file=dict(argstr="-K %s", extensions=None, position=2,), + mask_file=dict(argstr="", extensions=None,), + op_string=dict(argstr="%s", mandatory=True, position=4,), output_type=dict(), - split_4d=dict( - argstr='-t', - position=1, - ), + split_4d=dict(argstr="-t", position=1,), ) inputs = ImageStats.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ImageStats_outputs(): - output_map = dict(out_stat=dict(), ) + output_map = dict(out_stat=dict(),) outputs = ImageStats.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_InvWarp.py b/nipype/interfaces/fsl/tests/test_auto_InvWarp.py index b78ede0ec4..b116f19737 100644 --- a/nipype/interfaces/fsl/tests/test_auto_InvWarp.py +++ b/nipype/interfaces/fsl/tests/test_auto_InvWarp.py @@ -4,50 +4,35 @@ def test_InvWarp_inputs(): input_map = dict( - absolute=dict( - argstr='--abs', - xor=['relative'], - ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + absolute=dict(argstr="--abs", xor=["relative"],), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), inverse_warp=dict( - argstr='--out=%s', + argstr="--out=%s", extensions=None, hash_files=False, - name_source=['warp'], - name_template='%s_inverse', + name_source=["warp"], + name_template="%s_inverse", ), - 
jacobian_max=dict(argstr='--jmax=%f', ), - jacobian_min=dict(argstr='--jmin=%f', ), - niter=dict(argstr='--niter=%d', ), - noconstraint=dict(argstr='--noconstraint', ), + jacobian_max=dict(argstr="--jmax=%f",), + jacobian_min=dict(argstr="--jmin=%f",), + niter=dict(argstr="--niter=%d",), + noconstraint=dict(argstr="--noconstraint",), output_type=dict(), - reference=dict( - argstr='--ref=%s', - extensions=None, - mandatory=True, - ), - regularise=dict(argstr='--regularise=%f', ), - relative=dict( - argstr='--rel', - xor=['absolute'], - ), - warp=dict( - argstr='--warp=%s', - extensions=None, - mandatory=True, - ), + reference=dict(argstr="--ref=%s", extensions=None, mandatory=True,), + regularise=dict(argstr="--regularise=%f",), + relative=dict(argstr="--rel", xor=["absolute"],), + warp=dict(argstr="--warp=%s", extensions=None, mandatory=True,), ) inputs = InvWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_InvWarp_outputs(): - output_map = dict(inverse_warp=dict(extensions=None, ), ) + output_map = dict(inverse_warp=dict(extensions=None,),) outputs = InvWarp.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_IsotropicSmooth.py b/nipype/interfaces/fsl/tests/test_auto_IsotropicSmooth.py index 5cad38954b..58186672ec 100644 --- a/nipype/interfaces/fsl/tests/test_auto_IsotropicSmooth.py +++ b/nipype/interfaces/fsl/tests/test_auto_IsotropicSmooth.py @@ -4,57 +4,28 @@ def test_IsotropicSmooth_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fwhm=dict( - argstr='-s %.5f', - mandatory=True, - position=4, - xor=['sigma'], - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - internal_datatype=dict( - argstr='-dt %s', - position=1, - ), - nan2zeros=dict( - argstr='-nan', - position=3, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + fwhm=dict(argstr="-s %.5f", mandatory=True, position=4, xor=["sigma"],), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + internal_datatype=dict(argstr="-dt %s", position=1,), + nan2zeros=dict(argstr="-nan", position=3,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-2, - ), - output_datatype=dict( - argstr='-odt %s', - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), + output_datatype=dict(argstr="-odt %s", position=-1,), output_type=dict(), - sigma=dict( - argstr='-s %.5f', - mandatory=True, - position=4, - xor=['fwhm'], - ), + sigma=dict(argstr="-s %.5f", mandatory=True, position=4, xor=["fwhm"],), ) inputs = IsotropicSmooth.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_IsotropicSmooth_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = IsotropicSmooth.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_L2Model.py b/nipype/interfaces/fsl/tests/test_auto_L2Model.py index aad29206e2..6d16cc6038 100644 --- a/nipype/interfaces/fsl/tests/test_auto_L2Model.py +++ b/nipype/interfaces/fsl/tests/test_auto_L2Model.py @@ -3,17 +3,19 @@ def test_L2Model_inputs(): - input_map = 
dict(num_copes=dict(mandatory=True, ), ) + input_map = dict(num_copes=dict(mandatory=True,),) inputs = L2Model.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_L2Model_outputs(): output_map = dict( - design_con=dict(extensions=None, ), - design_grp=dict(extensions=None, ), - design_mat=dict(extensions=None, ), + design_con=dict(extensions=None,), + design_grp=dict(extensions=None,), + design_mat=dict(extensions=None,), ) outputs = L2Model.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_Level1Design.py b/nipype/interfaces/fsl/tests/test_auto_Level1Design.py index 8b1c076ac6..f8ed336e43 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Level1Design.py +++ b/nipype/interfaces/fsl/tests/test_auto_Level1Design.py @@ -4,23 +4,22 @@ def test_Level1Design_inputs(): input_map = dict( - bases=dict(mandatory=True, ), + bases=dict(mandatory=True,), contrasts=dict(), - interscan_interval=dict(mandatory=True, ), - model_serial_correlations=dict(mandatory=True, ), - orthogonalization=dict(usedefault=True, ), - session_info=dict(mandatory=True, ), + interscan_interval=dict(mandatory=True,), + model_serial_correlations=dict(mandatory=True,), + orthogonalization=dict(usedefault=True,), + session_info=dict(mandatory=True,), ) inputs = Level1Design.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Level1Design_outputs(): - output_map = dict( - ev_files=dict(), - fsf_files=dict(), - ) + output_map = dict(ev_files=dict(), fsf_files=dict(),) outputs = Level1Design.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_MCFLIRT.py b/nipype/interfaces/fsl/tests/test_auto_MCFLIRT.py index 84cf5cdc98..8f52f40eb0 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MCFLIRT.py +++ b/nipype/interfaces/fsl/tests/test_auto_MCFLIRT.py @@ -4,63 +4,48 @@ def test_MCFLIRT_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bins=dict(argstr='-bins %d', ), - cost=dict(argstr='-cost %s', ), - dof=dict(argstr='-dof %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - position=0, - ), - init=dict( - argstr='-init %s', - extensions=None, - ), - interpolation=dict(argstr='-%s_final', ), - mean_vol=dict(argstr='-meanvol', ), + args=dict(argstr="%s",), + bins=dict(argstr="-bins %d",), + cost=dict(argstr="-cost %s",), + dof=dict(argstr="-dof %d",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True, position=0,), + init=dict(argstr="-init %s", extensions=None,), + interpolation=dict(argstr="-%s_final",), + mean_vol=dict(argstr="-meanvol",), out_file=dict( - argstr='-out %s', - extensions=None, - genfile=True, - hash_files=False, + argstr="-out %s", extensions=None, genfile=True, hash_files=False, ), output_type=dict(), - ref_file=dict( - argstr='-reffile %s', - extensions=None, - ), - ref_vol=dict(argstr='-refvol %d', ), - rotation=dict(argstr='-rotation %d', ), - save_mats=dict(argstr='-mats', ), - save_plots=dict(argstr='-plots', ), - save_rms=dict(argstr='-rmsabs -rmsrel', ), - scaling=dict(argstr='-scaling %.2f', ), - smooth=dict(argstr='-smooth %.2f', ), - stages=dict(argstr='-stages %d', ), - stats_imgs=dict(argstr='-stats', ), - use_contour=dict(argstr='-edge', ), - 
use_gradient=dict(argstr='-gdt', ), + ref_file=dict(argstr="-reffile %s", extensions=None,), + ref_vol=dict(argstr="-refvol %d",), + rotation=dict(argstr="-rotation %d",), + save_mats=dict(argstr="-mats",), + save_plots=dict(argstr="-plots",), + save_rms=dict(argstr="-rmsabs -rmsrel",), + scaling=dict(argstr="-scaling %.2f",), + smooth=dict(argstr="-smooth %.2f",), + stages=dict(argstr="-stages %d",), + stats_imgs=dict(argstr="-stats",), + use_contour=dict(argstr="-edge",), + use_gradient=dict(argstr="-gdt",), ) inputs = MCFLIRT.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MCFLIRT_outputs(): output_map = dict( mat_file=dict(), - mean_img=dict(extensions=None, ), - out_file=dict(extensions=None, ), - par_file=dict(extensions=None, ), + mean_img=dict(extensions=None,), + out_file=dict(extensions=None,), + par_file=dict(extensions=None,), rms_files=dict(), - std_img=dict(extensions=None, ), - variance_img=dict(extensions=None, ), + std_img=dict(extensions=None,), + variance_img=dict(extensions=None,), ) outputs = MCFLIRT.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_MELODIC.py b/nipype/interfaces/fsl/tests/test_auto_MELODIC.py index 2c0aeab9f1..86e4e0e2a2 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MELODIC.py +++ b/nipype/interfaces/fsl/tests/test_auto_MELODIC.py @@ -4,106 +4,67 @@ def test_MELODIC_inputs(): input_map = dict( - ICs=dict( - argstr='--ICs=%s', - extensions=None, - ), - approach=dict(argstr='-a %s', ), - args=dict(argstr='%s', ), - bg_image=dict( - argstr='--bgimage=%s', - extensions=None, - ), - bg_threshold=dict(argstr='--bgthreshold=%f', ), - cov_weight=dict(argstr='--covarweight=%f', ), - dim=dict(argstr='-d %d', ), - dim_est=dict(argstr='--dimest=%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - epsilon=dict(argstr='--eps=%f', ), - epsilonS=dict(argstr='--epsS=%f', ), - in_files=dict( - argstr='-i %s', - mandatory=True, - position=0, - sep=',', - ), - log_power=dict(argstr='--logPower', ), - mask=dict( - argstr='-m %s', - extensions=None, - ), - max_restart=dict(argstr='--maxrestart=%d', ), - maxit=dict(argstr='--maxit=%d', ), - migp=dict(argstr='--migp', ), - migpN=dict(argstr='--migpN %d', ), - migp_factor=dict(argstr='--migp_factor %d', ), - migp_shuffle=dict(argstr='--migp_shuffle', ), - mix=dict( - argstr='--mix=%s', - extensions=None, - ), - mm_thresh=dict(argstr='--mmthresh=%f', ), - no_bet=dict(argstr='--nobet', ), - no_mask=dict(argstr='--nomask', ), - no_mm=dict(argstr='--no_mm', ), - non_linearity=dict(argstr='--nl=%s', ), - num_ICs=dict(argstr='-n %d', ), - out_all=dict(argstr='--Oall', ), - out_dir=dict( - argstr='-o %s', - genfile=True, - ), - out_mean=dict(argstr='--Omean', ), - out_orig=dict(argstr='--Oorig', ), - out_pca=dict(argstr='--Opca', ), - out_stats=dict(argstr='--Ostats', ), - out_unmix=dict(argstr='--Ounmix', ), - out_white=dict(argstr='--Owhite', ), + ICs=dict(argstr="--ICs=%s", extensions=None,), + approach=dict(argstr="-a %s",), + args=dict(argstr="%s",), + bg_image=dict(argstr="--bgimage=%s", extensions=None,), + bg_threshold=dict(argstr="--bgthreshold=%f",), + cov_weight=dict(argstr="--covarweight=%f",), + dim=dict(argstr="-d %d",), + dim_est=dict(argstr="--dimest=%s",), + environ=dict(nohash=True, usedefault=True,), + epsilon=dict(argstr="--eps=%f",), + epsilonS=dict(argstr="--epsS=%f",), + in_files=dict(argstr="-i %s", mandatory=True, position=0, sep=",",), + 
log_power=dict(argstr="--logPower",), + mask=dict(argstr="-m %s", extensions=None,), + max_restart=dict(argstr="--maxrestart=%d",), + maxit=dict(argstr="--maxit=%d",), + migp=dict(argstr="--migp",), + migpN=dict(argstr="--migpN %d",), + migp_factor=dict(argstr="--migp_factor %d",), + migp_shuffle=dict(argstr="--migp_shuffle",), + mix=dict(argstr="--mix=%s", extensions=None,), + mm_thresh=dict(argstr="--mmthresh=%f",), + no_bet=dict(argstr="--nobet",), + no_mask=dict(argstr="--nomask",), + no_mm=dict(argstr="--no_mm",), + non_linearity=dict(argstr="--nl=%s",), + num_ICs=dict(argstr="-n %d",), + out_all=dict(argstr="--Oall",), + out_dir=dict(argstr="-o %s", genfile=True,), + out_mean=dict(argstr="--Omean",), + out_orig=dict(argstr="--Oorig",), + out_pca=dict(argstr="--Opca",), + out_stats=dict(argstr="--Ostats",), + out_unmix=dict(argstr="--Ounmix",), + out_white=dict(argstr="--Owhite",), output_type=dict(), - pbsc=dict(argstr='--pbsc', ), - rem_cmp=dict(argstr='-f %d', ), - remove_deriv=dict(argstr='--remove_deriv', ), - report=dict(argstr='--report', ), - report_maps=dict(argstr='--report_maps=%s', ), - s_con=dict( - argstr='--Scon=%s', - extensions=None, - ), - s_des=dict( - argstr='--Sdes=%s', - extensions=None, - ), - sep_vn=dict(argstr='--sep_vn', ), - sep_whiten=dict(argstr='--sep_whiten', ), - smode=dict( - argstr='--smode=%s', - extensions=None, - ), - t_con=dict( - argstr='--Tcon=%s', - extensions=None, - ), - t_des=dict( - argstr='--Tdes=%s', - extensions=None, - ), - tr_sec=dict(argstr='--tr=%f', ), - update_mask=dict(argstr='--update_mask', ), - var_norm=dict(argstr='--vn', ), + pbsc=dict(argstr="--pbsc",), + rem_cmp=dict(argstr="-f %d",), + remove_deriv=dict(argstr="--remove_deriv",), + report=dict(argstr="--report",), + report_maps=dict(argstr="--report_maps=%s",), + s_con=dict(argstr="--Scon=%s", extensions=None,), + s_des=dict(argstr="--Sdes=%s", extensions=None,), + sep_vn=dict(argstr="--sep_vn",), + sep_whiten=dict(argstr="--sep_whiten",), + smode=dict(argstr="--smode=%s", extensions=None,), + t_con=dict(argstr="--Tcon=%s", extensions=None,), + t_des=dict(argstr="--Tdes=%s", extensions=None,), + tr_sec=dict(argstr="--tr=%f",), + update_mask=dict(argstr="--update_mask",), + var_norm=dict(argstr="--vn",), ) inputs = MELODIC.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MELODIC_outputs(): - output_map = dict( - out_dir=dict(), - report_dir=dict(), - ) + output_map = dict(out_dir=dict(), report_dir=dict(),) outputs = MELODIC.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_MakeDyadicVectors.py b/nipype/interfaces/fsl/tests/test_auto_MakeDyadicVectors.py index 05611f4b80..760072bab9 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MakeDyadicVectors.py +++ b/nipype/interfaces/fsl/tests/test_auto_MakeDyadicVectors.py @@ -4,51 +4,26 @@ def test_MakeDyadicVectors_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - mask=dict( - argstr='%s', - extensions=None, - position=2, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + mask=dict(argstr="%s", extensions=None, position=2,), output=dict( - argstr='%s', - extensions=None, - hash_files=False, - position=3, - usedefault=True, + argstr="%s", extensions=None, hash_files=False, position=3, usedefault=True, ), output_type=dict(), - perc=dict( - argstr='%f', - 
position=4, - ), - phi_vol=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=1, - ), - theta_vol=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=0, - ), + perc=dict(argstr="%f", position=4,), + phi_vol=dict(argstr="%s", extensions=None, mandatory=True, position=1,), + theta_vol=dict(argstr="%s", extensions=None, mandatory=True, position=0,), ) inputs = MakeDyadicVectors.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MakeDyadicVectors_outputs(): - output_map = dict( - dispersion=dict(extensions=None, ), - dyads=dict(extensions=None, ), - ) + output_map = dict(dispersion=dict(extensions=None,), dyads=dict(extensions=None,),) outputs = MakeDyadicVectors.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_MathsCommand.py b/nipype/interfaces/fsl/tests/test_auto_MathsCommand.py index ff7ae2090b..ce7f058663 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MathsCommand.py +++ b/nipype/interfaces/fsl/tests/test_auto_MathsCommand.py @@ -4,36 +4,15 @@ def test_MathsCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - internal_datatype=dict( - argstr='-dt %s', - position=1, - ), - nan2zeros=dict( - argstr='-nan', - position=3, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + internal_datatype=dict(argstr="-dt %s", position=1,), + nan2zeros=dict(argstr="-nan", position=3,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-2, - ), - output_datatype=dict( - argstr='-odt %s', - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), + output_datatype=dict(argstr="-odt %s", position=-1,), output_type=dict(), ) inputs = MathsCommand.input_spec() @@ -41,8 +20,10 @@ def test_MathsCommand_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MathsCommand_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = MathsCommand.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_MaxImage.py b/nipype/interfaces/fsl/tests/test_auto_MaxImage.py index 03fbb31d6e..1baa75963b 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MaxImage.py +++ b/nipype/interfaces/fsl/tests/test_auto_MaxImage.py @@ -4,41 +4,16 @@ def test_MaxImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict( - argstr='-%smax', - position=4, - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - internal_datatype=dict( - argstr='-dt %s', - position=1, - ), - nan2zeros=dict( - argstr='-nan', - position=3, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="-%smax", position=4, usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + internal_datatype=dict(argstr="-dt %s", position=1,), + nan2zeros=dict(argstr="-nan", 
position=3,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-2, - ), - output_datatype=dict( - argstr='-odt %s', - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), + output_datatype=dict(argstr="-odt %s", position=-1,), output_type=dict(), ) inputs = MaxImage.input_spec() @@ -46,8 +21,10 @@ def test_MaxImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MaxImage_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = MaxImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_MaxnImage.py b/nipype/interfaces/fsl/tests/test_auto_MaxnImage.py index a53b285396..aa52ba3bb7 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MaxnImage.py +++ b/nipype/interfaces/fsl/tests/test_auto_MaxnImage.py @@ -4,41 +4,16 @@ def test_MaxnImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict( - argstr='-%smaxn', - position=4, - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - internal_datatype=dict( - argstr='-dt %s', - position=1, - ), - nan2zeros=dict( - argstr='-nan', - position=3, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="-%smaxn", position=4, usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + internal_datatype=dict(argstr="-dt %s", position=1,), + nan2zeros=dict(argstr="-nan", position=3,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-2, - ), - output_datatype=dict( - argstr='-odt %s', - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), + output_datatype=dict(argstr="-odt %s", position=-1,), output_type=dict(), ) inputs = MaxnImage.input_spec() @@ -46,8 +21,10 @@ def test_MaxnImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MaxnImage_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = MaxnImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_MeanImage.py b/nipype/interfaces/fsl/tests/test_auto_MeanImage.py index ad2c6633ef..076cb08a76 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MeanImage.py +++ b/nipype/interfaces/fsl/tests/test_auto_MeanImage.py @@ -4,41 +4,16 @@ def test_MeanImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict( - argstr='-%smean', - position=4, - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - internal_datatype=dict( - argstr='-dt %s', - position=1, - ), - nan2zeros=dict( - argstr='-nan', - position=3, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="-%smean", position=4, usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + internal_datatype=dict(argstr="-dt %s", position=1,), + 
nan2zeros=dict(argstr="-nan", position=3,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-2, - ), - output_datatype=dict( - argstr='-odt %s', - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), + output_datatype=dict(argstr="-odt %s", position=-1,), output_type=dict(), ) inputs = MeanImage.input_spec() @@ -46,8 +21,10 @@ def test_MeanImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MeanImage_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = MeanImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_MedianImage.py b/nipype/interfaces/fsl/tests/test_auto_MedianImage.py index 38b06c61eb..a70ff14b2f 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MedianImage.py +++ b/nipype/interfaces/fsl/tests/test_auto_MedianImage.py @@ -4,41 +4,16 @@ def test_MedianImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict( - argstr='-%smedian', - position=4, - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - internal_datatype=dict( - argstr='-dt %s', - position=1, - ), - nan2zeros=dict( - argstr='-nan', - position=3, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="-%smedian", position=4, usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + internal_datatype=dict(argstr="-dt %s", position=1,), + nan2zeros=dict(argstr="-nan", position=3,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-2, - ), - output_datatype=dict( - argstr='-odt %s', - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), + output_datatype=dict(argstr="-odt %s", position=-1,), output_type=dict(), ) inputs = MedianImage.input_spec() @@ -46,8 +21,10 @@ def test_MedianImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MedianImage_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = MedianImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_Merge.py b/nipype/interfaces/fsl/tests/test_auto_Merge.py index 04cb5eea2c..45db6482a9 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Merge.py +++ b/nipype/interfaces/fsl/tests/test_auto_Merge.py @@ -4,42 +4,30 @@ def test_Merge_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict( - argstr='-%s', - mandatory=True, - position=0, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_files=dict( - argstr='%s', - mandatory=True, - position=2, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="-%s", mandatory=True, position=0,), + environ=dict(nohash=True, usedefault=True,), + in_files=dict(argstr="%s", mandatory=True, position=2,), merged_file=dict( - argstr='%s', + argstr="%s", extensions=None, hash_files=False, - name_source='in_files', - name_template='%s_merged', + name_source="in_files", + name_template="%s_merged", 
position=1, ), output_type=dict(), - tr=dict( - argstr='%.2f', - position=-1, - ), + tr=dict(argstr="%.2f", position=-1,), ) inputs = Merge.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Merge_outputs(): - output_map = dict(merged_file=dict(extensions=None, ), ) + output_map = dict(merged_file=dict(extensions=None,),) outputs = Merge.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_MinImage.py b/nipype/interfaces/fsl/tests/test_auto_MinImage.py index e16d5f2a26..9d5416bd15 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MinImage.py +++ b/nipype/interfaces/fsl/tests/test_auto_MinImage.py @@ -4,41 +4,16 @@ def test_MinImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict( - argstr='-%smin', - position=4, - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - internal_datatype=dict( - argstr='-dt %s', - position=1, - ), - nan2zeros=dict( - argstr='-nan', - position=3, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="-%smin", position=4, usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + internal_datatype=dict(argstr="-dt %s", position=1,), + nan2zeros=dict(argstr="-nan", position=3,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-2, - ), - output_datatype=dict( - argstr='-odt %s', - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), + output_datatype=dict(argstr="-odt %s", position=-1,), output_type=dict(), ) inputs = MinImage.input_spec() @@ -46,8 +21,10 @@ def test_MinImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MinImage_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = MinImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_MotionOutliers.py b/nipype/interfaces/fsl/tests/test_auto_MotionOutliers.py index 8ef7c61cb1..4c8ce55636 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MotionOutliers.py +++ b/nipype/interfaces/fsl/tests/test_auto_MotionOutliers.py @@ -4,60 +4,52 @@ def test_MotionOutliers_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dummy=dict(argstr='--dummy=%d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - ), - mask=dict( - argstr='-m %s', - extensions=None, - ), - metric=dict(argstr='--%s', ), - no_motion_correction=dict(argstr='--nomoco', ), + args=dict(argstr="%s",), + dummy=dict(argstr="--dummy=%d",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-i %s", extensions=None, mandatory=True,), + mask=dict(argstr="-m %s", extensions=None,), + metric=dict(argstr="--%s",), + no_motion_correction=dict(argstr="--nomoco",), out_file=dict( - argstr='-o %s', + argstr="-o %s", extensions=None, hash_files=False, keep_extension=True, - name_source='in_file', - name_template='%s_outliers.txt', + name_source="in_file", + name_template="%s_outliers.txt", ), out_metric_plot=dict( - 
argstr='-p %s', + argstr="-p %s", extensions=None, hash_files=False, keep_extension=True, - name_source='in_file', - name_template='%s_metrics.png', + name_source="in_file", + name_template="%s_metrics.png", ), out_metric_values=dict( - argstr='-s %s', + argstr="-s %s", extensions=None, hash_files=False, keep_extension=True, - name_source='in_file', - name_template='%s_metrics.txt', + name_source="in_file", + name_template="%s_metrics.txt", ), output_type=dict(), - threshold=dict(argstr='--thresh=%g', ), + threshold=dict(argstr="--thresh=%g",), ) inputs = MotionOutliers.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MotionOutliers_outputs(): output_map = dict( - out_file=dict(extensions=None, ), - out_metric_plot=dict(extensions=None, ), - out_metric_values=dict(extensions=None, ), + out_file=dict(extensions=None,), + out_metric_plot=dict(extensions=None,), + out_metric_values=dict(extensions=None,), ) outputs = MotionOutliers.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_MultiImageMaths.py b/nipype/interfaces/fsl/tests/test_auto_MultiImageMaths.py index a6963467a6..ba96daf994 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MultiImageMaths.py +++ b/nipype/interfaces/fsl/tests/test_auto_MultiImageMaths.py @@ -4,42 +4,17 @@ def test_MultiImageMaths_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - internal_datatype=dict( - argstr='-dt %s', - position=1, - ), - nan2zeros=dict( - argstr='-nan', - position=3, - ), - op_string=dict( - argstr='%s', - mandatory=True, - position=4, - ), - operand_files=dict(mandatory=True, ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + internal_datatype=dict(argstr="-dt %s", position=1,), + nan2zeros=dict(argstr="-nan", position=3,), + op_string=dict(argstr="%s", mandatory=True, position=4,), + operand_files=dict(mandatory=True,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-2, - ), - output_datatype=dict( - argstr='-odt %s', - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), + output_datatype=dict(argstr="-odt %s", position=-1,), output_type=dict(), ) inputs = MultiImageMaths.input_spec() @@ -47,8 +22,10 @@ def test_MultiImageMaths_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MultiImageMaths_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = MultiImageMaths.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_MultipleRegressDesign.py b/nipype/interfaces/fsl/tests/test_auto_MultipleRegressDesign.py index 17b692f5ac..fe3ce1b0b1 100644 --- a/nipype/interfaces/fsl/tests/test_auto_MultipleRegressDesign.py +++ b/nipype/interfaces/fsl/tests/test_auto_MultipleRegressDesign.py @@ -4,21 +4,23 @@ def test_MultipleRegressDesign_inputs(): input_map = dict( - contrasts=dict(mandatory=True, ), + contrasts=dict(mandatory=True,), groups=dict(), - regressors=dict(mandatory=True, ), + regressors=dict(mandatory=True,), ) inputs = 
MultipleRegressDesign.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MultipleRegressDesign_outputs(): output_map = dict( - design_con=dict(extensions=None, ), - design_fts=dict(extensions=None, ), - design_grp=dict(extensions=None, ), - design_mat=dict(extensions=None, ), + design_con=dict(extensions=None,), + design_fts=dict(extensions=None,), + design_grp=dict(extensions=None,), + design_mat=dict(extensions=None,), ) outputs = MultipleRegressDesign.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_Overlay.py b/nipype/interfaces/fsl/tests/test_auto_Overlay.py index 8a2dad8690..e09ef17541 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Overlay.py +++ b/nipype/interfaces/fsl/tests/test_auto_Overlay.py @@ -4,91 +4,53 @@ def test_Overlay_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), auto_thresh_bg=dict( - argstr='-a', + argstr="-a", mandatory=True, position=5, - xor=('auto_thresh_bg', 'full_bg_range', 'bg_thresh'), + xor=("auto_thresh_bg", "full_bg_range", "bg_thresh"), ), background_image=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=4, + argstr="%s", extensions=None, mandatory=True, position=4, ), bg_thresh=dict( - argstr='%.3f %.3f', + argstr="%.3f %.3f", mandatory=True, position=5, - xor=('auto_thresh_bg', 'full_bg_range', 'bg_thresh'), - ), - environ=dict( - nohash=True, - usedefault=True, + xor=("auto_thresh_bg", "full_bg_range", "bg_thresh"), ), + environ=dict(nohash=True, usedefault=True,), full_bg_range=dict( - argstr='-A', + argstr="-A", mandatory=True, position=5, - xor=('auto_thresh_bg', 'full_bg_range', 'bg_thresh'), + xor=("auto_thresh_bg", "full_bg_range", "bg_thresh"), ), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-1, - ), - out_type=dict( - argstr='%s', - position=2, - usedefault=True, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-1, ), + out_type=dict(argstr="%s", position=2, usedefault=True,), output_type=dict(), - show_negative_stats=dict( - argstr='%s', - position=8, - xor=['stat_image2'], - ), - stat_image=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=6, - ), + show_negative_stats=dict(argstr="%s", position=8, xor=["stat_image2"],), + stat_image=dict(argstr="%s", extensions=None, mandatory=True, position=6,), stat_image2=dict( - argstr='%s', - extensions=None, - position=9, - xor=['show_negative_stats'], - ), - stat_thresh=dict( - argstr='%.2f %.2f', - mandatory=True, - position=7, - ), - stat_thresh2=dict( - argstr='%.2f %.2f', - position=10, - ), - transparency=dict( - argstr='%s', - position=1, - usedefault=True, - ), - use_checkerboard=dict( - argstr='-c', - position=3, + argstr="%s", extensions=None, position=9, xor=["show_negative_stats"], ), + stat_thresh=dict(argstr="%.2f %.2f", mandatory=True, position=7,), + stat_thresh2=dict(argstr="%.2f %.2f", position=10,), + transparency=dict(argstr="%s", position=1, usedefault=True,), + use_checkerboard=dict(argstr="-c", position=3,), ) inputs = Overlay.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Overlay_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Overlay.output_spec() for key, metadata in 
list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_PRELUDE.py b/nipype/interfaces/fsl/tests/test_auto_PRELUDE.py index cc5cdad018..2c3623a76a 100644 --- a/nipype/interfaces/fsl/tests/test_auto_PRELUDE.py +++ b/nipype/interfaces/fsl/tests/test_auto_PRELUDE.py @@ -4,68 +4,41 @@ def test_PRELUDE_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), complex_phase_file=dict( - argstr='--complex=%s', + argstr="--complex=%s", extensions=None, mandatory=True, - xor=['magnitude_file', 'phase_file'], + xor=["magnitude_file", "phase_file"], ), - end=dict(argstr='--end=%d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - label_file=dict( - argstr='--labels=%s', - extensions=None, - hash_files=False, - ), - labelprocess2d=dict(argstr='--labelslices', ), + end=dict(argstr="--end=%d",), + environ=dict(nohash=True, usedefault=True,), + label_file=dict(argstr="--labels=%s", extensions=None, hash_files=False,), + labelprocess2d=dict(argstr="--labelslices",), magnitude_file=dict( - argstr='--abs=%s', + argstr="--abs=%s", extensions=None, mandatory=True, - xor=['complex_phase_file'], - ), - mask_file=dict( - argstr='--mask=%s', - extensions=None, + xor=["complex_phase_file"], ), - num_partitions=dict(argstr='--numphasesplit=%d', ), + mask_file=dict(argstr="--mask=%s", extensions=None,), + num_partitions=dict(argstr="--numphasesplit=%d",), output_type=dict(), phase_file=dict( - argstr='--phase=%s', + argstr="--phase=%s", extensions=None, mandatory=True, - xor=['complex_phase_file'], - ), - process2d=dict( - argstr='--slices', - xor=['labelprocess2d'], - ), - process3d=dict( - argstr='--force3D', - xor=['labelprocess2d', 'process2d'], - ), - rawphase_file=dict( - argstr='--rawphase=%s', - extensions=None, - hash_files=False, - ), - removeramps=dict(argstr='--removeramps', ), - savemask_file=dict( - argstr='--savemask=%s', - extensions=None, - hash_files=False, - ), - start=dict(argstr='--start=%d', ), - threshold=dict(argstr='--thresh=%.10f', ), + xor=["complex_phase_file"], + ), + process2d=dict(argstr="--slices", xor=["labelprocess2d"],), + process3d=dict(argstr="--force3D", xor=["labelprocess2d", "process2d"],), + rawphase_file=dict(argstr="--rawphase=%s", extensions=None, hash_files=False,), + removeramps=dict(argstr="--removeramps",), + savemask_file=dict(argstr="--savemask=%s", extensions=None, hash_files=False,), + start=dict(argstr="--start=%d",), + threshold=dict(argstr="--thresh=%.10f",), unwrapped_phase_file=dict( - argstr='--unwrap=%s', - extensions=None, - genfile=True, - hash_files=False, + argstr="--unwrap=%s", extensions=None, genfile=True, hash_files=False, ), ) inputs = PRELUDE.input_spec() @@ -73,8 +46,10 @@ def test_PRELUDE_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PRELUDE_outputs(): - output_map = dict(unwrapped_phase_file=dict(extensions=None, ), ) + output_map = dict(unwrapped_phase_file=dict(extensions=None,),) outputs = PRELUDE.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_PercentileImage.py b/nipype/interfaces/fsl/tests/test_auto_PercentileImage.py index 6e186c9d44..2b272b006c 100644 --- a/nipype/interfaces/fsl/tests/test_auto_PercentileImage.py +++ b/nipype/interfaces/fsl/tests/test_auto_PercentileImage.py @@ -4,54 +4,28 @@ def test_PercentileImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict( - 
argstr='-%sperc', - position=4, - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - internal_datatype=dict( - argstr='-dt %s', - position=1, - ), - nan2zeros=dict( - argstr='-nan', - position=3, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="-%sperc", position=4, usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + internal_datatype=dict(argstr="-dt %s", position=1,), + nan2zeros=dict(argstr="-nan", position=3,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-2, - ), - output_datatype=dict( - argstr='-odt %s', - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), + output_datatype=dict(argstr="-odt %s", position=-1,), output_type=dict(), - perc=dict( - argstr='%f', - position=5, - ), + perc=dict(argstr="%f", position=5,), ) inputs = PercentileImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PercentileImage_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = PercentileImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_PlotMotionParams.py b/nipype/interfaces/fsl/tests/test_auto_PlotMotionParams.py index c548766a71..c12494e50b 100644 --- a/nipype/interfaces/fsl/tests/test_auto_PlotMotionParams.py +++ b/nipype/interfaces/fsl/tests/test_auto_PlotMotionParams.py @@ -4,37 +4,24 @@ def test_PlotMotionParams_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - mandatory=True, - position=1, - ), - in_source=dict(mandatory=True, ), - out_file=dict( - argstr='-o %s', - extensions=None, - genfile=True, - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", mandatory=True, position=1,), + in_source=dict(mandatory=True,), + out_file=dict(argstr="-o %s", extensions=None, genfile=True, hash_files=False,), output_type=dict(), - plot_size=dict(argstr='%s', ), - plot_type=dict( - argstr='%s', - mandatory=True, - ), + plot_size=dict(argstr="%s",), + plot_type=dict(argstr="%s", mandatory=True,), ) inputs = PlotMotionParams.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PlotMotionParams_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = PlotMotionParams.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_PlotTimeSeries.py b/nipype/interfaces/fsl/tests/test_auto_PlotTimeSeries.py index 8d51e1dd5a..0f3954fcf2 100644 --- a/nipype/interfaces/fsl/tests/test_auto_PlotTimeSeries.py +++ b/nipype/interfaces/fsl/tests/test_auto_PlotTimeSeries.py @@ -4,68 +4,34 @@ def test_PlotTimeSeries_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - mandatory=True, - position=1, - ), - labels=dict(argstr='%s', ), - legend_file=dict( - 
argstr='--legend=%s', - extensions=None, - ), - out_file=dict( - argstr='-o %s', - extensions=None, - genfile=True, - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", mandatory=True, position=1,), + labels=dict(argstr="%s",), + legend_file=dict(argstr="--legend=%s", extensions=None,), + out_file=dict(argstr="-o %s", extensions=None, genfile=True, hash_files=False,), output_type=dict(), - plot_finish=dict( - argstr='--finish=%d', - xor=('plot_range', ), - ), - plot_range=dict( - argstr='%s', - xor=('plot_start', 'plot_finish'), - ), - plot_size=dict(argstr='%s', ), - plot_start=dict( - argstr='--start=%d', - xor=('plot_range', ), - ), - sci_notation=dict(argstr='--sci', ), - title=dict(argstr='%s', ), - x_precision=dict(argstr='--precision=%d', ), - x_units=dict( - argstr='-u %d', - usedefault=True, - ), - y_max=dict( - argstr='--ymax=%.2f', - xor=('y_range', ), - ), - y_min=dict( - argstr='--ymin=%.2f', - xor=('y_range', ), - ), - y_range=dict( - argstr='%s', - xor=('y_min', 'y_max'), - ), + plot_finish=dict(argstr="--finish=%d", xor=("plot_range",),), + plot_range=dict(argstr="%s", xor=("plot_start", "plot_finish"),), + plot_size=dict(argstr="%s",), + plot_start=dict(argstr="--start=%d", xor=("plot_range",),), + sci_notation=dict(argstr="--sci",), + title=dict(argstr="%s",), + x_precision=dict(argstr="--precision=%d",), + x_units=dict(argstr="-u %d", usedefault=True,), + y_max=dict(argstr="--ymax=%.2f", xor=("y_range",),), + y_min=dict(argstr="--ymin=%.2f", xor=("y_range",),), + y_range=dict(argstr="%s", xor=("y_min", "y_max"),), ) inputs = PlotTimeSeries.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PlotTimeSeries_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = PlotTimeSeries.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_PowerSpectrum.py b/nipype/interfaces/fsl/tests/test_auto_PowerSpectrum.py index df30704138..5aa19309fc 100644 --- a/nipype/interfaces/fsl/tests/test_auto_PowerSpectrum.py +++ b/nipype/interfaces/fsl/tests/test_auto_PowerSpectrum.py @@ -4,23 +4,11 @@ def test_PowerSpectrum_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=0, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=0,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=1, ), output_type=dict(), ) @@ -29,8 +17,10 @@ def test_PowerSpectrum_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PowerSpectrum_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = PowerSpectrum.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_PrepareFieldmap.py b/nipype/interfaces/fsl/tests/test_auto_PrepareFieldmap.py index cf2b9c41b0..d6d39b595c 100644 --- 
a/nipype/interfaces/fsl/tests/test_auto_PrepareFieldmap.py +++ b/nipype/interfaces/fsl/tests/test_auto_PrepareFieldmap.py @@ -4,53 +4,25 @@ def test_PrepareFieldmap_inputs(): input_map = dict( - args=dict(argstr='%s', ), - delta_TE=dict( - argstr='%f', - mandatory=True, - position=-2, - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_magnitude=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=3, - ), - in_phase=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - nocheck=dict( - argstr='--nocheck', - position=-1, - usedefault=True, - ), - out_fieldmap=dict( - argstr='%s', - extensions=None, - position=4, - ), + args=dict(argstr="%s",), + delta_TE=dict(argstr="%f", mandatory=True, position=-2, usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + in_magnitude=dict(argstr="%s", extensions=None, mandatory=True, position=3,), + in_phase=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + nocheck=dict(argstr="--nocheck", position=-1, usedefault=True,), + out_fieldmap=dict(argstr="%s", extensions=None, position=4,), output_type=dict(), - scanner=dict( - argstr='%s', - position=1, - usedefault=True, - ), + scanner=dict(argstr="%s", position=1, usedefault=True,), ) inputs = PrepareFieldmap.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PrepareFieldmap_outputs(): - output_map = dict(out_fieldmap=dict(extensions=None, ), ) + output_map = dict(out_fieldmap=dict(extensions=None,),) outputs = PrepareFieldmap.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_ProbTrackX.py b/nipype/interfaces/fsl/tests/test_auto_ProbTrackX.py index b7eacdaa01..1e2d0f5486 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ProbTrackX.py +++ b/nipype/interfaces/fsl/tests/test_auto_ProbTrackX.py @@ -4,110 +4,60 @@ def test_ProbTrackX_inputs(): input_map = dict( - args=dict(argstr='%s', ), - avoid_mp=dict( - argstr='--avoid=%s', - extensions=None, - ), - c_thresh=dict(argstr='--cthr=%.3f', ), - correct_path_distribution=dict(argstr='--pd', ), - dist_thresh=dict(argstr='--distthresh=%.3f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fibst=dict(argstr='--fibst=%d', ), - force_dir=dict( - argstr='--forcedir', - usedefault=True, - ), - fsamples=dict(mandatory=True, ), - inv_xfm=dict( - argstr='--invxfm=%s', - extensions=None, - ), - loop_check=dict(argstr='--loopcheck', ), - mask=dict( - argstr='-m %s', - extensions=None, - mandatory=True, - ), - mask2=dict( - argstr='--mask2=%s', - extensions=None, - ), - mesh=dict( - argstr='--mesh=%s', - extensions=None, - ), - mod_euler=dict(argstr='--modeuler', ), - mode=dict( - argstr='--mode=%s', - genfile=True, - ), - n_samples=dict( - argstr='--nsamples=%d', - usedefault=True, - ), - n_steps=dict(argstr='--nsteps=%d', ), - network=dict(argstr='--network', ), - opd=dict( - argstr='--opd', - usedefault=True, - ), - os2t=dict(argstr='--os2t', ), - out_dir=dict( - argstr='--dir=%s', - genfile=True, - ), + args=dict(argstr="%s",), + avoid_mp=dict(argstr="--avoid=%s", extensions=None,), + c_thresh=dict(argstr="--cthr=%.3f",), + correct_path_distribution=dict(argstr="--pd",), + dist_thresh=dict(argstr="--distthresh=%.3f",), + environ=dict(nohash=True, usedefault=True,), + fibst=dict(argstr="--fibst=%d",), + force_dir=dict(argstr="--forcedir", usedefault=True,), + 
fsamples=dict(mandatory=True,), + inv_xfm=dict(argstr="--invxfm=%s", extensions=None,), + loop_check=dict(argstr="--loopcheck",), + mask=dict(argstr="-m %s", extensions=None, mandatory=True,), + mask2=dict(argstr="--mask2=%s", extensions=None,), + mesh=dict(argstr="--mesh=%s", extensions=None,), + mod_euler=dict(argstr="--modeuler",), + mode=dict(argstr="--mode=%s", genfile=True,), + n_samples=dict(argstr="--nsamples=%d", usedefault=True,), + n_steps=dict(argstr="--nsteps=%d",), + network=dict(argstr="--network",), + opd=dict(argstr="--opd", usedefault=True,), + os2t=dict(argstr="--os2t",), + out_dir=dict(argstr="--dir=%s", genfile=True,), output_type=dict(), - phsamples=dict(mandatory=True, ), - rand_fib=dict(argstr='--randfib=%d', ), - random_seed=dict(argstr='--rseed', ), - s2tastext=dict(argstr='--s2tastext', ), - sample_random_points=dict(argstr='--sampvox', ), - samples_base_name=dict( - argstr='--samples=%s', - usedefault=True, - ), - seed=dict( - argstr='--seed=%s', - mandatory=True, - ), - seed_ref=dict( - argstr='--seedref=%s', - extensions=None, - ), - step_length=dict(argstr='--steplength=%.3f', ), - stop_mask=dict( - argstr='--stop=%s', - extensions=None, - ), - target_masks=dict(argstr='--targetmasks=%s', ), - thsamples=dict(mandatory=True, ), - use_anisotropy=dict(argstr='--usef', ), - verbose=dict(argstr='--verbose=%d', ), - waypoints=dict( - argstr='--waypoints=%s', - extensions=None, - ), - xfm=dict( - argstr='--xfm=%s', - extensions=None, - ), + phsamples=dict(mandatory=True,), + rand_fib=dict(argstr="--randfib=%d",), + random_seed=dict(argstr="--rseed",), + s2tastext=dict(argstr="--s2tastext",), + sample_random_points=dict(argstr="--sampvox",), + samples_base_name=dict(argstr="--samples=%s", usedefault=True,), + seed=dict(argstr="--seed=%s", mandatory=True,), + seed_ref=dict(argstr="--seedref=%s", extensions=None,), + step_length=dict(argstr="--steplength=%.3f",), + stop_mask=dict(argstr="--stop=%s", extensions=None,), + target_masks=dict(argstr="--targetmasks=%s",), + thsamples=dict(mandatory=True,), + use_anisotropy=dict(argstr="--usef",), + verbose=dict(argstr="--verbose=%d",), + waypoints=dict(argstr="--waypoints=%s", extensions=None,), + xfm=dict(argstr="--xfm=%s", extensions=None,), ) inputs = ProbTrackX.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ProbTrackX_outputs(): output_map = dict( fdt_paths=dict(), - log=dict(extensions=None, ), + log=dict(extensions=None,), particle_files=dict(), targets=dict(), - way_total=dict(extensions=None, ), + way_total=dict(extensions=None,), ) outputs = ProbTrackX.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_ProbTrackX2.py b/nipype/interfaces/fsl/tests/test_auto_ProbTrackX2.py index 3e1a6c12c3..56bff1e5ac 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ProbTrackX2.py +++ b/nipype/interfaces/fsl/tests/test_auto_ProbTrackX2.py @@ -4,144 +4,79 @@ def test_ProbTrackX2_inputs(): input_map = dict( - args=dict(argstr='%s', ), - avoid_mp=dict( - argstr='--avoid=%s', - extensions=None, - ), - c_thresh=dict(argstr='--cthr=%.3f', ), - colmask4=dict( - argstr='--colmask4=%s', - extensions=None, - ), - correct_path_distribution=dict(argstr='--pd', ), - dist_thresh=dict(argstr='--distthresh=%.3f', ), - distthresh1=dict(argstr='--distthresh1=%.3f', ), - distthresh3=dict(argstr='--distthresh3=%.3f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fibst=dict(argstr='--fibst=%d', ), - 
fopd=dict( - argstr='--fopd=%s', - extensions=None, - ), - force_dir=dict( - argstr='--forcedir', - usedefault=True, - ), - fsamples=dict(mandatory=True, ), - inv_xfm=dict( - argstr='--invxfm=%s', - extensions=None, - ), - loop_check=dict(argstr='--loopcheck', ), - lrtarget3=dict( - argstr='--lrtarget3=%s', - extensions=None, - ), - mask=dict( - argstr='-m %s', - extensions=None, - mandatory=True, - ), - meshspace=dict(argstr='--meshspace=%s', ), - mod_euler=dict(argstr='--modeuler', ), - n_samples=dict( - argstr='--nsamples=%d', - usedefault=True, - ), - n_steps=dict(argstr='--nsteps=%d', ), - network=dict(argstr='--network', ), - omatrix1=dict(argstr='--omatrix1', ), - omatrix2=dict( - argstr='--omatrix2', - requires=['target2'], - ), - omatrix3=dict( - argstr='--omatrix3', - requires=['target3', 'lrtarget3'], - ), - omatrix4=dict(argstr='--omatrix4', ), - onewaycondition=dict(argstr='--onewaycondition', ), - opd=dict( - argstr='--opd', - usedefault=True, - ), - os2t=dict(argstr='--os2t', ), - out_dir=dict( - argstr='--dir=%s', - genfile=True, - ), + args=dict(argstr="%s",), + avoid_mp=dict(argstr="--avoid=%s", extensions=None,), + c_thresh=dict(argstr="--cthr=%.3f",), + colmask4=dict(argstr="--colmask4=%s", extensions=None,), + correct_path_distribution=dict(argstr="--pd",), + dist_thresh=dict(argstr="--distthresh=%.3f",), + distthresh1=dict(argstr="--distthresh1=%.3f",), + distthresh3=dict(argstr="--distthresh3=%.3f",), + environ=dict(nohash=True, usedefault=True,), + fibst=dict(argstr="--fibst=%d",), + fopd=dict(argstr="--fopd=%s", extensions=None,), + force_dir=dict(argstr="--forcedir", usedefault=True,), + fsamples=dict(mandatory=True,), + inv_xfm=dict(argstr="--invxfm=%s", extensions=None,), + loop_check=dict(argstr="--loopcheck",), + lrtarget3=dict(argstr="--lrtarget3=%s", extensions=None,), + mask=dict(argstr="-m %s", extensions=None, mandatory=True,), + meshspace=dict(argstr="--meshspace=%s",), + mod_euler=dict(argstr="--modeuler",), + n_samples=dict(argstr="--nsamples=%d", usedefault=True,), + n_steps=dict(argstr="--nsteps=%d",), + network=dict(argstr="--network",), + omatrix1=dict(argstr="--omatrix1",), + omatrix2=dict(argstr="--omatrix2", requires=["target2"],), + omatrix3=dict(argstr="--omatrix3", requires=["target3", "lrtarget3"],), + omatrix4=dict(argstr="--omatrix4",), + onewaycondition=dict(argstr="--onewaycondition",), + opd=dict(argstr="--opd", usedefault=True,), + os2t=dict(argstr="--os2t",), + out_dir=dict(argstr="--dir=%s", genfile=True,), output_type=dict(), - phsamples=dict(mandatory=True, ), - rand_fib=dict(argstr='--randfib=%d', ), - random_seed=dict(argstr='--rseed', ), - s2tastext=dict(argstr='--s2tastext', ), - sample_random_points=dict(argstr='--sampvox', ), - samples_base_name=dict( - argstr='--samples=%s', - usedefault=True, - ), - seed=dict( - argstr='--seed=%s', - mandatory=True, - ), - seed_ref=dict( - argstr='--seedref=%s', - extensions=None, - ), - simple=dict(argstr='--simple', ), - step_length=dict(argstr='--steplength=%.3f', ), - stop_mask=dict( - argstr='--stop=%s', - extensions=None, - ), - target2=dict( - argstr='--target2=%s', - extensions=None, - ), - target3=dict( - argstr='--target3=%s', - extensions=None, - ), - target4=dict( - argstr='--target4=%s', - extensions=None, - ), - target_masks=dict(argstr='--targetmasks=%s', ), - thsamples=dict(mandatory=True, ), - use_anisotropy=dict(argstr='--usef', ), - verbose=dict(argstr='--verbose=%d', ), - waycond=dict(argstr='--waycond=%s', ), - wayorder=dict(argstr='--wayorder', ), - waypoints=dict( - 
argstr='--waypoints=%s', - extensions=None, - ), - xfm=dict( - argstr='--xfm=%s', - extensions=None, - ), + phsamples=dict(mandatory=True,), + rand_fib=dict(argstr="--randfib=%d",), + random_seed=dict(argstr="--rseed",), + s2tastext=dict(argstr="--s2tastext",), + sample_random_points=dict(argstr="--sampvox",), + samples_base_name=dict(argstr="--samples=%s", usedefault=True,), + seed=dict(argstr="--seed=%s", mandatory=True,), + seed_ref=dict(argstr="--seedref=%s", extensions=None,), + simple=dict(argstr="--simple",), + step_length=dict(argstr="--steplength=%.3f",), + stop_mask=dict(argstr="--stop=%s", extensions=None,), + target2=dict(argstr="--target2=%s", extensions=None,), + target3=dict(argstr="--target3=%s", extensions=None,), + target4=dict(argstr="--target4=%s", extensions=None,), + target_masks=dict(argstr="--targetmasks=%s",), + thsamples=dict(mandatory=True,), + use_anisotropy=dict(argstr="--usef",), + verbose=dict(argstr="--verbose=%d",), + waycond=dict(argstr="--waycond=%s",), + wayorder=dict(argstr="--wayorder",), + waypoints=dict(argstr="--waypoints=%s", extensions=None,), + xfm=dict(argstr="--xfm=%s", extensions=None,), ) inputs = ProbTrackX2.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ProbTrackX2_outputs(): output_map = dict( fdt_paths=dict(), - log=dict(extensions=None, ), - lookup_tractspace=dict(extensions=None, ), - matrix1_dot=dict(extensions=None, ), - matrix2_dot=dict(extensions=None, ), - matrix3_dot=dict(extensions=None, ), - network_matrix=dict(extensions=None, ), + log=dict(extensions=None,), + lookup_tractspace=dict(extensions=None,), + matrix1_dot=dict(extensions=None,), + matrix2_dot=dict(extensions=None,), + matrix3_dot=dict(extensions=None,), + network_matrix=dict(extensions=None,), particle_files=dict(), targets=dict(), - way_total=dict(extensions=None, ), + way_total=dict(extensions=None,), ) outputs = ProbTrackX2.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_ProjThresh.py b/nipype/interfaces/fsl/tests/test_auto_ProjThresh.py index 304beace0d..cc1a6a03ac 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ProjThresh.py +++ b/nipype/interfaces/fsl/tests/test_auto_ProjThresh.py @@ -4,30 +4,21 @@ def test_ProjThresh_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_files=dict( - argstr='%s', - mandatory=True, - position=0, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_files=dict(argstr="%s", mandatory=True, position=0,), output_type=dict(), - threshold=dict( - argstr='%d', - mandatory=True, - position=1, - ), + threshold=dict(argstr="%d", mandatory=True, position=1,), ) inputs = ProjThresh.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ProjThresh_outputs(): - output_map = dict(out_files=dict(), ) + output_map = dict(out_files=dict(),) outputs = ProjThresh.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_Randomise.py b/nipype/interfaces/fsl/tests/test_auto_Randomise.py index 4b6194a3d8..95c1cf7d59 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Randomise.py +++ b/nipype/interfaces/fsl/tests/test_auto_Randomise.py @@ -4,71 +4,44 @@ def test_Randomise_inputs(): input_map = dict( - args=dict(argstr='%s', ), - base_name=dict( - 
argstr='-o "%s"', - position=1, - usedefault=True, - ), - c_thresh=dict(argstr='-c %.1f', ), - cm_thresh=dict(argstr='-C %.1f', ), - demean=dict(argstr='-D', ), - design_mat=dict( - argstr='-d %s', - extensions=None, - position=2, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - f_c_thresh=dict(argstr='-F %.2f', ), - f_cm_thresh=dict(argstr='-S %.2f', ), - f_only=dict(argstr='--f_only', ), - fcon=dict( - argstr='-f %s', - extensions=None, - ), - in_file=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - position=0, - ), - mask=dict( - argstr='-m %s', - extensions=None, - ), - num_perm=dict(argstr='-n %d', ), - one_sample_group_mean=dict(argstr='-1', ), + args=dict(argstr="%s",), + base_name=dict(argstr='-o "%s"', position=1, usedefault=True,), + c_thresh=dict(argstr="-c %.1f",), + cm_thresh=dict(argstr="-C %.1f",), + demean=dict(argstr="-D",), + design_mat=dict(argstr="-d %s", extensions=None, position=2,), + environ=dict(nohash=True, usedefault=True,), + f_c_thresh=dict(argstr="-F %.2f",), + f_cm_thresh=dict(argstr="-S %.2f",), + f_only=dict(argstr="--f_only",), + fcon=dict(argstr="-f %s", extensions=None,), + in_file=dict(argstr="-i %s", extensions=None, mandatory=True, position=0,), + mask=dict(argstr="-m %s", extensions=None,), + num_perm=dict(argstr="-n %d",), + one_sample_group_mean=dict(argstr="-1",), output_type=dict(), - p_vec_n_dist_files=dict(argstr='-P', ), - raw_stats_imgs=dict(argstr='-R', ), - seed=dict(argstr='--seed=%d', ), - show_info_parallel_mode=dict(argstr='-Q', ), - show_total_perms=dict(argstr='-q', ), - tcon=dict( - argstr='-t %s', - extensions=None, - position=3, - ), - tfce=dict(argstr='-T', ), - tfce2D=dict(argstr='--T2', ), - tfce_C=dict(argstr='--tfce_C=%.2f', ), - tfce_E=dict(argstr='--tfce_E=%.2f', ), - tfce_H=dict(argstr='--tfce_H=%.2f', ), - var_smooth=dict(argstr='-v %d', ), - vox_p_values=dict(argstr='-x', ), - x_block_labels=dict( - argstr='-e %s', - extensions=None, - ), + p_vec_n_dist_files=dict(argstr="-P",), + raw_stats_imgs=dict(argstr="-R",), + seed=dict(argstr="--seed=%d",), + show_info_parallel_mode=dict(argstr="-Q",), + show_total_perms=dict(argstr="-q",), + tcon=dict(argstr="-t %s", extensions=None, position=3,), + tfce=dict(argstr="-T",), + tfce2D=dict(argstr="--T2",), + tfce_C=dict(argstr="--tfce_C=%.2f",), + tfce_E=dict(argstr="--tfce_E=%.2f",), + tfce_H=dict(argstr="--tfce_H=%.2f",), + var_smooth=dict(argstr="-v %d",), + vox_p_values=dict(argstr="-x",), + x_block_labels=dict(argstr="-e %s", extensions=None,), ) inputs = Randomise.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Randomise_outputs(): output_map = dict( f_corrected_p_files=dict(), diff --git a/nipype/interfaces/fsl/tests/test_auto_Reorient2Std.py b/nipype/interfaces/fsl/tests/test_auto_Reorient2Std.py index e3843be681..d81874e76a 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Reorient2Std.py +++ b/nipype/interfaces/fsl/tests/test_auto_Reorient2Std.py @@ -4,22 +4,10 @@ def test_Reorient2Std_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - ), - out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True,), + out_file=dict(argstr="%s", 
extensions=None, genfile=True, hash_files=False,), output_type=dict(), ) inputs = Reorient2Std.input_spec() @@ -27,8 +15,10 @@ def test_Reorient2Std_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Reorient2Std_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Reorient2Std.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_RobustFOV.py b/nipype/interfaces/fsl/tests/test_auto_RobustFOV.py index 3e0ce74dd3..fbadb82c99 100644 --- a/nipype/interfaces/fsl/tests/test_auto_RobustFOV.py +++ b/nipype/interfaces/fsl/tests/test_auto_RobustFOV.py @@ -4,31 +4,23 @@ def test_RobustFOV_inputs(): input_map = dict( - args=dict(argstr='%s', ), - brainsize=dict(argstr='-b %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - position=0, - ), + args=dict(argstr="%s",), + brainsize=dict(argstr="-b %d",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-i %s", extensions=None, mandatory=True, position=0,), out_roi=dict( - argstr='-r %s', + argstr="-r %s", extensions=None, hash_files=False, - name_source=['in_file'], - name_template='%s_ROI', + name_source=["in_file"], + name_template="%s_ROI", ), out_transform=dict( - argstr='-m %s', + argstr="-m %s", extensions=None, hash_files=False, - name_source=['in_file'], - name_template='%s_to_ROI', + name_source=["in_file"], + name_template="%s_to_ROI", ), output_type=dict(), ) @@ -37,10 +29,11 @@ def test_RobustFOV_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RobustFOV_outputs(): output_map = dict( - out_roi=dict(extensions=None, ), - out_transform=dict(extensions=None, ), + out_roi=dict(extensions=None,), out_transform=dict(extensions=None,), ) outputs = RobustFOV.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_SMM.py b/nipype/interfaces/fsl/tests/test_auto_SMM.py index 9d3591ae27..2042d0845f 100644 --- a/nipype/interfaces/fsl/tests/test_auto_SMM.py +++ b/nipype/interfaces/fsl/tests/test_auto_SMM.py @@ -4,11 +4,8 @@ def test_SMM_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), mask=dict( argstr='--mask="%s"', copyfile=False, @@ -16,10 +13,7 @@ def test_SMM_inputs(): mandatory=True, position=1, ), - no_deactivation_class=dict( - argstr='--zfstatmode', - position=2, - ), + no_deactivation_class=dict(argstr="--zfstatmode", position=2,), output_type=dict(), spatial_data_file=dict( argstr='--sdf="%s"', @@ -34,11 +28,13 @@ def test_SMM_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SMM_outputs(): output_map = dict( - activation_p_map=dict(extensions=None, ), - deactivation_p_map=dict(extensions=None, ), - null_p_map=dict(extensions=None, ), + activation_p_map=dict(extensions=None,), + deactivation_p_map=dict(extensions=None,), + null_p_map=dict(extensions=None,), ) outputs = SMM.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_SUSAN.py b/nipype/interfaces/fsl/tests/test_auto_SUSAN.py index 
44b19350be..7f7f270be1 100644 --- a/nipype/interfaces/fsl/tests/test_auto_SUSAN.py +++ b/nipype/interfaces/fsl/tests/test_auto_SUSAN.py @@ -4,58 +4,28 @@ def test_SUSAN_inputs(): input_map = dict( - args=dict(argstr='%s', ), - brightness_threshold=dict( - argstr='%.10f', - mandatory=True, - position=2, - ), - dimension=dict( - argstr='%d', - position=4, - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fwhm=dict( - argstr='%.10f', - mandatory=True, - position=3, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=1, - ), + args=dict(argstr="%s",), + brightness_threshold=dict(argstr="%.10f", mandatory=True, position=2,), + dimension=dict(argstr="%d", position=4, usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + fwhm=dict(argstr="%.10f", mandatory=True, position=3,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=1,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-1, ), output_type=dict(), - usans=dict( - argstr='', - position=6, - usedefault=True, - ), - use_median=dict( - argstr='%d', - position=5, - usedefault=True, - ), + usans=dict(argstr="", position=6, usedefault=True,), + use_median=dict(argstr="%d", position=5, usedefault=True,), ) inputs = SUSAN.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SUSAN_outputs(): - output_map = dict(smoothed_file=dict(extensions=None, ), ) + output_map = dict(smoothed_file=dict(extensions=None,),) outputs = SUSAN.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_SigLoss.py b/nipype/interfaces/fsl/tests/test_auto_SigLoss.py index 193557d297..fe63fdce23 100644 --- a/nipype/interfaces/fsl/tests/test_auto_SigLoss.py +++ b/nipype/interfaces/fsl/tests/test_auto_SigLoss.py @@ -4,36 +4,24 @@ def test_SigLoss_inputs(): input_map = dict( - args=dict(argstr='%s', ), - echo_time=dict(argstr='--te=%f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - ), - mask_file=dict( - argstr='-m %s', - extensions=None, - ), - out_file=dict( - argstr='-s %s', - extensions=None, - genfile=True, - ), + args=dict(argstr="%s",), + echo_time=dict(argstr="--te=%f",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-i %s", extensions=None, mandatory=True,), + mask_file=dict(argstr="-m %s", extensions=None,), + out_file=dict(argstr="-s %s", extensions=None, genfile=True,), output_type=dict(), - slice_direction=dict(argstr='-d %s', ), + slice_direction=dict(argstr="-d %s",), ) inputs = SigLoss.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SigLoss_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = SigLoss.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_Slice.py b/nipype/interfaces/fsl/tests/test_auto_Slice.py index a3e604b657..f96ee854b3 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Slice.py +++ b/nipype/interfaces/fsl/tests/test_auto_Slice.py @@ -4,22 +4,12 @@ def test_Slice_inputs(): input_map = dict( - 
args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), in_file=dict( - argstr='%s', - copyfile=False, - extensions=None, - mandatory=True, - position=0, - ), - out_base_name=dict( - argstr='%s', - position=1, + argstr="%s", copyfile=False, extensions=None, mandatory=True, position=0, ), + out_base_name=dict(argstr="%s", position=1,), output_type=dict(), ) inputs = Slice.input_spec() @@ -27,8 +17,10 @@ def test_Slice_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Slice_outputs(): - output_map = dict(out_files=dict(), ) + output_map = dict(out_files=dict(),) outputs = Slice.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_SliceTimer.py b/nipype/interfaces/fsl/tests/test_auto_SliceTimer.py index 9e26d7952b..0d0c0fc0f4 100644 --- a/nipype/interfaces/fsl/tests/test_auto_SliceTimer.py +++ b/nipype/interfaces/fsl/tests/test_auto_SliceTimer.py @@ -4,45 +4,30 @@ def test_SliceTimer_inputs(): input_map = dict( - args=dict(argstr='%s', ), - custom_order=dict( - argstr='--ocustom=%s', - extensions=None, - ), - custom_timings=dict( - argstr='--tcustom=%s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - global_shift=dict(argstr='--tglobal', ), - in_file=dict( - argstr='--in=%s', - extensions=None, - mandatory=True, - position=0, - ), - index_dir=dict(argstr='--down', ), - interleaved=dict(argstr='--odd', ), + args=dict(argstr="%s",), + custom_order=dict(argstr="--ocustom=%s", extensions=None,), + custom_timings=dict(argstr="--tcustom=%s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), + global_shift=dict(argstr="--tglobal",), + in_file=dict(argstr="--in=%s", extensions=None, mandatory=True, position=0,), + index_dir=dict(argstr="--down",), + interleaved=dict(argstr="--odd",), out_file=dict( - argstr='--out=%s', - extensions=None, - genfile=True, - hash_files=False, + argstr="--out=%s", extensions=None, genfile=True, hash_files=False, ), output_type=dict(), - slice_direction=dict(argstr='--direction=%d', ), - time_repetition=dict(argstr='--repeat=%f', ), + slice_direction=dict(argstr="--direction=%d",), + time_repetition=dict(argstr="--repeat=%f",), ) inputs = SliceTimer.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SliceTimer_outputs(): - output_map = dict(slice_time_corrected_file=dict(extensions=None, ), ) + output_map = dict(slice_time_corrected_file=dict(extensions=None,),) outputs = SliceTimer.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_Slicer.py b/nipype/interfaces/fsl/tests/test_auto_Slicer.py index d843870561..205aab061b 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Slicer.py +++ b/nipype/interfaces/fsl/tests/test_auto_Slicer.py @@ -5,103 +5,56 @@ def test_Slicer_inputs(): input_map = dict( all_axial=dict( - argstr='-A', + argstr="-A", position=10, - requires=['image_width'], - xor=('single_slice', 'middle_slices', 'all_axial', 'sample_axial'), - ), - args=dict(argstr='%s', ), - colour_map=dict( - argstr='-l %s', - extensions=None, - position=4, - ), - dither_edges=dict( - argstr='-t', - position=7, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - 
image_edges=dict( - argstr='%s', - extensions=None, - position=2, - ), - image_width=dict( - argstr='%d', - position=-2, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=1, - ), - intensity_range=dict( - argstr='-i %.3f %.3f', - position=5, - ), - label_slices=dict( - argstr='-L', - position=3, - usedefault=True, - ), + requires=["image_width"], + xor=("single_slice", "middle_slices", "all_axial", "sample_axial"), + ), + args=dict(argstr="%s",), + colour_map=dict(argstr="-l %s", extensions=None, position=4,), + dither_edges=dict(argstr="-t", position=7,), + environ=dict(nohash=True, usedefault=True,), + image_edges=dict(argstr="%s", extensions=None, position=2,), + image_width=dict(argstr="%d", position=-2,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=1,), + intensity_range=dict(argstr="-i %.3f %.3f", position=5,), + label_slices=dict(argstr="-L", position=3, usedefault=True,), middle_slices=dict( - argstr='-a', + argstr="-a", position=10, - xor=('single_slice', 'middle_slices', 'all_axial', 'sample_axial'), - ), - nearest_neighbour=dict( - argstr='-n', - position=8, + xor=("single_slice", "middle_slices", "all_axial", "sample_axial"), ), + nearest_neighbour=dict(argstr="-n", position=8,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-1, ), output_type=dict(), sample_axial=dict( - argstr='-S %d', + argstr="-S %d", position=10, - requires=['image_width'], - xor=('single_slice', 'middle_slices', 'all_axial', 'sample_axial'), - ), - scaling=dict( - argstr='-s %f', - position=0, - ), - show_orientation=dict( - argstr='%s', - position=9, - usedefault=True, + requires=["image_width"], + xor=("single_slice", "middle_slices", "all_axial", "sample_axial"), ), + scaling=dict(argstr="-s %f", position=0,), + show_orientation=dict(argstr="%s", position=9, usedefault=True,), single_slice=dict( - argstr='-%s', + argstr="-%s", position=10, - requires=['slice_number'], - xor=('single_slice', 'middle_slices', 'all_axial', 'sample_axial'), - ), - slice_number=dict( - argstr='-%d', - position=11, - ), - threshold_edges=dict( - argstr='-e %.3f', - position=6, + requires=["slice_number"], + xor=("single_slice", "middle_slices", "all_axial", "sample_axial"), ), + slice_number=dict(argstr="-%d", position=11,), + threshold_edges=dict(argstr="-e %.3f", position=6,), ) inputs = Slicer.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Slicer_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Slicer.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_Smooth.py b/nipype/interfaces/fsl/tests/test_auto_Smooth.py index bc710d76a5..733f0e83f1 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Smooth.py +++ b/nipype/interfaces/fsl/tests/test_auto_Smooth.py @@ -4,36 +4,28 @@ def test_Smooth_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), fwhm=dict( - argstr='-kernel gauss %.03f -fmean', + argstr="-kernel gauss %.03f -fmean", mandatory=True, position=1, - xor=['sigma'], - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - 
position=0, + xor=["sigma"], ), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=0,), output_type=dict(), sigma=dict( - argstr='-kernel gauss %.03f -fmean', + argstr="-kernel gauss %.03f -fmean", mandatory=True, position=1, - xor=['fwhm'], + xor=["fwhm"], ), smoothed_file=dict( - argstr='%s', + argstr="%s", extensions=None, hash_files=False, - name_source=['in_file'], - name_template='%s_smooth', + name_source=["in_file"], + name_template="%s_smooth", position=2, ), ) @@ -42,8 +34,10 @@ def test_Smooth_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Smooth_outputs(): - output_map = dict(smoothed_file=dict(extensions=None, ), ) + output_map = dict(smoothed_file=dict(extensions=None,),) outputs = Smooth.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_SmoothEstimate.py b/nipype/interfaces/fsl/tests/test_auto_SmoothEstimate.py index 811bc1e4b9..b6fac84352 100644 --- a/nipype/interfaces/fsl/tests/test_auto_SmoothEstimate.py +++ b/nipype/interfaces/fsl/tests/test_auto_SmoothEstimate.py @@ -4,44 +4,23 @@ def test_SmoothEstimate_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dof=dict( - argstr='--dof=%d', - mandatory=True, - xor=['zstat_file'], - ), - environ=dict( - nohash=True, - usedefault=True, - ), - mask_file=dict( - argstr='--mask=%s', - extensions=None, - mandatory=True, - ), + args=dict(argstr="%s",), + dof=dict(argstr="--dof=%d", mandatory=True, xor=["zstat_file"],), + environ=dict(nohash=True, usedefault=True,), + mask_file=dict(argstr="--mask=%s", extensions=None, mandatory=True,), output_type=dict(), - residual_fit_file=dict( - argstr='--res=%s', - extensions=None, - requires=['dof'], - ), - zstat_file=dict( - argstr='--zstat=%s', - extensions=None, - xor=['dof'], - ), + residual_fit_file=dict(argstr="--res=%s", extensions=None, requires=["dof"],), + zstat_file=dict(argstr="--zstat=%s", extensions=None, xor=["dof"],), ) inputs = SmoothEstimate.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SmoothEstimate_outputs(): - output_map = dict( - dlh=dict(), - resels=dict(), - volume=dict(), - ) + output_map = dict(dlh=dict(), resels=dict(), volume=dict(),) outputs = SmoothEstimate.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_SpatialFilter.py b/nipype/interfaces/fsl/tests/test_auto_SpatialFilter.py index 2f267eb6bf..ec2b59ba6d 100644 --- a/nipype/interfaces/fsl/tests/test_auto_SpatialFilter.py +++ b/nipype/interfaces/fsl/tests/test_auto_SpatialFilter.py @@ -4,56 +4,21 @@ def test_SpatialFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - internal_datatype=dict( - argstr='-dt %s', - position=1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + internal_datatype=dict(argstr="-dt %s", position=1,), kernel_file=dict( - argstr='%s', - extensions=None, - position=5, - xor=['kernel_size'], - ), - kernel_shape=dict( - argstr='-kernel %s', - position=4, - ), - kernel_size=dict( - argstr='%.4f', - position=5, - xor=['kernel_file'], - ), - 
nan2zeros=dict( - argstr='-nan', - position=3, - ), - operation=dict( - argstr='-f%s', - mandatory=True, - position=6, + argstr="%s", extensions=None, position=5, xor=["kernel_size"], ), + kernel_shape=dict(argstr="-kernel %s", position=4,), + kernel_size=dict(argstr="%.4f", position=5, xor=["kernel_file"],), + nan2zeros=dict(argstr="-nan", position=3,), + operation=dict(argstr="-f%s", mandatory=True, position=6,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-2, - ), - output_datatype=dict( - argstr='-odt %s', - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), + output_datatype=dict(argstr="-odt %s", position=-1,), output_type=dict(), ) inputs = SpatialFilter.input_spec() @@ -61,8 +26,10 @@ def test_SpatialFilter_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SpatialFilter_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = SpatialFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_Split.py b/nipype/interfaces/fsl/tests/test_auto_Split.py index 2c922ad583..26b814b9c0 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Split.py +++ b/nipype/interfaces/fsl/tests/test_auto_Split.py @@ -4,26 +4,11 @@ def test_Split_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict( - argstr='-%s', - mandatory=True, - position=2, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=0, - ), - out_base_name=dict( - argstr='%s', - position=1, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="-%s", mandatory=True, position=2,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=0,), + out_base_name=dict(argstr="%s", position=1,), output_type=dict(), ) inputs = Split.input_spec() @@ -31,8 +16,10 @@ def test_Split_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Split_outputs(): - output_map = dict(out_files=dict(), ) + output_map = dict(out_files=dict(),) outputs = Split.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_StdImage.py b/nipype/interfaces/fsl/tests/test_auto_StdImage.py index a8b7e764b5..073ebfa7ee 100644 --- a/nipype/interfaces/fsl/tests/test_auto_StdImage.py +++ b/nipype/interfaces/fsl/tests/test_auto_StdImage.py @@ -4,41 +4,16 @@ def test_StdImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict( - argstr='-%sstd', - position=4, - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - internal_datatype=dict( - argstr='-dt %s', - position=1, - ), - nan2zeros=dict( - argstr='-nan', - position=3, - ), + args=dict(argstr="%s",), + dimension=dict(argstr="-%sstd", position=4, usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + internal_datatype=dict(argstr="-dt %s", position=1,), + nan2zeros=dict(argstr="-nan", position=3,), out_file=dict( - argstr='%s', - 
extensions=None, - genfile=True, - hash_files=False, - position=-2, - ), - output_datatype=dict( - argstr='-odt %s', - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), + output_datatype=dict(argstr="-odt %s", position=-1,), output_type=dict(), ) inputs = StdImage.input_spec() @@ -46,8 +21,10 @@ def test_StdImage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_StdImage_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = StdImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_SwapDimensions.py b/nipype/interfaces/fsl/tests/test_auto_SwapDimensions.py index 241cfbcf27..ac56fad17e 100644 --- a/nipype/interfaces/fsl/tests/test_auto_SwapDimensions.py +++ b/nipype/interfaces/fsl/tests/test_auto_SwapDimensions.py @@ -4,27 +4,11 @@ def test_SwapDimensions_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position='1', - ), - new_dims=dict( - argstr='%s %s %s', - mandatory=True, - ), - out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position="1",), + new_dims=dict(argstr="%s %s %s", mandatory=True,), + out_file=dict(argstr="%s", extensions=None, genfile=True, hash_files=False,), output_type=dict(), ) inputs = SwapDimensions.input_spec() @@ -32,8 +16,10 @@ def test_SwapDimensions_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SwapDimensions_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = SwapDimensions.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_TOPUP.py b/nipype/interfaces/fsl/tests/test_auto_TOPUP.py index f98dbef518..f34023f799 100644 --- a/nipype/interfaces/fsl/tests/test_auto_TOPUP.py +++ b/nipype/interfaces/fsl/tests/test_auto_TOPUP.py @@ -4,112 +4,90 @@ def test_TOPUP_inputs(): input_map = dict( - args=dict(argstr='%s', ), - config=dict( - argstr='--config=%s', - usedefault=True, - ), + args=dict(argstr="%s",), + config=dict(argstr="--config=%s", usedefault=True,), encoding_direction=dict( - argstr='--datain=%s', + argstr="--datain=%s", mandatory=True, - requires=['readout_times'], - xor=['encoding_file'], + requires=["readout_times"], + xor=["encoding_file"], ), encoding_file=dict( - argstr='--datain=%s', + argstr="--datain=%s", extensions=None, mandatory=True, - xor=['encoding_direction'], - ), - environ=dict( - nohash=True, - usedefault=True, + xor=["encoding_direction"], ), - estmov=dict(argstr='--estmov=%d', ), - fwhm=dict(argstr='--fwhm=%f', ), - in_file=dict( - argstr='--imain=%s', - extensions=None, - mandatory=True, - ), - interp=dict(argstr='--interp=%s', ), - max_iter=dict(argstr='--miter=%d', ), - minmet=dict(argstr='--minmet=%d', ), - numprec=dict(argstr='--numprec=%s', ), + environ=dict(nohash=True, usedefault=True,), + estmov=dict(argstr="--estmov=%d",), + fwhm=dict(argstr="--fwhm=%f",), + 
in_file=dict(argstr="--imain=%s", extensions=None, mandatory=True,), + interp=dict(argstr="--interp=%s",), + max_iter=dict(argstr="--miter=%d",), + minmet=dict(argstr="--minmet=%d",), + numprec=dict(argstr="--numprec=%s",), out_base=dict( - argstr='--out=%s', + argstr="--out=%s", extensions=None, hash_files=False, - name_source=['in_file'], - name_template='%s_base', + name_source=["in_file"], + name_template="%s_base", ), out_corrected=dict( - argstr='--iout=%s', + argstr="--iout=%s", extensions=None, hash_files=False, - name_source=['in_file'], - name_template='%s_corrected', + name_source=["in_file"], + name_template="%s_corrected", ), out_field=dict( - argstr='--fout=%s', + argstr="--fout=%s", extensions=None, hash_files=False, - name_source=['in_file'], - name_template='%s_field', - ), - out_jac_prefix=dict( - argstr='--jacout=%s', - hash_files=False, - usedefault=True, + name_source=["in_file"], + name_template="%s_field", ), + out_jac_prefix=dict(argstr="--jacout=%s", hash_files=False, usedefault=True,), out_logfile=dict( - argstr='--logout=%s', + argstr="--logout=%s", extensions=None, hash_files=False, keep_extension=True, - name_source=['in_file'], - name_template='%s_topup.log', - ), - out_mat_prefix=dict( - argstr='--rbmout=%s', - hash_files=False, - usedefault=True, - ), - out_warp_prefix=dict( - argstr='--dfout=%s', - hash_files=False, - usedefault=True, + name_source=["in_file"], + name_template="%s_topup.log", ), + out_mat_prefix=dict(argstr="--rbmout=%s", hash_files=False, usedefault=True,), + out_warp_prefix=dict(argstr="--dfout=%s", hash_files=False, usedefault=True,), output_type=dict(), readout_times=dict( - mandatory=True, - requires=['encoding_direction'], - xor=['encoding_file'], + mandatory=True, requires=["encoding_direction"], xor=["encoding_file"], ), - reg_lambda=dict(argstr='--lambda=%0.f', ), - regmod=dict(argstr='--regmod=%s', ), - regrid=dict(argstr='--regrid=%d', ), - scale=dict(argstr='--scale=%d', ), - splineorder=dict(argstr='--splineorder=%d', ), - ssqlambda=dict(argstr='--ssqlambda=%d', ), - subsamp=dict(argstr='--subsamp=%d', ), - warp_res=dict(argstr='--warpres=%f', ), + reg_lambda=dict(argstr="--lambda=%0.f",), + regmod=dict(argstr="--regmod=%s",), + regrid=dict(argstr="--regrid=%d",), + scale=dict(argstr="--scale=%d",), + splineorder=dict(argstr="--splineorder=%d",), + ssqlambda=dict(argstr="--ssqlambda=%d",), + subsamp=dict(argstr="--subsamp=%d",), + warp_res=dict(argstr="--warpres=%f",), ) inputs = TOPUP.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TOPUP_outputs(): output_map = dict( - out_corrected=dict(extensions=None, ), - out_enc_file=dict(extensions=None, ), - out_field=dict(extensions=None, ), - out_fieldcoef=dict(extensions=None, ), + out_corrected=dict(extensions=None,), + out_enc_file=dict(extensions=None,), + out_field=dict(extensions=None,), + out_fieldcoef=dict(extensions=None,), out_jacs=dict(), - out_logfile=dict(extensions=None, ), + out_logfile=dict(extensions=None,), out_mats=dict(), - out_movpar=dict(extensions=None, ), + out_movpar=dict(extensions=None,), out_warps=dict(), ) outputs = TOPUP.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_TemporalFilter.py b/nipype/interfaces/fsl/tests/test_auto_TemporalFilter.py index 9f4ecfbd1a..a764c6c1a6 100644 --- a/nipype/interfaces/fsl/tests/test_auto_TemporalFilter.py +++ b/nipype/interfaces/fsl/tests/test_auto_TemporalFilter.py @@ -4,46 +4,17 
@@ def test_TemporalFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - highpass_sigma=dict( - argstr='-bptf %.6f', - position=4, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - internal_datatype=dict( - argstr='-dt %s', - position=1, - ), - lowpass_sigma=dict( - argstr='%.6f', - position=5, - usedefault=True, - ), - nan2zeros=dict( - argstr='-nan', - position=3, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + highpass_sigma=dict(argstr="-bptf %.6f", position=4, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + internal_datatype=dict(argstr="-dt %s", position=1,), + lowpass_sigma=dict(argstr="%.6f", position=5, usedefault=True,), + nan2zeros=dict(argstr="-nan", position=3,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-2, - ), - output_datatype=dict( - argstr='-odt %s', - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), + output_datatype=dict(argstr="-odt %s", position=-1,), output_type=dict(), ) inputs = TemporalFilter.input_spec() @@ -51,8 +22,10 @@ def test_TemporalFilter_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TemporalFilter_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = TemporalFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_Threshold.py b/nipype/interfaces/fsl/tests/test_auto_Threshold.py index 0b1eaeaa17..4156b8f82b 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Threshold.py +++ b/nipype/interfaces/fsl/tests/test_auto_Threshold.py @@ -4,44 +4,19 @@ def test_Threshold_inputs(): input_map = dict( - args=dict(argstr='%s', ), - direction=dict(usedefault=True, ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - internal_datatype=dict( - argstr='-dt %s', - position=1, - ), - nan2zeros=dict( - argstr='-nan', - position=3, - ), + args=dict(argstr="%s",), + direction=dict(usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + internal_datatype=dict(argstr="-dt %s", position=1,), + nan2zeros=dict(argstr="-nan", position=3,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-2, - ), - output_datatype=dict( - argstr='-odt %s', - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), + output_datatype=dict(argstr="-odt %s", position=-1,), output_type=dict(), - thresh=dict( - argstr='%s', - mandatory=True, - position=4, - ), - use_nonzero_voxels=dict(requires=['use_robust_range'], ), + thresh=dict(argstr="%s", mandatory=True, position=4,), + use_nonzero_voxels=dict(requires=["use_robust_range"],), use_robust_range=dict(), ) inputs = Threshold.input_spec() @@ -49,8 +24,10 @@ def test_Threshold_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Threshold_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map 
= dict(out_file=dict(extensions=None,),) outputs = Threshold.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_TractSkeleton.py b/nipype/interfaces/fsl/tests/test_auto_TractSkeleton.py index e630e10a40..f5c6c38f35 100644 --- a/nipype/interfaces/fsl/tests/test_auto_TractSkeleton.py +++ b/nipype/interfaces/fsl/tests/test_auto_TractSkeleton.py @@ -4,52 +4,34 @@ def test_TractSkeleton_inputs(): input_map = dict( - alt_data_file=dict( - argstr='-a %s', - extensions=None, - ), - alt_skeleton=dict( - argstr='-s %s', - extensions=None, - ), - args=dict(argstr='%s', ), - data_file=dict(extensions=None, ), - distance_map=dict(extensions=None, ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - ), + alt_data_file=dict(argstr="-a %s", extensions=None,), + alt_skeleton=dict(argstr="-s %s", extensions=None,), + args=dict(argstr="%s",), + data_file=dict(extensions=None,), + distance_map=dict(extensions=None,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-i %s", extensions=None, mandatory=True,), output_type=dict(), project_data=dict( - argstr='-p %.3f %s %s %s %s', - requires=['threshold', 'distance_map', 'data_file'], + argstr="-p %.3f %s %s %s %s", + requires=["threshold", "distance_map", "data_file"], ), - projected_data=dict(extensions=None, ), - search_mask_file=dict( - extensions=None, - xor=['use_cingulum_mask'], - ), - skeleton_file=dict(argstr='-o %s', ), + projected_data=dict(extensions=None,), + search_mask_file=dict(extensions=None, xor=["use_cingulum_mask"],), + skeleton_file=dict(argstr="-o %s",), threshold=dict(), - use_cingulum_mask=dict( - usedefault=True, - xor=['search_mask_file'], - ), + use_cingulum_mask=dict(usedefault=True, xor=["search_mask_file"],), ) inputs = TractSkeleton.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TractSkeleton_outputs(): output_map = dict( - projected_data=dict(extensions=None, ), - skeleton_file=dict(extensions=None, ), + projected_data=dict(extensions=None,), skeleton_file=dict(extensions=None,), ) outputs = TractSkeleton.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_Training.py b/nipype/interfaces/fsl/tests/test_auto_Training.py index 51617d97ed..5626f3e483 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Training.py +++ b/nipype/interfaces/fsl/tests/test_auto_Training.py @@ -4,32 +4,21 @@ def test_Training_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - loo=dict( - argstr='-l', - position=2, - ), - mel_icas=dict( - argstr='%s', - copyfile=False, - position=-1, - ), - trained_wts_filestem=dict( - argstr='%s', - position=1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + loo=dict(argstr="-l", position=2,), + mel_icas=dict(argstr="%s", copyfile=False, position=-1,), + trained_wts_filestem=dict(argstr="%s", position=1,), ) inputs = Training.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Training_outputs(): - output_map = dict(trained_wts_file=dict(extensions=None, ), ) + output_map = dict(trained_wts_file=dict(extensions=None,),) outputs = Training.output_spec() for key, metadata in list(output_map.items()): diff --git 
a/nipype/interfaces/fsl/tests/test_auto_TrainingSetCreator.py b/nipype/interfaces/fsl/tests/test_auto_TrainingSetCreator.py index edde69fc32..df4c1c2257 100644 --- a/nipype/interfaces/fsl/tests/test_auto_TrainingSetCreator.py +++ b/nipype/interfaces/fsl/tests/test_auto_TrainingSetCreator.py @@ -3,22 +3,16 @@ def test_TrainingSetCreator_inputs(): - input_map = dict(mel_icas_in=dict( - argstr='%s', - copyfile=False, - position=-1, - ), ) + input_map = dict(mel_icas_in=dict(argstr="%s", copyfile=False, position=-1,),) inputs = TrainingSetCreator.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TrainingSetCreator_outputs(): - output_map = dict(mel_icas_out=dict( - argstr='%s', - copyfile=False, - position=-1, - ), ) + output_map = dict(mel_icas_out=dict(argstr="%s", copyfile=False, position=-1,),) outputs = TrainingSetCreator.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_UnaryMaths.py b/nipype/interfaces/fsl/tests/test_auto_UnaryMaths.py index 20fde2afca..67662a8c7f 100644 --- a/nipype/interfaces/fsl/tests/test_auto_UnaryMaths.py +++ b/nipype/interfaces/fsl/tests/test_auto_UnaryMaths.py @@ -4,41 +4,16 @@ def test_UnaryMaths_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - internal_datatype=dict( - argstr='-dt %s', - position=1, - ), - nan2zeros=dict( - argstr='-nan', - position=3, - ), - operation=dict( - argstr='-%s', - mandatory=True, - position=4, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + internal_datatype=dict(argstr="-dt %s", position=1,), + nan2zeros=dict(argstr="-nan", position=3,), + operation=dict(argstr="-%s", mandatory=True, position=4,), out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - hash_files=False, - position=-2, - ), - output_datatype=dict( - argstr='-odt %s', - position=-1, + argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), + output_datatype=dict(argstr="-odt %s", position=-1,), output_type=dict(), ) inputs = UnaryMaths.input_spec() @@ -46,8 +21,10 @@ def test_UnaryMaths_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_UnaryMaths_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = UnaryMaths.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_VecReg.py b/nipype/interfaces/fsl/tests/test_auto_VecReg.py index 41c74d2eb2..4f802628cd 100644 --- a/nipype/interfaces/fsl/tests/test_auto_VecReg.py +++ b/nipype/interfaces/fsl/tests/test_auto_VecReg.py @@ -4,61 +4,29 @@ def test_VecReg_inputs(): input_map = dict( - affine_mat=dict( - argstr='-t %s', - extensions=None, - ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - ), - interpolation=dict(argstr='--interp=%s', ), - mask=dict( - argstr='-m %s', - extensions=None, - ), - out_file=dict( - argstr='-o %s', - extensions=None, - genfile=True, - hash_files=False, - ), + 
affine_mat=dict(argstr="-t %s", extensions=None,), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-i %s", extensions=None, mandatory=True,), + interpolation=dict(argstr="--interp=%s",), + mask=dict(argstr="-m %s", extensions=None,), + out_file=dict(argstr="-o %s", extensions=None, genfile=True, hash_files=False,), output_type=dict(), - ref_mask=dict( - argstr='--refmask=%s', - extensions=None, - ), - ref_vol=dict( - argstr='-r %s', - extensions=None, - mandatory=True, - ), - rotation_mat=dict( - argstr='--rotmat=%s', - extensions=None, - ), - rotation_warp=dict( - argstr='--rotwarp=%s', - extensions=None, - ), - warp_field=dict( - argstr='-w %s', - extensions=None, - ), + ref_mask=dict(argstr="--refmask=%s", extensions=None,), + ref_vol=dict(argstr="-r %s", extensions=None, mandatory=True,), + rotation_mat=dict(argstr="--rotmat=%s", extensions=None,), + rotation_warp=dict(argstr="--rotwarp=%s", extensions=None,), + warp_field=dict(argstr="-w %s", extensions=None,), ) inputs = VecReg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_VecReg_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = VecReg.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_WarpPoints.py b/nipype/interfaces/fsl/tests/test_auto_WarpPoints.py index 541cff38aa..70980aa1be 100644 --- a/nipype/interfaces/fsl/tests/test_auto_WarpPoints.py +++ b/nipype/interfaces/fsl/tests/test_auto_WarpPoints.py @@ -4,59 +4,31 @@ def test_WarpPoints_inputs(): input_map = dict( - args=dict(argstr='%s', ), - coord_mm=dict( - argstr='-mm', - xor=['coord_vox'], - ), - coord_vox=dict( - argstr='-vox', - xor=['coord_mm'], - ), - dest_file=dict( - argstr='-dest %s', - extensions=None, - mandatory=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_coords=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - ), + args=dict(argstr="%s",), + coord_mm=dict(argstr="-mm", xor=["coord_vox"],), + coord_vox=dict(argstr="-vox", xor=["coord_mm"],), + dest_file=dict(argstr="-dest %s", extensions=None, mandatory=True,), + environ=dict(nohash=True, usedefault=True,), + in_coords=dict(argstr="%s", extensions=None, mandatory=True, position=-1,), out_file=dict( extensions=None, - name_source='in_coords', - name_template='%s_warped', - output_name='out_file', - ), - src_file=dict( - argstr='-src %s', - extensions=None, - mandatory=True, - ), - warp_file=dict( - argstr='-warp %s', - extensions=None, - xor=['xfm_file'], - ), - xfm_file=dict( - argstr='-xfm %s', - extensions=None, - xor=['warp_file'], + name_source="in_coords", + name_template="%s_warped", + output_name="out_file", ), + src_file=dict(argstr="-src %s", extensions=None, mandatory=True,), + warp_file=dict(argstr="-warp %s", extensions=None, xor=["xfm_file"],), + xfm_file=dict(argstr="-xfm %s", extensions=None, xor=["warp_file"],), ) inputs = WarpPoints.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_WarpPoints_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = WarpPoints.output_spec() for key, metadata in list(output_map.items()): diff --git 
a/nipype/interfaces/fsl/tests/test_auto_WarpPointsFromStd.py b/nipype/interfaces/fsl/tests/test_auto_WarpPointsFromStd.py index 369794e3c0..a6fa949890 100644 --- a/nipype/interfaces/fsl/tests/test_auto_WarpPointsFromStd.py +++ b/nipype/interfaces/fsl/tests/test_auto_WarpPointsFromStd.py @@ -4,53 +4,25 @@ def test_WarpPointsFromStd_inputs(): input_map = dict( - args=dict(argstr='%s', ), - coord_mm=dict( - argstr='-mm', - xor=['coord_vox'], - ), - coord_vox=dict( - argstr='-vox', - xor=['coord_mm'], - ), - environ=dict( - nohash=True, - usedefault=True, - ), - img_file=dict( - argstr='-img %s', - extensions=None, - mandatory=True, - ), - in_coords=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - std_file=dict( - argstr='-std %s', - extensions=None, - mandatory=True, - ), - warp_file=dict( - argstr='-warp %s', - extensions=None, - xor=['xfm_file'], - ), - xfm_file=dict( - argstr='-xfm %s', - extensions=None, - xor=['warp_file'], - ), + args=dict(argstr="%s",), + coord_mm=dict(argstr="-mm", xor=["coord_vox"],), + coord_vox=dict(argstr="-vox", xor=["coord_mm"],), + environ=dict(nohash=True, usedefault=True,), + img_file=dict(argstr="-img %s", extensions=None, mandatory=True,), + in_coords=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + std_file=dict(argstr="-std %s", extensions=None, mandatory=True,), + warp_file=dict(argstr="-warp %s", extensions=None, xor=["xfm_file"],), + xfm_file=dict(argstr="-xfm %s", extensions=None, xor=["warp_file"],), ) inputs = WarpPointsFromStd.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_WarpPointsFromStd_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = WarpPointsFromStd.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_WarpPointsToStd.py b/nipype/interfaces/fsl/tests/test_auto_WarpPointsToStd.py index 9c62aa6ec7..9debbe6a74 100644 --- a/nipype/interfaces/fsl/tests/test_auto_WarpPointsToStd.py +++ b/nipype/interfaces/fsl/tests/test_auto_WarpPointsToStd.py @@ -4,63 +4,32 @@ def test_WarpPointsToStd_inputs(): input_map = dict( - args=dict(argstr='%s', ), - coord_mm=dict( - argstr='-mm', - xor=['coord_vox'], - ), - coord_vox=dict( - argstr='-vox', - xor=['coord_mm'], - ), - environ=dict( - nohash=True, - usedefault=True, - ), - img_file=dict( - argstr='-img %s', - extensions=None, - mandatory=True, - ), - in_coords=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - ), + args=dict(argstr="%s",), + coord_mm=dict(argstr="-mm", xor=["coord_vox"],), + coord_vox=dict(argstr="-vox", xor=["coord_mm"],), + environ=dict(nohash=True, usedefault=True,), + img_file=dict(argstr="-img %s", extensions=None, mandatory=True,), + in_coords=dict(argstr="%s", extensions=None, mandatory=True, position=-1,), out_file=dict( extensions=None, - name_source='in_coords', - name_template='%s_warped', - output_name='out_file', - ), - premat_file=dict( - argstr='-premat %s', - extensions=None, - ), - std_file=dict( - argstr='-std %s', - extensions=None, - mandatory=True, - ), - warp_file=dict( - argstr='-warp %s', - extensions=None, - xor=['xfm_file'], - ), - xfm_file=dict( - argstr='-xfm %s', - extensions=None, - xor=['warp_file'], - ), + name_source="in_coords", + name_template="%s_warped", + output_name="out_file", + ), + premat_file=dict(argstr="-premat 
%s", extensions=None,), + std_file=dict(argstr="-std %s", extensions=None, mandatory=True,), + warp_file=dict(argstr="-warp %s", extensions=None, xor=["xfm_file"],), + xfm_file=dict(argstr="-xfm %s", extensions=None, xor=["warp_file"],), ) inputs = WarpPointsToStd.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_WarpPointsToStd_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = WarpPointsToStd.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_auto_WarpUtils.py b/nipype/interfaces/fsl/tests/test_auto_WarpUtils.py index ab91cf8d41..cdb0e86e64 100644 --- a/nipype/interfaces/fsl/tests/test_auto_WarpUtils.py +++ b/nipype/interfaces/fsl/tests/test_auto_WarpUtils.py @@ -4,51 +4,35 @@ def test_WarpUtils_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='--in=%s', - extensions=None, - mandatory=True, - ), - knot_space=dict(argstr='--knotspace=%d,%d,%d', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="--in=%s", extensions=None, mandatory=True,), + knot_space=dict(argstr="--knotspace=%d,%d,%d",), out_file=dict( - argstr='--out=%s', + argstr="--out=%s", extensions=None, - name_source=['in_file'], - output_name='out_file', + name_source=["in_file"], + output_name="out_file", position=-1, ), - out_format=dict(argstr='--outformat=%s', ), - out_jacobian=dict( - argstr='--jac=%s', - extensions=None, - ), + out_format=dict(argstr="--outformat=%s",), + out_jacobian=dict(argstr="--jac=%s", extensions=None,), output_type=dict(), - reference=dict( - argstr='--ref=%s', - extensions=None, - mandatory=True, - ), - warp_resolution=dict(argstr='--warpres=%0.4f,%0.4f,%0.4f', ), - with_affine=dict(argstr='--withaff', ), - write_jacobian=dict( - mandatory=True, - usedefault=True, - ), + reference=dict(argstr="--ref=%s", extensions=None, mandatory=True,), + warp_resolution=dict(argstr="--warpres=%0.4f,%0.4f,%0.4f",), + with_affine=dict(argstr="--withaff",), + write_jacobian=dict(mandatory=True, usedefault=True,), ) inputs = WarpUtils.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_WarpUtils_outputs(): output_map = dict( - out_file=dict(extensions=None, ), - out_jacobian=dict(extensions=None, ), + out_file=dict(extensions=None,), out_jacobian=dict(extensions=None,), ) outputs = WarpUtils.output_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_XFibres5.py b/nipype/interfaces/fsl/tests/test_auto_XFibres5.py index 1a8bb61389..9a4e973569 100644 --- a/nipype/interfaces/fsl/tests/test_auto_XFibres5.py +++ b/nipype/interfaces/fsl/tests/test_auto_XFibres5.py @@ -4,115 +4,53 @@ def test_XFibres5_inputs(): input_map = dict( - all_ard=dict( - argstr='--allard', - xor=('no_ard', 'all_ard'), - ), - args=dict(argstr='%s', ), - burn_in=dict( - argstr='--burnin=%d', - usedefault=True, - ), - burn_in_no_ard=dict( - argstr='--burnin_noard=%d', - usedefault=True, - ), - bvals=dict( - argstr='--bvals=%s', - extensions=None, - mandatory=True, - ), - bvecs=dict( - argstr='--bvecs=%s', - extensions=None, - mandatory=True, - ), + all_ard=dict(argstr="--allard", xor=("no_ard", "all_ard"),), + args=dict(argstr="%s",), + 
burn_in=dict(argstr="--burnin=%d", usedefault=True,), + burn_in_no_ard=dict(argstr="--burnin_noard=%d", usedefault=True,), + bvals=dict(argstr="--bvals=%s", extensions=None, mandatory=True,), + bvecs=dict(argstr="--bvecs=%s", extensions=None, mandatory=True,), cnlinear=dict( - argstr='--cnonlinear', - xor=('no_spat', 'non_linear', 'cnlinear'), - ), - dwi=dict( - argstr='--data=%s', - extensions=None, - mandatory=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - f0_ard=dict( - argstr='--f0 --ardf0', - xor=['f0_noard', 'f0_ard', 'all_ard'], - ), - f0_noard=dict( - argstr='--f0', - xor=['f0_noard', 'f0_ard'], - ), - force_dir=dict( - argstr='--forcedir', - usedefault=True, - ), - fudge=dict(argstr='--fudge=%d', ), - gradnonlin=dict( - argstr='--gradnonlin=%s', - extensions=None, - ), - logdir=dict( - argstr='--logdir=%s', - usedefault=True, - ), - mask=dict( - argstr='--mask=%s', - extensions=None, - mandatory=True, - ), - model=dict(argstr='--model=%d', ), - n_fibres=dict( - argstr='--nfibres=%d', - mandatory=True, - usedefault=True, - ), - n_jumps=dict( - argstr='--njumps=%d', - usedefault=True, - ), - no_ard=dict( - argstr='--noard', - xor=('no_ard', 'all_ard'), - ), - no_spat=dict( - argstr='--nospat', - xor=('no_spat', 'non_linear', 'cnlinear'), - ), + argstr="--cnonlinear", xor=("no_spat", "non_linear", "cnlinear"), + ), + dwi=dict(argstr="--data=%s", extensions=None, mandatory=True,), + environ=dict(nohash=True, usedefault=True,), + f0_ard=dict(argstr="--f0 --ardf0", xor=["f0_noard", "f0_ard", "all_ard"],), + f0_noard=dict(argstr="--f0", xor=["f0_noard", "f0_ard"],), + force_dir=dict(argstr="--forcedir", usedefault=True,), + fudge=dict(argstr="--fudge=%d",), + gradnonlin=dict(argstr="--gradnonlin=%s", extensions=None,), + logdir=dict(argstr="--logdir=%s", usedefault=True,), + mask=dict(argstr="--mask=%s", extensions=None, mandatory=True,), + model=dict(argstr="--model=%d",), + n_fibres=dict(argstr="--nfibres=%d", mandatory=True, usedefault=True,), + n_jumps=dict(argstr="--njumps=%d", usedefault=True,), + no_ard=dict(argstr="--noard", xor=("no_ard", "all_ard"),), + no_spat=dict(argstr="--nospat", xor=("no_spat", "non_linear", "cnlinear"),), non_linear=dict( - argstr='--nonlinear', - xor=('no_spat', 'non_linear', 'cnlinear'), + argstr="--nonlinear", xor=("no_spat", "non_linear", "cnlinear"), ), output_type=dict(), - rician=dict(argstr='--rician', ), - sample_every=dict( - argstr='--sampleevery=%d', - usedefault=True, - ), - seed=dict(argstr='--seed=%d', ), - update_proposal_every=dict( - argstr='--updateproposalevery=%d', - usedefault=True, - ), + rician=dict(argstr="--rician",), + sample_every=dict(argstr="--sampleevery=%d", usedefault=True,), + seed=dict(argstr="--seed=%d",), + update_proposal_every=dict(argstr="--updateproposalevery=%d", usedefault=True,), ) inputs = XFibres5.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_XFibres5_outputs(): output_map = dict( dyads=dict(), fsamples=dict(), - mean_S0samples=dict(extensions=None, ), - mean_dsamples=dict(extensions=None, ), + mean_S0samples=dict(extensions=None,), + mean_dsamples=dict(extensions=None,), mean_fsamples=dict(), - mean_tausamples=dict(extensions=None, ), + mean_tausamples=dict(extensions=None,), phsamples=dict(), thsamples=dict(), ) diff --git a/nipype/interfaces/fsl/tests/test_base.py b/nipype/interfaces/fsl/tests/test_base.py index 71022997b6..52f93b545f 100644 --- 
a/nipype/interfaces/fsl/tests/test_base.py +++ b/nipype/interfaces/fsl/tests/test_base.py @@ -13,8 +13,8 @@ @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_fslversion(): ver = fsl.Info.version() - ver = ver.split('.') - assert ver[0] in ['4', '5'] + ver = ver.split(".") + assert ver[0] in ["4", "5"] @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -30,14 +30,14 @@ def test_outputtype_to_ext(): assert res == ext with pytest.raises(KeyError): - fsl.Info.output_type_to_ext('JUNK') + fsl.Info.output_type_to_ext("JUNK") @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_FSLCommand(): # Most methods in FSLCommand are tested in the subclasses. Only # testing the one item that is not. - cmd = fsl.FSLCommand(command='ls') + cmd = fsl.FSLCommand(command="ls") res = cmd.run() assert type(res) == InterfaceResult @@ -45,13 +45,13 @@ def test_FSLCommand(): @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_FSLCommand2(): # Check default output type and environ - cmd = fsl.FSLCommand(command='junk') + cmd = fsl.FSLCommand(command="junk") assert cmd._output_type == fsl.Info.output_type() - assert cmd.inputs.environ['FSLOUTPUTTYPE'] == cmd._output_type + assert cmd.inputs.environ["FSLOUTPUTTYPE"] == cmd._output_type assert cmd._output_type in fsl.Info.ftypes cmd = fsl.FSLCommand - cmdinst = fsl.FSLCommand(command='junk') + cmdinst = fsl.FSLCommand(command="junk") for out_type in fsl.Info.ftypes: cmd.set_default_output_type(out_type) assert cmd._output_type == out_type @@ -64,38 +64,23 @@ def test_FSLCommand2(): @pytest.mark.parametrize( "args, desired_name", [ - ({}, { - "file": 'foo.nii.gz' - }), # just the filename + ({}, {"file": "foo.nii.gz"}), # just the filename # filename with suffix - ({ - "suffix": '_brain' - }, { - "file": 'foo_brain.nii.gz' - }), + ({"suffix": "_brain"}, {"file": "foo_brain.nii.gz"}), ( - { - "suffix": '_brain', - "cwd": '/data' - }, + {"suffix": "_brain", "cwd": "/data"}, # filename with suffix and working directory - { - "dir": '/data', - "file": 'foo_brain.nii.gz' - }), + {"dir": "/data", "file": "foo_brain.nii.gz"}, + ), # filename with suffix and no file extension change - ({ - "suffix": '_brain.mat', - "change_ext": False - }, { - "file": 'foo_brain.mat' - }) - ]) + ({"suffix": "_brain.mat", "change_ext": False}, {"file": "foo_brain.mat"}), + ], +) def test_gen_fname(args, desired_name): # Test _gen_fname method of FSLCommand - cmd = fsl.FSLCommand(command='junk', output_type='NIFTI_GZ') + cmd = fsl.FSLCommand(command="junk", output_type="NIFTI_GZ") pth = os.getcwd() - fname = cmd._gen_fname('foo.nii.gz', **args) + fname = cmd._gen_fname("foo.nii.gz", **args) if "dir" in desired_name.keys(): desired = os.path.join(desired_name["dir"], desired_name["file"]) else: diff --git a/nipype/interfaces/fsl/tests/test_dti.py b/nipype/interfaces/fsl/tests/test_dti.py index 19d807d217..8f5abfc662 100644 --- a/nipype/interfaces/fsl/tests/test_dti.py +++ b/nipype/interfaces/fsl/tests/test_dti.py @@ -17,7 +17,7 @@ def test_dtifit2(create_files_in_directory): filelist, outdir = create_files_in_directory dti = fsl.DTIFit() # make sure command gets called - assert dti.cmd == 'dtifit' + assert dti.cmd == "dtifit" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -25,211 +25,203 @@ def test_dtifit2(create_files_in_directory): # .inputs based parameters setting dti.inputs.dwi = filelist[0] - dti.inputs.base_name = 'foo.dti.nii' + dti.inputs.base_name = "foo.dti.nii" dti.inputs.mask = 
filelist[1] dti.inputs.bvecs = filelist[0] dti.inputs.bvals = filelist[1] dti.inputs.min_z = 10 dti.inputs.max_z = 50 - assert dti.cmdline == \ - 'dtifit -k %s -o foo.dti.nii -m %s -r %s -b %s -Z 50 -z 10' % (filelist[0], - filelist[1], - filelist[0], - filelist[1]) + assert ( + dti.cmdline + == "dtifit -k %s -o foo.dti.nii -m %s -r %s -b %s -Z 50 -z 10" + % (filelist[0], filelist[1], filelist[0], filelist[1]) + ) -@pytest.mark.xfail( - reason="These tests are skipped until we clean up some of this code") +@pytest.mark.xfail(reason="These tests are skipped until we clean up some of this code") def test_randomise2(): rand = fsl.Randomise() # make sure command gets called - assert rand.cmd == 'randomise' + assert rand.cmd == "randomise" # test raising error with mandatory args absent with pytest.raises(ValueError): rand.run() # .inputs based parameters setting - rand.inputs.input_4D = 'infile.nii' - rand.inputs.output_rootname = 'outfile' - rand.inputs.design_matrix = 'design.mat' - rand.inputs.t_contrast = 'infile.con' + rand.inputs.input_4D = "infile.nii" + rand.inputs.output_rootname = "outfile" + rand.inputs.design_matrix = "design.mat" + rand.inputs.t_contrast = "infile.con" actualCmdline = sorted(rand.cmdline.split()) - cmd = 'randomise -i infile.nii -o outfile -d design.mat -t infile.con' + cmd = "randomise -i infile.nii -o outfile -d design.mat -t infile.con" desiredCmdline = sorted(cmd.split()) assert actualCmdline == desiredCmdline # .run based parameter setting rand2 = fsl.Randomise( - input_4D='infile2', - output_rootname='outfile2', - f_contrast='infile.f', + input_4D="infile2", + output_rootname="outfile2", + f_contrast="infile.f", one_sample_gmean=True, - int_seed=4) + int_seed=4, + ) actualCmdline = sorted(rand2.cmdline.split()) - cmd = 'randomise -i infile2 -o outfile2 -1 -f infile.f --seed=4' + cmd = "randomise -i infile2 -o outfile2 -1 -f infile.f --seed=4" desiredCmdline = sorted(cmd.split()) assert actualCmdline == desiredCmdline rand3 = fsl.Randomise() - results = rand3.run(input_4D='infile3', output_rootname='outfile3') - assert results.runtime.cmdline == \ - 'randomise -i infile3 -o outfile3' + results = rand3.run(input_4D="infile3", output_rootname="outfile3") + assert results.runtime.cmdline == "randomise -i infile3 -o outfile3" # test arguments for opt_map opt_map = { - 'demean_data': ('-D', True), - 'one_sample_gmean': ('-1', True), - 'mask_image': ('-m inp_mask', 'inp_mask'), - 'design_matrix': ('-d design.mat', 'design.mat'), - 't_contrast': ('-t input.con', 'input.con'), - 'f_contrast': ('-f input.fts', 'input.fts'), - 'xchange_block_labels': ('-e design.grp', 'design.grp'), - 'print_unique_perm': ('-q', True), - 'print_info_parallelMode': ('-Q', True), - 'num_permutations': ('-n 10', 10), - 'vox_pvalus': ('-x', True), - 'fstats_only': ('--fonly', True), - 'thresh_free_cluster': ('-T', True), - 'thresh_free_cluster_2Dopt': ('--T2', True), - 'cluster_thresholding': ('-c 0.20', 0.20), - 'cluster_mass_thresholding': ('-C 0.40', 0.40), - 'fcluster_thresholding': ('-F 0.10', 0.10), - 'fcluster_mass_thresholding': ('-S 0.30', 0.30), - 'variance_smoothing': ('-v 0.20', 0.20), - 'diagnostics_off': ('--quiet', True), - 'output_raw': ('-R', True), - 'output_perm_vect': ('-P', True), - 'int_seed': ('--seed=20', 20), - 'TFCE_height_param': ('--tfce_H=0.11', 0.11), - 'TFCE_extent_param': ('--tfce_E=0.50', 0.50), - 'TFCE_connectivity': ('--tfce_C=0.30', 0.30), - 'list_num_voxel_EVs_pos': ('--vxl=1,2,3,4', '1,2,3,4'), - 'list_img_voxel_EVs': ('--vxf=6,7,8,9,3', '6,7,8,9,3') + 
"demean_data": ("-D", True), + "one_sample_gmean": ("-1", True), + "mask_image": ("-m inp_mask", "inp_mask"), + "design_matrix": ("-d design.mat", "design.mat"), + "t_contrast": ("-t input.con", "input.con"), + "f_contrast": ("-f input.fts", "input.fts"), + "xchange_block_labels": ("-e design.grp", "design.grp"), + "print_unique_perm": ("-q", True), + "print_info_parallelMode": ("-Q", True), + "num_permutations": ("-n 10", 10), + "vox_pvalus": ("-x", True), + "fstats_only": ("--fonly", True), + "thresh_free_cluster": ("-T", True), + "thresh_free_cluster_2Dopt": ("--T2", True), + "cluster_thresholding": ("-c 0.20", 0.20), + "cluster_mass_thresholding": ("-C 0.40", 0.40), + "fcluster_thresholding": ("-F 0.10", 0.10), + "fcluster_mass_thresholding": ("-S 0.30", 0.30), + "variance_smoothing": ("-v 0.20", 0.20), + "diagnostics_off": ("--quiet", True), + "output_raw": ("-R", True), + "output_perm_vect": ("-P", True), + "int_seed": ("--seed=20", 20), + "TFCE_height_param": ("--tfce_H=0.11", 0.11), + "TFCE_extent_param": ("--tfce_E=0.50", 0.50), + "TFCE_connectivity": ("--tfce_C=0.30", 0.30), + "list_num_voxel_EVs_pos": ("--vxl=1,2,3,4", "1,2,3,4"), + "list_img_voxel_EVs": ("--vxf=6,7,8,9,3", "6,7,8,9,3"), } for name, settings in list(opt_map.items()): rand4 = fsl.Randomise( - input_4D='infile', output_rootname='root', **{ - name: settings[1] - }) - assert rand4.cmdline == rand4.cmd + ' -i infile -o root ' + settings[0] + input_4D="infile", output_rootname="root", **{name: settings[1]} + ) + assert rand4.cmdline == rand4.cmd + " -i infile -o root " + settings[0] -@pytest.mark.xfail( - reason="These tests are skipped until we clean up some of this code") +@pytest.mark.xfail(reason="These tests are skipped until we clean up some of this code") def test_Randomise_parallel(): rand = fsl.Randomise_parallel() # make sure command gets called - assert rand.cmd == 'randomise_parallel' + assert rand.cmd == "randomise_parallel" # test raising error with mandatory args absent with pytest.raises(ValueError): rand.run() # .inputs based parameters setting - rand.inputs.input_4D = 'infile.nii' - rand.inputs.output_rootname = 'outfile' - rand.inputs.design_matrix = 'design.mat' - rand.inputs.t_contrast = 'infile.con' + rand.inputs.input_4D = "infile.nii" + rand.inputs.output_rootname = "outfile" + rand.inputs.design_matrix = "design.mat" + rand.inputs.t_contrast = "infile.con" actualCmdline = sorted(rand.cmdline.split()) - cmd = ('randomise_parallel -i infile.nii -o outfile -d design.mat -t ' - 'infile.con') + cmd = "randomise_parallel -i infile.nii -o outfile -d design.mat -t " "infile.con" desiredCmdline = sorted(cmd.split()) assert actualCmdline == desiredCmdline # .run based parameter setting rand2 = fsl.Randomise_parallel( - input_4D='infile2', - output_rootname='outfile2', - f_contrast='infile.f', + input_4D="infile2", + output_rootname="outfile2", + f_contrast="infile.f", one_sample_gmean=True, - int_seed=4) + int_seed=4, + ) actualCmdline = sorted(rand2.cmdline.split()) - cmd = 'randomise_parallel -i infile2 -o outfile2 -1 -f infile.f --seed=4' + cmd = "randomise_parallel -i infile2 -o outfile2 -1 -f infile.f --seed=4" desiredCmdline = sorted(cmd.split()) assert actualCmdline == desiredCmdline rand3 = fsl.Randomise_parallel() - results = rand3.run(input_4D='infile3', output_rootname='outfile3') - assert results.runtime.cmdline == \ - 'randomise_parallel -i infile3 -o outfile3' + results = rand3.run(input_4D="infile3", output_rootname="outfile3") + assert results.runtime.cmdline == "randomise_parallel -i 
infile3 -o outfile3" # test arguments for opt_map opt_map = { - 'demean_data': ('-D', True), - 'one_sample_gmean': ('-1', True), - 'mask_image': ('-m inp_mask', 'inp_mask'), - 'design_matrix': ('-d design.mat', 'design.mat'), - 't_contrast': ('-t input.con', 'input.con'), - 'f_contrast': ('-f input.fts', 'input.fts'), - 'xchange_block_labels': ('-e design.grp', 'design.grp'), - 'print_unique_perm': ('-q', True), - 'print_info_parallelMode': ('-Q', True), - 'num_permutations': ('-n 10', 10), - 'vox_pvalus': ('-x', True), - 'fstats_only': ('--fonly', True), - 'thresh_free_cluster': ('-T', True), - 'thresh_free_cluster_2Dopt': ('--T2', True), - 'cluster_thresholding': ('-c 0.20', 0.20), - 'cluster_mass_thresholding': ('-C 0.40', 0.40), - 'fcluster_thresholding': ('-F 0.10', 0.10), - 'fcluster_mass_thresholding': ('-S 0.30', 0.30), - 'variance_smoothing': ('-v 0.20', 0.20), - 'diagnostics_off': ('--quiet', True), - 'output_raw': ('-R', True), - 'output_perm_vect': ('-P', True), - 'int_seed': ('--seed=20', 20), - 'TFCE_height_param': ('--tfce_H=0.11', 0.11), - 'TFCE_extent_param': ('--tfce_E=0.50', 0.50), - 'TFCE_connectivity': ('--tfce_C=0.30', 0.30), - 'list_num_voxel_EVs_pos': ('--vxl=' + repr([1, 2, 3, 4]), - repr([1, 2, 3, 4])), - 'list_img_voxel_EVs': ('--vxf=' + repr([6, 7, 8, 9, 3]), - repr([6, 7, 8, 9, 3])) + "demean_data": ("-D", True), + "one_sample_gmean": ("-1", True), + "mask_image": ("-m inp_mask", "inp_mask"), + "design_matrix": ("-d design.mat", "design.mat"), + "t_contrast": ("-t input.con", "input.con"), + "f_contrast": ("-f input.fts", "input.fts"), + "xchange_block_labels": ("-e design.grp", "design.grp"), + "print_unique_perm": ("-q", True), + "print_info_parallelMode": ("-Q", True), + "num_permutations": ("-n 10", 10), + "vox_pvalus": ("-x", True), + "fstats_only": ("--fonly", True), + "thresh_free_cluster": ("-T", True), + "thresh_free_cluster_2Dopt": ("--T2", True), + "cluster_thresholding": ("-c 0.20", 0.20), + "cluster_mass_thresholding": ("-C 0.40", 0.40), + "fcluster_thresholding": ("-F 0.10", 0.10), + "fcluster_mass_thresholding": ("-S 0.30", 0.30), + "variance_smoothing": ("-v 0.20", 0.20), + "diagnostics_off": ("--quiet", True), + "output_raw": ("-R", True), + "output_perm_vect": ("-P", True), + "int_seed": ("--seed=20", 20), + "TFCE_height_param": ("--tfce_H=0.11", 0.11), + "TFCE_extent_param": ("--tfce_E=0.50", 0.50), + "TFCE_connectivity": ("--tfce_C=0.30", 0.30), + "list_num_voxel_EVs_pos": ("--vxl=" + repr([1, 2, 3, 4]), repr([1, 2, 3, 4])), + "list_img_voxel_EVs": ("--vxf=" + repr([6, 7, 8, 9, 3]), repr([6, 7, 8, 9, 3])), } for name, settings in list(opt_map.items()): rand4 = fsl.Randomise_parallel( - input_4D='infile', output_rootname='root', **{ - name: settings[1] - }) - assert rand4.cmdline == rand4.cmd + ' -i infile -o root ' + settings[0] + input_4D="infile", output_rootname="root", **{name: settings[1]} + ) + assert rand4.cmdline == rand4.cmd + " -i infile -o root " + settings[0] # test proj_thresh -@pytest.mark.xfail( - reason="These tests are skipped until we clean up some of this code") +@pytest.mark.xfail(reason="These tests are skipped until we clean up some of this code") def test_Proj_thresh(): proj = fsl.ProjThresh() # make sure command gets called - assert proj.cmd == 'proj_thresh' + assert proj.cmd == "proj_thresh" # test raising error with mandatory args absent with pytest.raises(ValueError): proj.run() # .inputs based parameters setting - proj.inputs.volumes = ['vol1', 'vol2', 'vol3'] + proj.inputs.volumes = ["vol1", "vol2", "vol3"] 
proj.inputs.threshold = 3 - assert proj.cmdline == 'proj_thresh vol1 vol2 vol3 3' + assert proj.cmdline == "proj_thresh vol1 vol2 vol3 3" - proj2 = fsl.ProjThresh(threshold=10, volumes=['vola', 'volb']) - assert proj2.cmdline == 'proj_thresh vola volb 10' + proj2 = fsl.ProjThresh(threshold=10, volumes=["vola", "volb"]) + assert proj2.cmdline == "proj_thresh vola volb 10" # .run based parameters setting proj3 = fsl.ProjThresh() - results = proj3.run(volumes=['inp1', 'inp3', 'inp2'], threshold=2) - assert results.runtime.cmdline == 'proj_thresh inp1 inp3 inp2 2' + results = proj3.run(volumes=["inp1", "inp3", "inp2"], threshold=2) + assert results.runtime.cmdline == "proj_thresh inp1 inp3 inp2 2" assert results.runtime.returncode != 0 assert isinstance(results.interface.inputs.volumes, list) assert results.interface.inputs.threshold == 2 @@ -239,103 +231,104 @@ def test_Proj_thresh(): # test vec_reg -@pytest.mark.xfail( - reason="These tests are skipped until we clean up some of this code") +@pytest.mark.xfail(reason="These tests are skipped until we clean up some of this code") def test_Vec_reg(): vrg = fsl.VecReg() # make sure command gets called - assert vrg.cmd == 'vecreg' + assert vrg.cmd == "vecreg" # test raising error with mandatory args absent with pytest.raises(ValueError): vrg.run() # .inputs based parameters setting - vrg.inputs.infile = 'infile' - vrg.inputs.outfile = 'outfile' - vrg.inputs.refVolName = 'MNI152' - vrg.inputs.affineTmat = 'tmat.mat' - assert vrg.cmdline == 'vecreg -i infile -o outfile -r MNI152 -t tmat.mat' + vrg.inputs.infile = "infile" + vrg.inputs.outfile = "outfile" + vrg.inputs.refVolName = "MNI152" + vrg.inputs.affineTmat = "tmat.mat" + assert vrg.cmdline == "vecreg -i infile -o outfile -r MNI152 -t tmat.mat" # .run based parameter setting vrg2 = fsl.VecReg( - infile='infile2', - outfile='outfile2', - refVolName='MNI152', - affineTmat='tmat2.mat', - brainMask='nodif_brain_mask') + infile="infile2", + outfile="outfile2", + refVolName="MNI152", + affineTmat="tmat2.mat", + brainMask="nodif_brain_mask", + ) actualCmdline = sorted(vrg2.cmdline.split()) - cmd = 'vecreg -i infile2 -o outfile2 -r MNI152 -t tmat2.mat -m nodif_brain_mask' + cmd = "vecreg -i infile2 -o outfile2 -r MNI152 -t tmat2.mat -m nodif_brain_mask" desiredCmdline = sorted(cmd.split()) assert actualCmdline == desiredCmdline vrg3 = fsl.VecReg() results = vrg3.run( - infile='infile3', - outfile='outfile3', - refVolName='MNI152', - affineTmat='tmat3.mat', + infile="infile3", + outfile="outfile3", + refVolName="MNI152", + affineTmat="tmat3.mat", ) - assert results.runtime.cmdline == \ - 'vecreg -i infile3 -o outfile3 -r MNI152 -t tmat3.mat' + assert ( + results.runtime.cmdline + == "vecreg -i infile3 -o outfile3 -r MNI152 -t tmat3.mat" + ) assert results.runtime.returncode != 0 - assert results.interface.inputs.infile == 'infile3' - assert results.interface.inputs.outfile == 'outfile3' - assert results.interface.inputs.refVolName == 'MNI152' - assert results.interface.inputs.affineTmat == 'tmat3.mat' + assert results.interface.inputs.infile == "infile3" + assert results.interface.inputs.outfile == "outfile3" + assert results.interface.inputs.refVolName == "MNI152" + assert results.interface.inputs.affineTmat == "tmat3.mat" # test arguments for opt_map opt_map = { - 'verbose': ('-v', True), - 'helpDoc': ('-h', True), - 'tensor': ('--tensor', True), - 'affineTmat': ('-t Tmat', 'Tmat'), - 'warpFile': ('-w wrpFile', 'wrpFile'), - 'interpolation': ('--interp=sinc', 'sinc'), - 'brainMask': ('-m mask', 
'mask') + "verbose": ("-v", True), + "helpDoc": ("-h", True), + "tensor": ("--tensor", True), + "affineTmat": ("-t Tmat", "Tmat"), + "warpFile": ("-w wrpFile", "wrpFile"), + "interpolation": ("--interp=sinc", "sinc"), + "brainMask": ("-m mask", "mask"), } for name, settings in list(opt_map.items()): vrg4 = fsl.VecReg( - infile='infile', - outfile='outfile', - refVolName='MNI152', - **{ - name: settings[1] - }) - assert vrg4.cmdline == vrg4.cmd + \ - ' -i infile -o outfile -r MNI152 ' + settings[0] + infile="infile", + outfile="outfile", + refVolName="MNI152", + **{name: settings[1]} + ) + assert ( + vrg4.cmdline == vrg4.cmd + " -i infile -o outfile -r MNI152 " + settings[0] + ) # test find_the_biggest -@pytest.mark.xfail( - reason="These tests are skipped until we clean up some of this code") +@pytest.mark.xfail(reason="These tests are skipped until we clean up some of this code") def test_Find_the_biggest(): fbg = fsl.FindTheBiggest() # make sure command gets called - assert fbg.cmd == 'find_the_biggest' + assert fbg.cmd == "find_the_biggest" # test raising error with mandatory args absent with pytest.raises(ValueError): fbg.run() # .inputs based parameters setting - fbg.inputs.infiles = 'seed*' - fbg.inputs.outfile = 'fbgfile' - assert fbg.cmdline == 'find_the_biggest seed* fbgfile' + fbg.inputs.infiles = "seed*" + fbg.inputs.outfile = "fbgfile" + assert fbg.cmdline == "find_the_biggest seed* fbgfile" - fbg2 = fsl.FindTheBiggest(infiles='seed2*', outfile='fbgfile2') - assert fbg2.cmdline == 'find_the_biggest seed2* fbgfile2' + fbg2 = fsl.FindTheBiggest(infiles="seed2*", outfile="fbgfile2") + assert fbg2.cmdline == "find_the_biggest seed2* fbgfile2" # .run based parameters setting fbg3 = fsl.FindTheBiggest() - results = fbg3.run(infiles='seed3', outfile='out3') - assert results.runtime.cmdline == 'find_the_biggest seed3 out3' + results = fbg3.run(infiles="seed3", outfile="out3") + assert results.runtime.cmdline == "find_the_biggest seed3 out3" # test arguments for opt_map # Find_the_biggest doesn't have an opt_map{} @@ -359,8 +352,9 @@ def test_tbss_skeleton(create_files_in_directory): # First by implicit argument skeletor.inputs.skeleton_file = True - assert skeletor.cmdline == \ - "tbss_skeleton -i a.nii -o %s" % os.path.join(newdir, "a_skeleton.nii") + assert skeletor.cmdline == "tbss_skeleton -i a.nii -o %s" % os.path.join( + newdir, "a_skeleton.nii" + ) # Now with a specific name skeletor.inputs.skeleton_file = "old_boney.nii" @@ -379,15 +373,19 @@ def test_tbss_skeleton(create_files_in_directory): bones.inputs.data_file = "b.nii" # Even though that's silly # Now we get a command line - assert bones.cmdline == \ - "tbss_skeleton -i a.nii -p 0.200 b.nii %s b.nii %s" % (Info.standard_image("LowerCingulum_1mm.nii.gz"), - os.path.join(newdir, "b_skeletonised.nii")) + assert bones.cmdline == "tbss_skeleton -i a.nii -p 0.200 b.nii %s b.nii %s" % ( + Info.standard_image("LowerCingulum_1mm.nii.gz"), + os.path.join(newdir, "b_skeletonised.nii"), + ) # Can we specify a mask? 
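    # Yes. The mechanics, as a commented sketch (it uses helpers that ship
    # with nipype; search_mask_file is validated as an existing file, so this
    # is meant to run where a.nii exists, e.g. inside this fixture directory):
    #
    #     from nipype.interfaces.base import Undefined, isdefined
    #     ts = fsl.TractSkeleton(in_file="a.nii", search_mask_file="a.nii")
    #     ts.inputs.search_mask_file = Undefined   # clear the input again
    #     assert not isdefined(ts.inputs.search_mask_file)
    #
    # Once an input is Undefined, isdefined() is False and its flag drops out
    # of .cmdline, which is what the next two assignments rely on.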
bones.inputs.use_cingulum_mask = Undefined bones.inputs.search_mask_file = "a.nii" - assert bones.cmdline == \ - "tbss_skeleton -i a.nii -p 0.200 b.nii a.nii b.nii %s" % os.path.join(newdir, "b_skeletonised.nii") + assert ( + bones.cmdline + == "tbss_skeleton -i a.nii -p 0.200 b.nii a.nii b.nii %s" + % os.path.join(newdir, "b_skeletonised.nii") + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -408,15 +406,20 @@ def test_distancemap(create_files_in_directory): # It should assert mapper.cmdline == "distancemap --out=%s --in=a.nii" % os.path.join( - newdir, "a_dstmap.nii") + newdir, "a_dstmap.nii" + ) # And we should be able to write out a maxima map mapper.inputs.local_max_file = True - assert mapper.cmdline == \ - "distancemap --out=%s --in=a.nii --localmax=%s" % (os.path.join(newdir, "a_dstmap.nii"), - os.path.join(newdir, "a_lclmax.nii")) + assert mapper.cmdline == "distancemap --out=%s --in=a.nii --localmax=%s" % ( + os.path.join(newdir, "a_dstmap.nii"), + os.path.join(newdir, "a_lclmax.nii"), + ) # And call it whatever we want mapper.inputs.local_max_file = "max.nii" - assert mapper.cmdline == \ - "distancemap --out=%s --in=a.nii --localmax=max.nii" % os.path.join(newdir, "a_dstmap.nii") + assert ( + mapper.cmdline + == "distancemap --out=%s --in=a.nii --localmax=max.nii" + % os.path.join(newdir, "a_dstmap.nii") + ) diff --git a/nipype/interfaces/fsl/tests/test_epi.py b/nipype/interfaces/fsl/tests/test_epi.py index bf025e991d..e8f408de45 100644 --- a/nipype/interfaces/fsl/tests/test_epi.py +++ b/nipype/interfaces/fsl/tests/test_epi.py @@ -17,7 +17,7 @@ def test_eddy_correct2(create_files_in_directory): eddy = fsl.EddyCorrect() # make sure command gets called - assert eddy.cmd == 'eddy_correct' + assert eddy.cmd == "eddy_correct" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -25,14 +25,13 @@ def test_eddy_correct2(create_files_in_directory): # .inputs based parameters setting eddy.inputs.in_file = filelist[0] - eddy.inputs.out_file = 'foo_eddc.nii' + eddy.inputs.out_file = "foo_eddc.nii" eddy.inputs.ref_num = 100 - assert eddy.cmdline == 'eddy_correct %s foo_eddc.nii 100' % filelist[0] + assert eddy.cmdline == "eddy_correct %s foo_eddc.nii 100" % filelist[0] # .run based parameter setting - eddy2 = fsl.EddyCorrect( - in_file=filelist[0], out_file='foo_ec.nii', ref_num=20) - assert eddy2.cmdline == 'eddy_correct %s foo_ec.nii 20' % filelist[0] + eddy2 = fsl.EddyCorrect(in_file=filelist[0], out_file="foo_ec.nii", ref_num=20) + assert eddy2.cmdline == "eddy_correct %s foo_ec.nii 20" % filelist[0] # test arguments for opt_map # eddy_correct class doesn't have opt_map{} diff --git a/nipype/interfaces/fsl/tests/test_maths.py b/nipype/interfaces/fsl/tests/test_maths.py index 7760c6dbe4..9b05645997 100644 --- a/nipype/interfaces/fsl/tests/test_maths.py +++ b/nipype/interfaces/fsl/tests/test_maths.py @@ -31,23 +31,23 @@ def test_maths_base(create_files_in_directory_plus_output_type): out_file = "a_maths{}".format(out_ext) # Now test the most basic command line - assert maths.cmdline == "fslmaths a.nii {}".format( - os.path.join(testdir, out_file)) + assert maths.cmdline == "fslmaths a.nii {}".format(os.path.join(testdir, out_file)) # Now test that we can set the various data types dtypes = ["float", "char", "int", "short", "double", "input"] int_cmdline = "fslmaths -dt {} a.nii " + os.path.join(testdir, out_file) - out_cmdline = "fslmaths a.nii " + os.path.join(testdir, - out_file) + " -odt {}" - duo_cmdline = "fslmaths -dt {} a.nii " + 
os.path.join( - testdir, out_file) + " -odt {}" + out_cmdline = "fslmaths a.nii " + os.path.join(testdir, out_file) + " -odt {}" + duo_cmdline = ( + "fslmaths -dt {} a.nii " + os.path.join(testdir, out_file) + " -odt {}" + ) for dtype in dtypes: foo = fsl.MathsCommand(in_file="a.nii", internal_datatype=dtype) assert foo.cmdline == int_cmdline.format(dtype) bar = fsl.MathsCommand(in_file="a.nii", output_datatype=dtype) assert bar.cmdline == out_cmdline.format(dtype) foobar = fsl.MathsCommand( - in_file="a.nii", internal_datatype=dtype, output_datatype=dtype) + in_file="a.nii", internal_datatype=dtype, output_datatype=dtype + ) assert foobar.cmdline == duo_cmdline.format(dtype, dtype) # Test that we can ask for an outfile name @@ -81,8 +81,7 @@ def test_changedt(create_files_in_directory_plus_output_type): dtypes = ["float", "char", "int", "short", "double", "input"] cmdline = "fslmaths a.nii b.nii -odt {}" for dtype in dtypes: - foo = fsl.MathsCommand( - in_file="a.nii", out_file="b.nii", output_datatype=dtype) + foo = fsl.MathsCommand(in_file="a.nii", out_file="b.nii", output_datatype=dtype) assert foo.cmdline == cmdline.format(dtype) @@ -102,18 +101,20 @@ def test_threshold(create_files_in_directory_plus_output_type): # Test the various opstrings cmdline = "fslmaths a.nii {} b.nii" - for val in [0, 0., -1, -1.5, -0.5, 0.5, 3, 400, 400.5]: + for val in [0, 0.0, -1, -1.5, -0.5, 0.5, 3, 400, 400.5]: thresh.inputs.thresh = val assert thresh.cmdline == cmdline.format("-thr {:.10f}".format(val)) val = "{:.10f}".format(42) thresh = fsl.Threshold( - in_file="a.nii", out_file="b.nii", thresh=42, use_robust_range=True) + in_file="a.nii", out_file="b.nii", thresh=42, use_robust_range=True + ) assert thresh.cmdline == cmdline.format("-thrp " + val) thresh.inputs.use_nonzero_voxels = True assert thresh.cmdline == cmdline.format("-thrP " + val) thresh = fsl.Threshold( - in_file="a.nii", out_file="b.nii", thresh=42, direction="above") + in_file="a.nii", out_file="b.nii", thresh=42, direction="above" + ) assert thresh.cmdline == cmdline.format("-uthr " + val) thresh.inputs.use_robust_range = True assert thresh.cmdline == cmdline.format("-uthrp " + val) @@ -143,7 +144,8 @@ def test_meanimage(create_files_in_directory_plus_output_type): # Test the auto naming meaner = fsl.MeanImage(in_file="a.nii") assert meaner.cmdline == "fslmaths a.nii -Tmean {}".format( - os.path.join(testdir, "a_mean{}".format(out_ext))) + os.path.join(testdir, "a_mean{}".format(out_ext)) + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -166,9 +168,10 @@ def test_stdimage(create_files_in_directory_plus_output_type): assert stder.cmdline == cmdline.format(dim) # Test the auto naming - stder = fsl.StdImage(in_file="a.nii", output_type='NIFTI') + stder = fsl.StdImage(in_file="a.nii", output_type="NIFTI") assert stder.cmdline == "fslmaths a.nii -Tstd {}".format( - os.path.join(testdir, "a_std.nii")) + os.path.join(testdir, "a_std.nii") + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -193,7 +196,8 @@ def test_maximage(create_files_in_directory_plus_output_type): # Test the auto naming maxer = fsl.MaxImage(in_file="a.nii") assert maxer.cmdline == "fslmaths a.nii -Tmax {}".format( - os.path.join(testdir, "a_max{}".format(out_ext))) + os.path.join(testdir, "a_max{}".format(out_ext)) + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -212,19 +216,18 @@ def test_smooth(create_files_in_directory_plus_output_type): # Test smoothing kernels cmdline = "fslmaths a.nii -s {:.5f} b.nii" - for val 
in [0, 1., 1, 25, 0.5, 8 / 3.]: - smoother = fsl.IsotropicSmooth( - in_file="a.nii", out_file="b.nii", sigma=val) + for val in [0, 1.0, 1, 25, 0.5, 8 / 3.0]: + smoother = fsl.IsotropicSmooth(in_file="a.nii", out_file="b.nii", sigma=val) assert smoother.cmdline == cmdline.format(val) - smoother = fsl.IsotropicSmooth( - in_file="a.nii", out_file="b.nii", fwhm=val) + smoother = fsl.IsotropicSmooth(in_file="a.nii", out_file="b.nii", fwhm=val) val = float(val) / np.sqrt(8 * np.log(2)) assert smoother.cmdline == cmdline.format(val) # Test automatic naming smoother = fsl.IsotropicSmooth(in_file="a.nii", sigma=5) assert smoother.cmdline == "fslmaths a.nii -s {:.5f} {}".format( - 5, os.path.join(testdir, "a_smooth{}".format(out_ext))) + 5, os.path.join(testdir, "a_smooth{}".format(out_ext)) + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -248,7 +251,8 @@ def test_mask(create_files_in_directory_plus_output_type): # Test auto name generation masker = fsl.ApplyMask(in_file="a.nii", mask_file="b.nii") assert masker.cmdline == "fslmaths a.nii -mas b.nii " + os.path.join( - testdir, "a_masked{}".format(out_ext)) + testdir, "a_masked{}".format(out_ext) + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -276,8 +280,10 @@ def test_dilation(create_files_in_directory_plus_output_type): for size in [1, 1.5, 5]: diller.inputs.kernel_shape = k diller.inputs.kernel_size = size - assert diller.cmdline == "fslmaths a.nii -kernel {} {:.4f} -dilF b.nii".format( - k, size) + assert ( + diller.cmdline + == "fslmaths a.nii -kernel {} {:.4f} -dilF b.nii".format(k, size) + ) # Test that we can use a file kernel f = open("kernel.txt", "w").close() @@ -290,7 +296,8 @@ def test_dilation(create_files_in_directory_plus_output_type): # Test that we don't need to request an out name dil = fsl.DilateImage(in_file="a.nii", operation="max") assert dil.cmdline == "fslmaths a.nii -dilF {}".format( - os.path.join(testdir, "a_dil{}".format(out_ext))) + os.path.join(testdir, "a_dil{}".format(out_ext)) + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -313,7 +320,8 @@ def test_erosion(create_files_in_directory_plus_output_type): # Test that we don't need to request an out name erode = fsl.ErodeImage(in_file="a.nii") assert erode.cmdline == "fslmaths a.nii -ero {}".format( - os.path.join(testdir, "a_ero{}".format(out_ext))) + os.path.join(testdir, "a_ero{}".format(out_ext)) + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -338,7 +346,8 @@ def test_spatial_filter(create_files_in_directory_plus_output_type): # Test that we don't need to ask for an out name filter = fsl.SpatialFilter(in_file="a.nii", operation="mean") assert filter.cmdline == "fslmaths a.nii -fmean {}".format( - os.path.join(testdir, "a_filt{}".format(out_ext))) + os.path.join(testdir, "a_filt{}".format(out_ext)) + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -356,10 +365,7 @@ def test_unarymaths(create_files_in_directory_plus_output_type): maths.run() # Test the different operations - ops = [ - "exp", "log", "sin", "cos", "sqr", "sqrt", "recip", "abs", "bin", - "index" - ] + ops = ["exp", "log", "sin", "cos", "sqr", "sqrt", "recip", "abs", "bin", "index"] for op in ops: maths.inputs.operation = op assert maths.cmdline == "fslmaths a.nii -{} b.nii".format(op) @@ -368,7 +374,8 @@ def test_unarymaths(create_files_in_directory_plus_output_type): for op in ops: maths = fsl.UnaryMaths(in_file="a.nii", operation=op) assert maths.cmdline == "fslmaths a.nii -{} {}".format( - op, 
os.path.join(testdir, "a_{}{}".format(op, out_ext))) + op, os.path.join(testdir, "a_{}{}".format(op, out_ext)) + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -387,26 +394,25 @@ def test_binarymaths(create_files_in_directory_plus_output_type): # Test the different operations ops = ["add", "sub", "mul", "div", "rem", "min", "max"] - operands = ["b.nii", -2, -0.5, 0, .123456, np.pi, 500] + operands = ["b.nii", -2, -0.5, 0, 0.123456, np.pi, 500] for op in ops: for ent in operands: - maths = fsl.BinaryMaths( - in_file="a.nii", out_file="c.nii", operation=op) + maths = fsl.BinaryMaths(in_file="a.nii", out_file="c.nii", operation=op) if ent == "b.nii": maths.inputs.operand_file = ent - assert maths.cmdline == "fslmaths a.nii -{} b.nii c.nii".format( - op) + assert maths.cmdline == "fslmaths a.nii -{} b.nii c.nii".format(op) else: maths.inputs.operand_value = ent assert maths.cmdline == "fslmaths a.nii -{} {:.8f} c.nii".format( - op, ent) + op, ent + ) # Test that we don't need to ask for an out file for op in ops: - maths = fsl.BinaryMaths( - in_file="a.nii", operation=op, operand_file="b.nii") + maths = fsl.BinaryMaths(in_file="a.nii", operation=op, operand_file="b.nii") assert maths.cmdline == "fslmaths a.nii -{} b.nii {}".format( - op, os.path.join(testdir, "a_maths{}".format(out_ext))) + op, os.path.join(testdir, "a_maths{}".format(out_ext)) + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -425,19 +431,18 @@ def test_multimaths(create_files_in_directory_plus_output_type): # Test a few operations maths.inputs.operand_files = ["a.nii", "b.nii"] - opstrings = [ - "-add %s -div %s", "-max 1 -sub %s -min %s", "-mas %s -add %s" - ] + opstrings = ["-add %s -div %s", "-max 1 -sub %s -min %s", "-mas %s -add %s"] for ostr in opstrings: maths.inputs.op_string = ostr - assert maths.cmdline == "fslmaths a.nii %s c.nii" % ostr % ("a.nii", - "b.nii") + assert maths.cmdline == "fslmaths a.nii %s c.nii" % ostr % ("a.nii", "b.nii") # Test that we don't need to ask for an out file maths = fsl.MultiImageMaths( - in_file="a.nii", op_string="-add %s -mul 5", operand_files=["b.nii"]) - assert maths.cmdline == \ - "fslmaths a.nii -add b.nii -mul 5 %s" % os.path.join(testdir, "a_maths%s" % out_ext) + in_file="a.nii", op_string="-add %s -mul 5", operand_files=["b.nii"] + ) + assert maths.cmdline == "fslmaths a.nii -add b.nii -mul 5 %s" % os.path.join( + testdir, "a_maths%s" % out_ext + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -459,9 +464,11 @@ def test_tempfilt(create_files_in_directory_plus_output_type): filt.inputs.highpass_sigma = win[0] filt.inputs.lowpass_sigma = win[1] assert filt.cmdline == "fslmaths a.nii -bptf {:.6f} {:.6f} b.nii".format( - win[0], win[1]) + win[0], win[1] + ) # Test that we don't need to ask for an out file filt = fsl.TemporalFilter(in_file="a.nii", highpass_sigma=64) - assert filt.cmdline == \ - "fslmaths a.nii -bptf 64.000000 -1.000000 {}".format(os.path.join(testdir, "a_filt{}".format(out_ext))) + assert filt.cmdline == "fslmaths a.nii -bptf 64.000000 -1.000000 {}".format( + os.path.join(testdir, "a_filt{}".format(out_ext)) + ) diff --git a/nipype/interfaces/fsl/tests/test_model.py b/nipype/interfaces/fsl/tests/test_model.py index c5c2f5bf35..ea86d8f628 100644 --- a/nipype/interfaces/fsl/tests/test_model.py +++ b/nipype/interfaces/fsl/tests/test_model.py @@ -13,15 +13,17 @@ def test_MultipleRegressDesign(tmpdir): tmpdir.chdir() foo = fsl.MultipleRegressDesign() foo.inputs.regressors = dict( - voice_stenght=[1, 1, 1], 
age=[0.2, 0.4, 0.5], BMI=[1, -1, 2]) - con1 = ['voice_and_age', 'T', ['age', 'voice_stenght'], [0.5, 0.5]] - con2 = ['just_BMI', 'T', ['BMI'], [1]] - foo.inputs.contrasts = [con1, con2, ['con3', 'F', [con1, con2]]] + voice_stenght=[1, 1, 1], age=[0.2, 0.4, 0.5], BMI=[1, -1, 2] + ) + con1 = ["voice_and_age", "T", ["age", "voice_stenght"], [0.5, 0.5]] + con2 = ["just_BMI", "T", ["BMI"], [1]] + foo.inputs.contrasts = [con1, con2, ["con3", "F", [con1, con2]]] res = foo.run() for ii in ["mat", "con", "fts", "grp"]: - assert getattr(res.outputs, - "design_" + ii) == tmpdir.join('design.' + ii).strpath + assert ( + getattr(res.outputs, "design_" + ii) == tmpdir.join("design." + ii).strpath + ) design_mat_expected_content = """/NumWaves 3 /NumPoints 3 @@ -61,5 +63,6 @@ def test_MultipleRegressDesign(tmpdir): 1 """ for ii in ["mat", "con", "fts", "grp"]: - assert tmpdir.join('design.' + ii).read() == eval( - "design_" + ii + "_expected_content") + assert tmpdir.join("design." + ii).read() == eval( + "design_" + ii + "_expected_content" + ) diff --git a/nipype/interfaces/fsl/tests/test_preprocess.py b/nipype/interfaces/fsl/tests/test_preprocess.py index 5caffae1b1..438f3f0ec4 100644 --- a/nipype/interfaces/fsl/tests/test_preprocess.py +++ b/nipype/interfaces/fsl/tests/test_preprocess.py @@ -23,7 +23,7 @@ def fsl_name(obj, fname): @pytest.fixture() def setup_infile(tmpdir): ext = Info.output_type_to_ext(Info.output_type()) - tmp_infile = tmpdir.join('foo' + ext) + tmp_infile = tmpdir.join("foo" + ext) tmp_infile.open("w") return (tmp_infile.strpath, tmpdir.strpath) @@ -32,7 +32,7 @@ def setup_infile(tmpdir): def test_bet(setup_infile): tmp_infile, tp_dir = setup_infile better = fsl.BET() - assert better.cmd == 'bet' + assert better.cmd == "bet" # Test raising error with mandatory args absent with pytest.raises(ValueError): @@ -40,19 +40,19 @@ def test_bet(setup_infile): # Test generated outfile name better.inputs.in_file = tmp_infile - outfile = fsl_name(better, 'foo_brain') + outfile = fsl_name(better, "foo_brain") outpath = os.path.join(os.getcwd(), outfile) - realcmd = 'bet %s %s' % (tmp_infile, outpath) + realcmd = "bet %s %s" % (tmp_infile, outpath) assert better.cmdline == realcmd # Test specified outfile name - outfile = fsl_name(better, '/newdata/bar') + outfile = fsl_name(better, "/newdata/bar") better.inputs.out_file = outfile - realcmd = 'bet %s %s' % (tmp_infile, outfile) + realcmd = "bet %s %s" % (tmp_infile, outfile) assert better.cmdline == realcmd # infile foo.nii doesn't exist def func(): - better.run(in_file='foo2.nii', out_file='bar.nii') + better.run(in_file="foo2.nii", out_file="bar.nii") with pytest.raises(TraitError): func() @@ -60,17 +60,17 @@ def func(): # Our options and some test values for them # Should parallel the opt_map structure in the class for clarity opt_map = { - 'outline': ('-o', True), - 'mask': ('-m', True), - 'skull': ('-s', True), - 'no_output': ('-n', True), - 'frac': ('-f 0.40', 0.4), - 'vertical_gradient': ('-g 0.75', 0.75), - 'radius': ('-r 20', 20), - 'center': ('-c 54 75 80', [54, 75, 80]), - 'threshold': ('-t', True), - 'mesh': ('-e', True), - 'surfaces': ('-A', True) + "outline": ("-o", True), + "mask": ("-m", True), + "skull": ("-s", True), + "no_output": ("-n", True), + "frac": ("-f 0.40", 0.4), + "vertical_gradient": ("-g 0.75", 0.75), + "radius": ("-r 20", 20), + "center": ("-c 54 75 80", [54, 75, 80]), + "threshold": ("-t", True), + "mesh": ("-e", True), + "surfaces": ("-A", True) # 'verbose': ('-v', True), # 'flags': ('--i-made-this-up', 
'--i-made-this-up'), } @@ -78,13 +78,13 @@ def func(): # test each of our arguments better = fsl.BET() - outfile = fsl_name(better, 'foo_brain') + outfile = fsl_name(better, "foo_brain") outpath = os.path.join(os.getcwd(), outfile) for name, settings in list(opt_map.items()): better = fsl.BET(**{name: settings[1]}) # Add mandatory input better.inputs.in_file = tmp_infile - realcmd = ' '.join([better.cmd, tmp_infile, outpath, settings[0]]) + realcmd = " ".join([better.cmd, tmp_infile, outpath, settings[0]]) assert better.cmdline == realcmd @@ -99,62 +99,61 @@ def test_fast(setup_infile): fasted = fsl.FAST(in_files=tmp_infile, verbose=True) fasted2 = fsl.FAST(in_files=[tmp_infile, tmp_infile], verbose=True) - assert faster.cmd == 'fast' + assert faster.cmd == "fast" assert faster.inputs.verbose assert faster.inputs.manual_seg == Undefined assert faster.inputs != fasted.inputs - assert fasted.cmdline == 'fast -v -S 1 %s' % (tmp_infile) - assert fasted2.cmdline == 'fast -v -S 2 %s %s' % (tmp_infile, tmp_infile) + assert fasted.cmdline == "fast -v -S 1 %s" % (tmp_infile) + assert fasted2.cmdline == "fast -v -S 2 %s %s" % (tmp_infile, tmp_infile) faster = fsl.FAST() faster.inputs.in_files = tmp_infile - assert faster.cmdline == 'fast -S 1 %s' % (tmp_infile) + assert faster.cmdline == "fast -S 1 %s" % (tmp_infile) faster.inputs.in_files = [tmp_infile, tmp_infile] - assert faster.cmdline == 'fast -S 2 %s %s' % (tmp_infile, tmp_infile) + assert faster.cmdline == "fast -S 2 %s %s" % (tmp_infile, tmp_infile) # Our options and some test values for them # Should parallel the opt_map structure in the class for clarity opt_map = { - 'number_classes': ('-n 4', 4), - 'bias_iters': ('-I 5', 5), - 'bias_lowpass': ('-l 15', 15), - 'img_type': ('-t 2', 2), - 'init_seg_smooth': ('-f 0.035', 0.035), - 'segments': ('-g', True), - 'init_transform': ('-a %s' % (tmp_infile), '%s' % (tmp_infile)), - 'other_priors': - ('-A %s %s %s' % (tmp_infile, tmp_infile, tmp_infile), - (['%s' % (tmp_infile), - '%s' % (tmp_infile), - '%s' % (tmp_infile)])), - 'no_pve': ('--nopve', True), - 'output_biasfield': ('-b', True), - 'output_biascorrected': ('-B', True), - 'no_bias': ('-N', True), - 'out_basename': ('-o fasted', 'fasted'), - 'use_priors': ('-P', True), - 'segment_iters': ('-W 14', 14), - 'mixel_smooth': ('-R 0.25', 0.25), - 'iters_afterbias': ('-O 3', 3), - 'hyper': ('-H 0.15', 0.15), - 'verbose': ('-v', True), - 'manual_seg': ('-s %s' % (tmp_infile), '%s' % (tmp_infile)), - 'probability_maps': ('-p', True), + "number_classes": ("-n 4", 4), + "bias_iters": ("-I 5", 5), + "bias_lowpass": ("-l 15", 15), + "img_type": ("-t 2", 2), + "init_seg_smooth": ("-f 0.035", 0.035), + "segments": ("-g", True), + "init_transform": ("-a %s" % (tmp_infile), "%s" % (tmp_infile)), + "other_priors": ( + "-A %s %s %s" % (tmp_infile, tmp_infile, tmp_infile), + (["%s" % (tmp_infile), "%s" % (tmp_infile), "%s" % (tmp_infile)]), + ), + "no_pve": ("--nopve", True), + "output_biasfield": ("-b", True), + "output_biascorrected": ("-B", True), + "no_bias": ("-N", True), + "out_basename": ("-o fasted", "fasted"), + "use_priors": ("-P", True), + "segment_iters": ("-W 14", 14), + "mixel_smooth": ("-R 0.25", 0.25), + "iters_afterbias": ("-O 3", 3), + "hyper": ("-H 0.15", 0.15), + "verbose": ("-v", True), + "manual_seg": ("-s %s" % (tmp_infile), "%s" % (tmp_infile)), + "probability_maps": ("-p", True), } # test each of our arguments for name, settings in list(opt_map.items()): faster = fsl.FAST(in_files=tmp_infile, **{name: settings[1]}) - assert 
faster.cmdline == ' '.join( - [faster.cmd, settings[0], - "-S 1 %s" % tmp_infile]) + assert faster.cmdline == " ".join( + [faster.cmd, settings[0], "-S 1 %s" % tmp_infile] + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_fast_list_outputs(setup_infile, tmpdir): - ''' By default (no -o), FSL's fast command outputs files into the same + """ By default (no -o), FSL's fast command outputs files into the same directory as the input files. If the flag -o is set, it outputs files into - the cwd ''' + the cwd """ def _run_and_test(opts, output_base): outputs = fsl.FAST(**opts)._list_outputs() @@ -162,21 +161,22 @@ def _run_and_test(opts, output_base): if output: for filename in ensure_list(output): assert os.path.realpath(filename).startswith( - os.path.realpath(output_base)) + os.path.realpath(output_base) + ) # set up tmp_infile, indir = setup_infile cwd = tmpdir.mkdir("new") cwd.chdir() assert indir != cwd.strpath - out_basename = 'a_basename' + out_basename = "a_basename" # run and test - opts = {'in_files': tmp_infile} + opts = {"in_files": tmp_infile} input_path, input_filename, input_ext = split_filename(tmp_infile) _run_and_test(opts, os.path.join(input_path, input_filename)) - opts['out_basename'] = out_basename + opts["out_basename"] = out_basename _run_and_test(opts, os.path.join(cwd.strpath, out_basename)) @@ -196,32 +196,36 @@ def test_flirt(setup_flirt): tmpdir, infile, reffile = setup_flirt flirter = fsl.FLIRT() - assert flirter.cmd == 'flirt' + assert flirter.cmd == "flirt" flirter.inputs.bins = 256 - flirter.inputs.cost = 'mutualinfo' + flirter.inputs.cost = "mutualinfo" flirted = fsl.FLIRT( in_file=infile, reference=reffile, - out_file='outfile', - out_matrix_file='outmat.mat', + out_file="outfile", + out_matrix_file="outmat.mat", bins=256, - cost='mutualinfo') + cost="mutualinfo", + ) flirt_est = fsl.FLIRT( in_file=infile, reference=reffile, - out_matrix_file='outmat.mat', + out_matrix_file="outmat.mat", bins=256, - cost='mutualinfo') + cost="mutualinfo", + ) assert flirter.inputs != flirted.inputs assert flirted.inputs != flirt_est.inputs assert flirter.inputs.bins == flirted.inputs.bins assert flirter.inputs.cost == flirt_est.inputs.cost - realcmd = 'flirt -in %s -ref %s -out outfile -omat outmat.mat ' \ - '-bins 256 -cost mutualinfo' % (infile, reffile) + realcmd = ( + "flirt -in %s -ref %s -out outfile -omat outmat.mat " + "-bins 256 -cost mutualinfo" % (infile, reffile) + ) assert flirted.cmdline == realcmd flirter = fsl.FLIRT() @@ -236,10 +240,14 @@ def test_flirt(setup_flirt): # Generate outfile and outmatrix pth, fname, ext = split_filename(infile) - outfile = fsl_name(flirter, '%s_flirt' % fname) - outmat = '%s_flirt.mat' % fname - realcmd = 'flirt -in %s -ref %s -out %s -omat %s' % (infile, reffile, - outfile, outmat) + outfile = fsl_name(flirter, "%s_flirt" % fname) + outmat = "%s_flirt.mat" % fname + realcmd = "flirt -in %s -ref %s -out %s -omat %s" % ( + infile, + reffile, + outfile, + outmat, + ) assert flirter.cmdline == realcmd # test apply_xfm option @@ -251,10 +259,10 @@ def test_flirt(setup_flirt): axfm2 = deepcopy(axfm) # test uses_qform axfm.inputs.uses_qform = True - assert axfm.cmdline == (realcmd + ' -applyxfm -usesqform') + assert axfm.cmdline == (realcmd + " -applyxfm -usesqform") # test in_matrix_file axfm2.inputs.in_matrix_file = reffile - assert axfm2.cmdline == (realcmd + ' -applyxfm -init %s' % reffile) + assert axfm2.cmdline == (realcmd + " -applyxfm -init %s" % reffile) tmpfile = tmpdir.join("file4test.nii") 
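    # The trait loop below derives each expected command line from the input
    # spec's own metadata. A minimal sketch of that introspection (assumes
    # nipype is importable; the values shown are illustrative):
    spec = fsl.FLIRT.input_spec()
    bins_trait = spec.traits()["bins"]
    # argstr is the printf-style fragment used to render a flag; here
    # bins_trait.argstr == "-bins %d", so bins=256 renders as "-bins 256".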
tmpfile.open("w") @@ -262,39 +270,49 @@ def test_flirt(setup_flirt): # cmdline is updated correctly. for key, trait_spec in sorted(fsl.FLIRT.input_spec().traits().items()): # Skip mandatory inputs and the trait methods - if key in ('trait_added', 'trait_modified', 'in_file', 'reference', - 'environ', 'output_type', 'out_file', 'out_matrix_file', - 'in_matrix_file', 'apply_xfm', - 'resource_monitor', 'out_log', - 'save_log'): + if key in ( + "trait_added", + "trait_modified", + "in_file", + "reference", + "environ", + "output_type", + "out_file", + "out_matrix_file", + "in_matrix_file", + "apply_xfm", + "resource_monitor", + "out_log", + "save_log", + ): continue param = None value = None - if key == 'args': - param = '-v' - value = '-v' + if key == "args": + param = "-v" + value = "-v" elif isinstance(trait_spec.trait_type, File): value = tmpfile.strpath param = trait_spec.argstr % value elif trait_spec.default is False: param = trait_spec.argstr value = True - elif key in ('searchr_x', 'searchr_y', 'searchr_z'): + elif key in ("searchr_x", "searchr_y", "searchr_z"): value = [-45, 45] - param = trait_spec.argstr % ' '.join(str(elt) for elt in value) + param = trait_spec.argstr % " ".join(str(elt) for elt in value) else: value = trait_spec.default param = trait_spec.argstr % value - cmdline = 'flirt -in %s -ref %s' % (infile, reffile) + cmdline = "flirt -in %s -ref %s" % (infile, reffile) # Handle autogeneration of outfile pth, fname, ext = split_filename(infile) - outfile = fsl_name(fsl.FLIRT(), '%s_flirt' % fname) - outfile = ' '.join(['-out', outfile]) + outfile = fsl_name(fsl.FLIRT(), "%s_flirt" % fname) + outfile = " ".join(["-out", outfile]) # Handle autogeneration of outmatrix - outmatrix = '%s_flirt.mat' % fname - outmatrix = ' '.join(['-omat', outmatrix]) + outmatrix = "%s_flirt.mat" % fname + outmatrix = " ".join(["-omat", outmatrix]) # Build command line - cmdline = ' '.join([cmdline, outfile, outmatrix, param]) + cmdline = " ".join([cmdline, outfile, outmatrix, param]) flirter = fsl.FLIRT(in_file=infile, reference=reffile) setattr(flirter.inputs, key, value) assert flirter.cmdline == cmdline @@ -302,13 +320,13 @@ def test_flirt(setup_flirt): # Test OutputSpec flirter = fsl.FLIRT(in_file=infile, reference=reffile) pth, fname, ext = split_filename(infile) - flirter.inputs.out_file = ''.join(['foo', ext]) - flirter.inputs.out_matrix_file = ''.join(['bar', ext]) + flirter.inputs.out_file = "".join(["foo", ext]) + flirter.inputs.out_matrix_file = "".join(["bar", ext]) outs = flirter._list_outputs() - assert outs['out_file'] == \ - os.path.join(os.getcwd(), flirter.inputs.out_file) - assert outs['out_matrix_file'] == \ - os.path.join(os.getcwd(), flirter.inputs.out_matrix_file) + assert outs["out_file"] == os.path.join(os.getcwd(), flirter.inputs.out_file) + assert outs["out_matrix_file"] == os.path.join( + os.getcwd(), flirter.inputs.out_matrix_file + ) assert not isdefined(flirter.inputs.out_log) @@ -318,19 +336,19 @@ def test_mcflirt(setup_flirt): tmpdir, infile, reffile = setup_flirt frt = fsl.MCFLIRT() - assert frt.cmd == 'mcflirt' + assert frt.cmd == "mcflirt" # Test generated outfile name frt.inputs.in_file = infile _, nme = os.path.split(infile) outfile = os.path.join(os.getcwd(), nme) - outfile = frt._gen_fname(outfile, suffix='_mcf') - realcmd = 'mcflirt -in ' + infile + ' -out ' + outfile + outfile = frt._gen_fname(outfile, suffix="_mcf") + realcmd = "mcflirt -in " + infile + " -out " + outfile assert frt.cmdline == realcmd # Test specified outfile name - outfile2 = 
'/newdata/bar.nii' + outfile2 = "/newdata/bar.nii" frt.inputs.out_file = outfile2 - realcmd = 'mcflirt -in ' + infile + ' -out ' + outfile2 + realcmd = "mcflirt -in " + infile + " -out " + outfile2 assert frt.cmdline == realcmd @@ -340,36 +358,34 @@ def test_mcflirt_opt(setup_flirt): _, nme = os.path.split(infile) opt_map = { - 'cost': ('-cost mutualinfo', 'mutualinfo'), - 'bins': ('-bins 256', 256), - 'dof': ('-dof 6', 6), - 'ref_vol': ('-refvol 2', 2), - 'scaling': ('-scaling 6.00', 6.00), - 'smooth': ('-smooth 1.00', 1.00), - 'rotation': ('-rotation 2', 2), - 'stages': ('-stages 3', 3), - 'init': ('-init %s' % (infile), infile), - 'use_gradient': ('-gdt', True), - 'use_contour': ('-edge', True), - 'mean_vol': ('-meanvol', True), - 'stats_imgs': ('-stats', True), - 'save_mats': ('-mats', True), - 'save_plots': ('-plots', True), + "cost": ("-cost mutualinfo", "mutualinfo"), + "bins": ("-bins 256", 256), + "dof": ("-dof 6", 6), + "ref_vol": ("-refvol 2", 2), + "scaling": ("-scaling 6.00", 6.00), + "smooth": ("-smooth 1.00", 1.00), + "rotation": ("-rotation 2", 2), + "stages": ("-stages 3", 3), + "init": ("-init %s" % (infile), infile), + "use_gradient": ("-gdt", True), + "use_contour": ("-edge", True), + "mean_vol": ("-meanvol", True), + "stats_imgs": ("-stats", True), + "save_mats": ("-mats", True), + "save_plots": ("-plots", True), } for name, settings in list(opt_map.items()): fnt = fsl.MCFLIRT(in_file=infile, **{name: settings[1]}) outfile = os.path.join(os.getcwd(), nme) - outfile = fnt._gen_fname(outfile, suffix='_mcf') + outfile = fnt._gen_fname(outfile, suffix="_mcf") - instr = '-in %s' % (infile) - outstr = '-out %s' % (outfile) - if name in ('init', 'cost', 'dof', 'mean_vol', 'bins'): - assert fnt.cmdline == ' '.join( - [fnt.cmd, instr, settings[0], outstr]) + instr = "-in %s" % (infile) + outstr = "-out %s" % (outfile) + if name in ("init", "cost", "dof", "mean_vol", "bins"): + assert fnt.cmdline == " ".join([fnt.cmd, instr, settings[0], outstr]) else: - assert fnt.cmdline == ' '.join( - [fnt.cmd, instr, outstr, settings[0]]) + assert fnt.cmdline == " ".join([fnt.cmd, instr, outstr, settings[0]]) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -378,8 +394,7 @@ def test_mcflirt_noinput(): fnt = fsl.MCFLIRT() with pytest.raises(ValueError) as excinfo: fnt.run() - assert str(excinfo.value).startswith( - "MCFLIRT requires a value for input 'in_file'") + assert str(excinfo.value).startswith("MCFLIRT requires a value for input 'in_file'") # test fnirt @@ -391,50 +406,58 @@ def test_fnirt(setup_flirt): tmpdir, infile, reffile = setup_flirt tmpdir.chdir() fnirt = fsl.FNIRT() - assert fnirt.cmd == 'fnirt' + assert fnirt.cmd == "fnirt" # Test list parameters - params = [('subsampling_scheme', '--subsamp', [4, 2, 2, 1], - '4,2,2,1'), ('max_nonlin_iter', '--miter', [4, 4, 4, 2], - '4,4,4,2'), ('ref_fwhm', '--reffwhm', [4, 2, 2, 0], - '4,2,2,0'), ('in_fwhm', '--infwhm', - [4, 2, 2, 0], '4,2,2,0'), - ('apply_refmask', '--applyrefmask', [0, 0, 1, 1], - '0,0,1,1'), ('apply_inmask', '--applyinmask', [0, 0, 0, 1], - '0,0,0,1'), ('regularization_lambda', '--lambda', - [0.5, 0.75], '0.5,0.75'), - ('intensity_mapping_model', '--intmod', 'global_non_linear', - 'global_non_linear')] + params = [ + ("subsampling_scheme", "--subsamp", [4, 2, 2, 1], "4,2,2,1"), + ("max_nonlin_iter", "--miter", [4, 4, 4, 2], "4,4,4,2"), + ("ref_fwhm", "--reffwhm", [4, 2, 2, 0], "4,2,2,0"), + ("in_fwhm", "--infwhm", [4, 2, 2, 0], "4,2,2,0"), + ("apply_refmask", "--applyrefmask", [0, 0, 1, 1], "0,0,1,1"), + 
("apply_inmask", "--applyinmask", [0, 0, 0, 1], "0,0,0,1"), + ("regularization_lambda", "--lambda", [0.5, 0.75], "0.5,0.75"), + ( + "intensity_mapping_model", + "--intmod", + "global_non_linear", + "global_non_linear", + ), + ] for item, flag, val, strval in params: fnirt = fsl.FNIRT(in_file=infile, ref_file=reffile, **{item: val}) - log = fnirt._gen_fname(infile, suffix='_log.txt', change_ext=False) - iout = fnirt._gen_fname(infile, suffix='_warped') - if item in ('max_nonlin_iter'): - cmd = 'fnirt --in=%s '\ - '--logout=%s'\ - ' %s=%s --ref=%s'\ - ' --iout=%s' % (infile, log, - flag, strval, reffile, iout) - elif item in ('in_fwhm', 'intensity_mapping_model'): - cmd = 'fnirt --in=%s %s=%s --logout=%s '\ - '--ref=%s --iout=%s' % (infile, flag, - strval, log, reffile, iout) - elif item.startswith('apply'): - cmd = 'fnirt %s=%s '\ - '--in=%s '\ - '--logout=%s '\ - '--ref=%s --iout=%s' % (flag, strval, - infile, log, - reffile, - iout) + log = fnirt._gen_fname(infile, suffix="_log.txt", change_ext=False) + iout = fnirt._gen_fname(infile, suffix="_warped") + if item in ("max_nonlin_iter"): + cmd = ( + "fnirt --in=%s " + "--logout=%s" + " %s=%s --ref=%s" + " --iout=%s" % (infile, log, flag, strval, reffile, iout) + ) + elif item in ("in_fwhm", "intensity_mapping_model"): + cmd = "fnirt --in=%s %s=%s --logout=%s " "--ref=%s --iout=%s" % ( + infile, + flag, + strval, + log, + reffile, + iout, + ) + elif item.startswith("apply"): + cmd = ( + "fnirt %s=%s " + "--in=%s " + "--logout=%s " + "--ref=%s --iout=%s" % (flag, strval, infile, log, reffile, iout) + ) else: - cmd = 'fnirt '\ - '--in=%s --logout=%s '\ - '--ref=%s %s=%s --iout=%s' % (infile, log, - reffile, - flag, strval, - iout) + cmd = ( + "fnirt " + "--in=%s --logout=%s " + "--ref=%s %s=%s --iout=%s" % (infile, log, reffile, flag, strval, iout) + ) assert fnirt.cmdline == cmd # Test ValueError is raised when missing mandatory args @@ -443,83 +466,84 @@ def test_fnirt(setup_flirt): fnirt.run() fnirt.inputs.in_file = infile fnirt.inputs.ref_file = reffile - intmap_basename = '%s_intmap' % fsl.FNIRT.intensitymap_file_basename( - infile) + intmap_basename = "%s_intmap" % fsl.FNIRT.intensitymap_file_basename(infile) intmap_image = fsl_name(fnirt, intmap_basename) - intmap_txt = '%s.txt' % intmap_basename + intmap_txt = "%s.txt" % intmap_basename # doing this to create the file to pass tests for file existence - with open(intmap_image, 'w'): + with open(intmap_image, "w"): pass - with open(intmap_txt, 'w'): + with open(intmap_txt, "w"): pass # test files - opt_map = [('affine_file', '--aff=%s' % infile, - infile), ('inwarp_file', '--inwarp=%s' % infile, infile), - ('in_intensitymap_file', '--intin=%s' % intmap_basename, - [intmap_image]), ('in_intensitymap_file', - '--intin=%s' % intmap_basename, - [intmap_image, intmap_txt]), - ('config_file', '--config=%s' % infile, - infile), ('refmask_file', '--refmask=%s' % infile, - infile), ('inmask_file', '--inmask=%s' % infile, - infile), ('field_file', - '--fout=%s' % infile, infile), - ('jacobian_file', '--jout=%s' % infile, - infile), ('modulatedref_file', '--refout=%s' % infile, - infile), ('out_intensitymap_file', - '--intout=%s' % intmap_basename, True), - ('out_intensitymap_file', '--intout=%s' % intmap_basename, - intmap_image), ('fieldcoeff_file', '--cout=%s' % infile, - infile), ('log_file', '--logout=%s' % infile, - infile)] + opt_map = [ + ("affine_file", "--aff=%s" % infile, infile), + ("inwarp_file", "--inwarp=%s" % infile, infile), + ("in_intensitymap_file", "--intin=%s" % 
intmap_basename, [intmap_image]), + ( + "in_intensitymap_file", + "--intin=%s" % intmap_basename, + [intmap_image, intmap_txt], + ), + ("config_file", "--config=%s" % infile, infile), + ("refmask_file", "--refmask=%s" % infile, infile), + ("inmask_file", "--inmask=%s" % infile, infile), + ("field_file", "--fout=%s" % infile, infile), + ("jacobian_file", "--jout=%s" % infile, infile), + ("modulatedref_file", "--refout=%s" % infile, infile), + ("out_intensitymap_file", "--intout=%s" % intmap_basename, True), + ("out_intensitymap_file", "--intout=%s" % intmap_basename, intmap_image), + ("fieldcoeff_file", "--cout=%s" % infile, infile), + ("log_file", "--logout=%s" % infile, infile), + ] for (name, settings, arg) in opt_map: fnirt = fsl.FNIRT(in_file=infile, ref_file=reffile, **{name: arg}) - if name in ('config_file', 'affine_file', 'field_file', - 'fieldcoeff_file'): - cmd = 'fnirt %s --in=%s '\ - '--logout=%s '\ - '--ref=%s --iout=%s' % (settings, infile, log, - reffile, iout) - elif name in ('refmask_file'): - cmd = 'fnirt --in=%s '\ - '--logout=%s --ref=%s '\ - '%s '\ - '--iout=%s' % (infile, log, - reffile, - settings, - iout) - elif name in ('in_intensitymap_file', 'inwarp_file', 'inmask_file', - 'jacobian_file'): - cmd = 'fnirt --in=%s '\ - '%s '\ - '--logout=%s --ref=%s '\ - '--iout=%s' % (infile, - settings, - log, - reffile, - iout) - elif name in ('log_file'): - cmd = 'fnirt --in=%s '\ - '%s --ref=%s '\ - '--iout=%s' % (infile, - settings, - reffile, - iout) + if name in ("config_file", "affine_file", "field_file", "fieldcoeff_file"): + cmd = ( + "fnirt %s --in=%s " + "--logout=%s " + "--ref=%s --iout=%s" % (settings, infile, log, reffile, iout) + ) + elif name in ("refmask_file"): + cmd = ( + "fnirt --in=%s " + "--logout=%s --ref=%s " + "%s " + "--iout=%s" % (infile, log, reffile, settings, iout) + ) + elif name in ( + "in_intensitymap_file", + "inwarp_file", + "inmask_file", + "jacobian_file", + ): + cmd = ( + "fnirt --in=%s " + "%s " + "--logout=%s --ref=%s " + "--iout=%s" % (infile, settings, log, reffile, iout) + ) + elif name in ("log_file"): + cmd = ( + "fnirt --in=%s " + "%s --ref=%s " + "--iout=%s" % (infile, settings, reffile, iout) + ) else: - cmd = 'fnirt --in=%s '\ - '--logout=%s %s '\ - '--ref=%s --iout=%s' % (infile, log, - settings, - reffile, iout) + cmd = ( + "fnirt --in=%s " + "--logout=%s %s " + "--ref=%s --iout=%s" % (infile, log, settings, reffile, iout) + ) assert fnirt.cmdline == cmd - if name == 'out_intensitymap_file': - assert fnirt._list_outputs()['out_intensitymap_file'] == [ - intmap_image, intmap_txt + if name == "out_intensitymap_file": + assert fnirt._list_outputs()["out_intensitymap_file"] == [ + intmap_image, + intmap_txt, ] @@ -527,32 +551,29 @@ def test_fnirt(setup_flirt): def test_applywarp(setup_flirt): tmpdir, infile, reffile = setup_flirt opt_map = { - 'out_file': ('--out=bar.nii', 'bar.nii'), - 'premat': ('--premat=%s' % (reffile), reffile), - 'postmat': ('--postmat=%s' % (reffile), reffile), + "out_file": ("--out=bar.nii", "bar.nii"), + "premat": ("--premat=%s" % (reffile), reffile), + "postmat": ("--postmat=%s" % (reffile), reffile), } # in_file, ref_file, field_file mandatory for name, settings in list(opt_map.items()): awarp = fsl.ApplyWarp( - in_file=infile, - ref_file=reffile, - field_file=reffile, - **{ - name: settings[1] - }) - if name == 'out_file': - realcmd = 'applywarp --in=%s '\ - '--ref=%s --out=%s '\ - '--warp=%s' % (infile, reffile, - settings[1], reffile) + in_file=infile, ref_file=reffile, field_file=reffile, **{name: 
settings[1]} + ) + if name == "out_file": + realcmd = ( + "applywarp --in=%s " + "--ref=%s --out=%s " + "--warp=%s" % (infile, reffile, settings[1], reffile) + ) else: - outfile = awarp._gen_fname(infile, suffix='_warp') - realcmd = 'applywarp --in=%s '\ - '--ref=%s --out=%s '\ - '--warp=%s %s' % (infile, reffile, - outfile, reffile, - settings[0]) + outfile = awarp._gen_fname(infile, suffix="_warp") + realcmd = ( + "applywarp --in=%s " + "--ref=%s --out=%s " + "--warp=%s %s" % (infile, reffile, outfile, reffile, settings[0]) + ) assert awarp.cmdline == realcmd @@ -563,32 +584,49 @@ def setup_fugue(tmpdir): import os.path as op d = np.ones((80, 80, 80)) - infile = tmpdir.join('dumbfile.nii.gz').strpath + infile = tmpdir.join("dumbfile.nii.gz").strpath nb.Nifti1Image(d, None, None).to_filename(infile) return (tmpdir, infile) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") -@pytest.mark.parametrize("attr, out_file", [({ - "save_unmasked_fmap": True, - "fmap_in_file": "infile", - "mask_file": "infile", - "output_type": "NIFTI_GZ" -}, 'fmap_out_file'), ({ - "save_unmasked_shift": True, - "fmap_in_file": "infile", - "dwell_time": 1.e-3, - "mask_file": "infile", - "output_type": "NIFTI_GZ" -}, "shift_out_file"), ({ - "in_file": "infile", - "mask_file": "infile", - "shift_in_file": "infile", - "output_type": "NIFTI_GZ" -}, 'unwarped_file')]) +@pytest.mark.parametrize( + "attr, out_file", + [ + ( + { + "save_unmasked_fmap": True, + "fmap_in_file": "infile", + "mask_file": "infile", + "output_type": "NIFTI_GZ", + }, + "fmap_out_file", + ), + ( + { + "save_unmasked_shift": True, + "fmap_in_file": "infile", + "dwell_time": 1.0e-3, + "mask_file": "infile", + "output_type": "NIFTI_GZ", + }, + "shift_out_file", + ), + ( + { + "in_file": "infile", + "mask_file": "infile", + "shift_in_file": "infile", + "output_type": "NIFTI_GZ", + }, + "unwarped_file", + ), + ], +) def test_fugue(setup_fugue, attr, out_file): import os.path as op + tmpdir, infile = setup_fugue fugue = fsl.FUGUE() @@ -601,26 +639,26 @@ def test_fugue(setup_fugue, attr, out_file): assert isdefined(getattr(res.outputs, out_file)) trait_spec = fugue.inputs.trait(out_file) - out_name = trait_spec.name_template % 'dumbfile' - out_name += '.nii.gz' + out_name = trait_spec.name_template % "dumbfile" + out_name += ".nii.gz" assert op.basename(getattr(res.outputs, out_file)) == out_name @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_first_genfname(): first = fsl.FIRST() - first.inputs.out_file = 'segment.nii' + first.inputs.out_file = "segment.nii" first.inputs.output_type = "NIFTI_GZ" - value = first._gen_fname(basename='original_segmentations') - expected_value = os.path.abspath('segment_all_fast_origsegs.nii.gz') + value = first._gen_fname(basename="original_segmentations") + expected_value = os.path.abspath("segment_all_fast_origsegs.nii.gz") assert value == expected_value - first.inputs.method = 'none' - value = first._gen_fname(basename='original_segmentations') - expected_value = os.path.abspath('segment_all_none_origsegs.nii.gz') + first.inputs.method = "none" + value = first._gen_fname(basename="original_segmentations") + expected_value = os.path.abspath("segment_all_none_origsegs.nii.gz") assert value == expected_value - first.inputs.method = 'auto' - first.inputs.list_of_specific_structures = ['L_Hipp', 'R_Hipp'] - value = first._gen_fname(basename='original_segmentations') - expected_value = os.path.abspath('segment_all_none_origsegs.nii.gz') + first.inputs.method = "auto" + 
first.inputs.list_of_specific_structures = ["L_Hipp", "R_Hipp"] + value = first._gen_fname(basename="original_segmentations") + expected_value = os.path.abspath("segment_all_none_origsegs.nii.gz") assert value == expected_value diff --git a/nipype/interfaces/fsl/tests/test_utils.py b/nipype/interfaces/fsl/tests/test_utils.py index 5df6d88a49..ca52354dd4 100644 --- a/nipype/interfaces/fsl/tests/test_utils.py +++ b/nipype/interfaces/fsl/tests/test_utils.py @@ -21,7 +21,7 @@ def test_fslroi(create_files_in_directory_plus_output_type): roi = fsl.ExtractROI() # make sure command gets called - assert roi.cmd == 'fslroi' + assert roi.cmd == "fslroi" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -29,15 +29,15 @@ def test_fslroi(create_files_in_directory_plus_output_type): # .inputs based parameters setting roi.inputs.in_file = filelist[0] - roi.inputs.roi_file = 'foo_roi.nii' + roi.inputs.roi_file = "foo_roi.nii" roi.inputs.t_min = 10 roi.inputs.t_size = 20 - assert roi.cmdline == 'fslroi %s foo_roi.nii 10 20' % filelist[0] + assert roi.cmdline == "fslroi %s foo_roi.nii 10 20" % filelist[0] # .run based parameter setting roi2 = fsl.ExtractROI( in_file=filelist[0], - roi_file='foo2_roi.nii', + roi_file="foo2_roi.nii", t_min=20, t_size=40, x_min=3, @@ -45,9 +45,9 @@ def test_fslroi(create_files_in_directory_plus_output_type): y_min=40, y_size=10, z_min=5, - z_size=20) - assert roi2.cmdline == \ - 'fslroi %s foo2_roi.nii 3 30 40 10 5 20 20 40' % filelist[0] + z_size=20, + ) + assert roi2.cmdline == "fslroi %s foo2_roi.nii 3 30 40 10 5 20 20 40" % filelist[0] # test arguments for opt_map # Fslroi class doesn't have a filled opt_map{} @@ -60,7 +60,7 @@ def test_fslmerge(create_files_in_directory_plus_output_type): merger = fsl.Merge() # make sure command gets called - assert merger.cmd == 'fslmerge' + assert merger.cmd == "fslmerge" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -68,27 +68,31 @@ def test_fslmerge(create_files_in_directory_plus_output_type): # .inputs based parameters setting merger.inputs.in_files = filelist - merger.inputs.merged_file = 'foo_merged.nii' - merger.inputs.dimension = 't' - merger.inputs.output_type = 'NIFTI' - assert merger.cmdline == 'fslmerge -t foo_merged.nii %s' % ' '.join( - filelist) + merger.inputs.merged_file = "foo_merged.nii" + merger.inputs.dimension = "t" + merger.inputs.output_type = "NIFTI" + assert merger.cmdline == "fslmerge -t foo_merged.nii %s" % " ".join(filelist) # verify that providing a tr value updates the dimension to tr merger.inputs.tr = 2.25 - assert merger.cmdline == 'fslmerge -tr foo_merged.nii %s %.2f' % ( - ' '.join(filelist), 2.25) + assert merger.cmdline == "fslmerge -tr foo_merged.nii %s %.2f" % ( + " ".join(filelist), + 2.25, + ) # .run based parameter setting merger2 = fsl.Merge( in_files=filelist, - merged_file='foo_merged.nii', - dimension='t', - output_type='NIFTI', - tr=2.25) + merged_file="foo_merged.nii", + dimension="t", + output_type="NIFTI", + tr=2.25, + ) - assert merger2.cmdline == \ - 'fslmerge -tr foo_merged.nii %s %.2f' % (' '.join(filelist), 2.25) + assert merger2.cmdline == "fslmerge -tr foo_merged.nii %s %.2f" % ( + " ".join(filelist), + 2.25, + ) # test arguments for opt_map # Fslmerge class doesn't have a filled opt_map{} @@ -103,7 +107,7 @@ def test_fslmaths(create_files_in_directory_plus_output_type): math = fsl.ImageMaths() # make sure command gets called - assert math.cmd == 'fslmaths' + assert math.cmd == "fslmaths" # test raising error 
with mandatory args absent with pytest.raises(ValueError): @@ -111,15 +115,18 @@ def test_fslmaths(create_files_in_directory_plus_output_type): # .inputs based parameters setting math.inputs.in_file = filelist[0] - math.inputs.op_string = '-add 2.5 -mul input_volume2' - math.inputs.out_file = 'foo_math.nii' - assert math.cmdline == \ - 'fslmaths %s -add 2.5 -mul input_volume2 foo_math.nii' % filelist[0] + math.inputs.op_string = "-add 2.5 -mul input_volume2" + math.inputs.out_file = "foo_math.nii" + assert ( + math.cmdline + == "fslmaths %s -add 2.5 -mul input_volume2 foo_math.nii" % filelist[0] + ) # .run based parameter setting math2 = fsl.ImageMaths( - in_file=filelist[0], op_string='-add 2.5', out_file='foo2_math.nii') - assert math2.cmdline == 'fslmaths %s -add 2.5 foo2_math.nii' % filelist[0] + in_file=filelist[0], op_string="-add 2.5", out_file="foo2_math.nii" + ) + assert math2.cmdline == "fslmaths %s -add 2.5 foo2_math.nii" % filelist[0] # test arguments for opt_map # Fslmath class doesn't have opt_map{} @@ -134,7 +141,7 @@ def test_overlay(create_files_in_directory_plus_output_type): overlay = fsl.Overlay() # make sure command gets called - assert overlay.cmd == 'overlay' + assert overlay.cmd == "overlay" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -146,10 +153,12 @@ def test_overlay(create_files_in_directory_plus_output_type): overlay.inputs.background_image = filelist[1] overlay.inputs.auto_thresh_bg = True overlay.inputs.show_negative_stats = True - overlay.inputs.out_file = 'foo_overlay.nii' - assert overlay.cmdline == \ - 'overlay 1 0 %s -a %s 2.50 10.00 %s -2.50 -10.00 foo_overlay.nii' % ( - filelist[1], filelist[0], filelist[0]) + overlay.inputs.out_file = "foo_overlay.nii" + assert ( + overlay.cmdline + == "overlay 1 0 %s -a %s 2.50 10.00 %s -2.50 -10.00 foo_overlay.nii" + % (filelist[1], filelist[0], filelist[0]) + ) # .run based parameter setting overlay2 = fsl.Overlay( @@ -157,9 +166,12 @@ def test_overlay(create_files_in_directory_plus_output_type): stat_thresh=(2.5, 10), background_image=filelist[1], auto_thresh_bg=True, - out_file='foo2_overlay.nii') - assert overlay2.cmdline == 'overlay 1 0 %s -a %s 2.50 10.00 foo2_overlay.nii' % ( - filelist[1], filelist[0]) + out_file="foo2_overlay.nii", + ) + assert overlay2.cmdline == "overlay 1 0 %s -a %s 2.50 10.00 foo2_overlay.nii" % ( + filelist[1], + filelist[0], + ) # test slicer @@ -171,7 +183,7 @@ def test_slicer(create_files_in_directory_plus_output_type): slicer = fsl.Slicer() # make sure command gets called - assert slicer.cmd == 'slicer' + assert slicer.cmd == "slicer" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -180,27 +192,29 @@ def test_slicer(create_files_in_directory_plus_output_type): # .inputs based parameters setting slicer.inputs.in_file = filelist[0] slicer.inputs.image_edges = filelist[1] - slicer.inputs.intensity_range = (10., 20.) 
+ slicer.inputs.intensity_range = (10.0, 20.0) slicer.inputs.all_axial = True slicer.inputs.image_width = 750 - slicer.inputs.out_file = 'foo_bar.png' - assert slicer.cmdline == \ - 'slicer %s %s -L -i 10.000 20.000 -A 750 foo_bar.png' % ( - filelist[0], filelist[1]) + slicer.inputs.out_file = "foo_bar.png" + assert slicer.cmdline == "slicer %s %s -L -i 10.000 20.000 -A 750 foo_bar.png" % ( + filelist[0], + filelist[1], + ) # .run based parameter setting slicer2 = fsl.Slicer( in_file=filelist[0], middle_slices=True, label_slices=False, - out_file='foo_bar2.png') - assert slicer2.cmdline == 'slicer %s -a foo_bar2.png' % (filelist[0]) + out_file="foo_bar2.png", + ) + assert slicer2.cmdline == "slicer %s -a foo_bar2.png" % (filelist[0]) def create_parfiles(): - np.savetxt('a.par', np.random.rand(6, 3)) - np.savetxt('b.par', np.random.rand(6, 3)) - return ['a.par', 'b.par'] + np.savetxt("a.par", np.random.rand(6, 3)) + np.savetxt("b.par", np.random.rand(6, 3)) + return ["a.par", "b.par"] # test fsl_tsplot @@ -213,7 +227,7 @@ def test_plottimeseries(create_files_in_directory_plus_output_type): plotter = fsl.PlotTimeSeries() # make sure command gets called - assert plotter.cmd == 'fsl_tsplot' + assert plotter.cmd == "fsl_tsplot" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -221,23 +235,24 @@ def test_plottimeseries(create_files_in_directory_plus_output_type): # .inputs based parameters setting plotter.inputs.in_file = parfiles[0] - plotter.inputs.labels = ['x', 'y', 'z'] + plotter.inputs.labels = ["x", "y", "z"] plotter.inputs.y_range = (0, 1) - plotter.inputs.title = 'test plot' - plotter.inputs.out_file = 'foo.png' - assert plotter.cmdline == \ - ('fsl_tsplot -i %s -a x,y,z -o foo.png -t \'test plot\' -u 1 --ymin=0 --ymax=1' - % parfiles[0]) + plotter.inputs.title = "test plot" + plotter.inputs.out_file = "foo.png" + assert plotter.cmdline == ( + "fsl_tsplot -i %s -a x,y,z -o foo.png -t 'test plot' -u 1 --ymin=0 --ymax=1" + % parfiles[0] + ) # .run based parameter setting plotter2 = fsl.PlotTimeSeries( - in_file=parfiles, - title='test2 plot', - plot_range=(2, 5), - out_file='bar.png') - assert plotter2.cmdline == \ - 'fsl_tsplot -i %s,%s -o bar.png --start=2 --finish=5 -t \'test2 plot\' -u 1' % tuple( - parfiles) + in_file=parfiles, title="test2 plot", plot_range=(2, 5), out_file="bar.png" + ) + assert ( + plotter2.cmdline + == "fsl_tsplot -i %s,%s -o bar.png --start=2 --finish=5 -t 'test2 plot' -u 1" + % tuple(parfiles) + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -247,7 +262,7 @@ def test_plotmotionparams(create_files_in_directory_plus_output_type): plotter = fsl.PlotMotionParams() # make sure command gets called - assert plotter.cmd == 'fsl_tsplot' + assert plotter.cmd == "fsl_tsplot" # test raising error with mandatory args absent with pytest.raises(ValueError): @@ -255,22 +270,25 @@ def test_plotmotionparams(create_files_in_directory_plus_output_type): # .inputs based parameters setting plotter.inputs.in_file = parfiles[0] - plotter.inputs.in_source = 'fsl' - plotter.inputs.plot_type = 'rotations' - plotter.inputs.out_file = 'foo.png' - assert plotter.cmdline == \ - ('fsl_tsplot -i %s -o foo.png -t \'MCFLIRT estimated rotations (radians)\' ' - '--start=1 --finish=3 -a x,y,z' % parfiles[0]) + plotter.inputs.in_source = "fsl" + plotter.inputs.plot_type = "rotations" + plotter.inputs.out_file = "foo.png" + assert plotter.cmdline == ( + "fsl_tsplot -i %s -o foo.png -t 'MCFLIRT estimated rotations (radians)' " + "--start=1 
--finish=3 -a x,y,z" % parfiles[0] + ) # .run based parameter setting plotter2 = fsl.PlotMotionParams( in_file=parfiles[1], - in_source='spm', - plot_type='translations', - out_file='bar.png') - assert plotter2.cmdline == \ - ('fsl_tsplot -i %s -o bar.png -t \'Realign estimated translations (mm)\' ' - '--start=1 --finish=3 -a x,y,z' % parfiles[1]) + in_source="spm", + plot_type="translations", + out_file="bar.png", + ) + assert plotter2.cmdline == ( + "fsl_tsplot -i %s -o bar.png -t 'Realign estimated translations (mm)' " + "--start=1 --finish=3 -a x,y,z" % parfiles[1] + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -289,16 +307,16 @@ def test_convertxfm(create_files_in_directory_plus_output_type): cvt.inputs.in_file = filelist[0] cvt.inputs.invert_xfm = True cvt.inputs.out_file = "foo.mat" - assert cvt.cmdline == 'convert_xfm -omat foo.mat -inverse %s' % filelist[0] + assert cvt.cmdline == "convert_xfm -omat foo.mat -inverse %s" % filelist[0] # constructor based parameter setting cvt2 = fsl.ConvertXFM( - in_file=filelist[0], - in_file2=filelist[1], - concat_xfm=True, - out_file="bar.mat") - assert cvt2.cmdline == \ - "convert_xfm -omat bar.mat -concat %s %s" % (filelist[1], filelist[0]) + in_file=filelist[0], in_file2=filelist[1], concat_xfm=True, out_file="bar.mat" + ) + assert cvt2.cmdline == "convert_xfm -omat bar.mat -concat %s %s" % ( + filelist[1], + filelist[0], + ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @@ -320,7 +338,8 @@ def test_swapdims(create_files_in_directory_plus_output_type): swap.inputs.in_file = files[0] swap.inputs.new_dims = ("x", "y", "z") assert swap.cmdline == "fslswapdim a.nii x y z %s" % os.path.realpath( - os.path.join(testdir, "a_newdims%s" % out_ext)) + os.path.join(testdir, "a_newdims%s" % out_ext) + ) # Test that we can set an output name swap.inputs.out_file = "b.nii" diff --git a/nipype/interfaces/fsl/utils.py b/nipype/interfaces/fsl/utils.py index dacb8c9228..6ada44e046 100644 --- a/nipype/interfaces/fsl/utils.py +++ b/nipype/interfaces/fsl/utils.py @@ -17,20 +17,23 @@ import numpy as np -from ...utils.filemanip import (load_json, save_json, split_filename, - fname_presuffix) -from ..base import (traits, TraitedSpec, OutputMultiPath, File, CommandLine, - CommandLineInputSpec, isdefined) +from ...utils.filemanip import load_json, save_json, split_filename, fname_presuffix +from ..base import ( + traits, + TraitedSpec, + OutputMultiPath, + File, + CommandLine, + CommandLineInputSpec, + isdefined, +) from .base import FSLCommand, FSLCommandInputSpec, Info class CopyGeomInputSpec(FSLCommandInputSpec): in_file = File( - exists=True, - mandatory=True, - argstr="%s", - position=0, - desc="source image") + exists=True, mandatory=True, argstr="%s", position=0, desc="source image" + ) dest_file = File( exists=True, mandatory=True, @@ -38,11 +41,13 @@ class CopyGeomInputSpec(FSLCommandInputSpec): position=1, desc="destination image", copyfile=True, - output_name='out_file', - name_source='dest_file', - name_template='%s') + output_name="out_file", + name_source="dest_file", + name_template="%s", + ) ignore_dims = traits.Bool( - desc='Do not copy image dimensions', argstr='-d', position="-1") + desc="Do not copy image dimensions", argstr="-d", position="-1" + ) class CopyGeomOutputSpec(TraitedSpec): @@ -58,6 +63,7 @@ class CopyGeom(FSLCommand): different files will result in loss of information or potentially incorrect settings. 
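
    A minimal usage sketch (file names are illustrative)::

        from nipype.interfaces import fsl
        copygeom = fsl.CopyGeom(in_file="src.nii", dest_file="dest.nii")
        copygeom.cmdline  # 'fslcpgeom src.nii dest.nii'
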
""" + _cmd = "fslcpgeom" input_spec = CopyGeomInputSpec output_spec = CopyGeomOutputSpec @@ -65,36 +71,32 @@ class CopyGeom(FSLCommand): class RobustFOVInputSpec(FSLCommandInputSpec): in_file = File( - exists=True, - desc='input filename', - argstr='-i %s', - position=0, - mandatory=True) + exists=True, desc="input filename", argstr="-i %s", position=0, mandatory=True + ) out_roi = File( desc="ROI volume output name", argstr="-r %s", - name_source=['in_file'], + name_source=["in_file"], hash_files=False, - name_template='%s_ROI') + name_template="%s_ROI", + ) brainsize = traits.Int( - desc=('size of brain in z-dimension (default ' - '170mm/150mm)'), - argstr='-b %d') + desc=("size of brain in z-dimension (default " "170mm/150mm)"), argstr="-b %d" + ) out_transform = File( - desc=("Transformation matrix in_file to out_roi " - "output name"), + desc=("Transformation matrix in_file to out_roi " "output name"), argstr="-m %s", - name_source=['in_file'], + name_source=["in_file"], hash_files=False, - name_template='%s_to_ROI') + name_template="%s_to_ROI", + ) class RobustFOVOutputSpec(TraitedSpec): out_roi = File(exists=True, desc="ROI volume output name") out_transform = File( - exists=True, - desc=("Transformation matrix in_file to out_roi " - "output name")) + exists=True, desc=("Transformation matrix in_file to out_roi " "output name") + ) class RobustFOV(FSLCommand): @@ -104,7 +106,7 @@ class RobustFOV(FSLCommand): 150mm to 170mm. """ - _cmd = 'robustfov' + _cmd = "robustfov" input_spec = RobustFOVInputSpec output_spec = RobustFOVOutputSpec @@ -112,45 +114,46 @@ class RobustFOV(FSLCommand): class ImageMeantsInputSpec(FSLCommandInputSpec): in_file = File( exists=True, - desc='input file for computing the average timeseries', - argstr='-i %s', + desc="input file for computing the average timeseries", + argstr="-i %s", position=0, - mandatory=True) + mandatory=True, + ) out_file = File( - desc='name of output text matrix', - argstr='-o %s', + desc="name of output text matrix", + argstr="-o %s", genfile=True, - hash_files=False) - mask = File(exists=True, desc='input 3D mask', argstr='-m %s') + hash_files=False, + ) + mask = File(exists=True, desc="input 3D mask", argstr="-m %s") spatial_coord = traits.List( traits.Int, - desc=(' requested spatial coordinate ' - '(instead of mask)'), - argstr='-c %s') + desc=(" requested spatial coordinate " "(instead of mask)"), + argstr="-c %s", + ) use_mm = traits.Bool( - desc=('use mm instead of voxel coordinates (for -c ' - 'option)'), - argstr='--usemm') + desc=("use mm instead of voxel coordinates (for -c " "option)"), + argstr="--usemm", + ) show_all = traits.Bool( - desc=('show all voxel time series (within mask) ' - 'instead of averaging'), - argstr='--showall') + desc=("show all voxel time series (within mask) " "instead of averaging"), + argstr="--showall", + ) eig = traits.Bool( - desc=('calculate Eigenvariate(s) instead of mean (output will have 0 ' - 'mean)'), - argstr='--eig') + desc=("calculate Eigenvariate(s) instead of mean (output will have 0 " "mean)"), + argstr="--eig", + ) order = traits.Int( - 1, - desc='select number of Eigenvariates', - argstr='--order=%d', - usedefault=True) + 1, desc="select number of Eigenvariates", argstr="--order=%d", usedefault=True + ) nobin = traits.Bool( - desc=('do not binarise the mask for calculation of ' - 'Eigenvariates'), - argstr='--no_bin') + desc=("do not binarise the mask for calculation of " "Eigenvariates"), + argstr="--no_bin", + ) transpose = traits.Bool( - desc=('output results in transpose 
format (one row per voxel/mean)'), - argstr='--transpose') + desc=("output results in transpose format (one row per voxel/mean)"), + argstr="--transpose", + ) class ImageMeantsOutputSpec(TraitedSpec): @@ -163,21 +166,23 @@ class ImageMeants(FSLCommand): in the mask (or all voxels in the image if no mask is specified) """ - _cmd = 'fslmeants' + + _cmd = "fslmeants" input_spec = ImageMeantsInputSpec output_spec = ImageMeantsOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_file - if not isdefined(outputs['out_file']): - outputs['out_file'] = self._gen_fname( - self.inputs.in_file, suffix='_ts', ext='.txt', change_ext=True) - outputs['out_file'] = os.path.abspath(outputs['out_file']) + outputs["out_file"] = self.inputs.out_file + if not isdefined(outputs["out_file"]): + outputs["out_file"] = self._gen_fname( + self.inputs.in_file, suffix="_ts", ext=".txt", change_ext=True + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._list_outputs()[name] return None @@ -187,22 +192,24 @@ class SmoothInputSpec(FSLCommandInputSpec): sigma = traits.Float( argstr="-kernel gauss %.03f -fmean", position=1, - xor=['fwhm'], + xor=["fwhm"], mandatory=True, - desc='gaussian kernel sigma in mm (not voxels)') + desc="gaussian kernel sigma in mm (not voxels)", + ) fwhm = traits.Float( argstr="-kernel gauss %.03f -fmean", position=1, - xor=['sigma'], + xor=["sigma"], mandatory=True, - desc=('gaussian kernel fwhm, will be converted to sigma in mm ' - '(not voxels)')) + desc=("gaussian kernel fwhm, will be converted to sigma in mm " "(not voxels)"), + ) smoothed_file = File( argstr="%s", position=2, - name_source=['in_file'], - name_template='%s_smooth', - hash_files=False) + name_source=["in_file"], + name_template="%s_smooth", + hash_files=False, + ) class SmoothOutputSpec(TraitedSpec): @@ -249,18 +256,24 @@ class Smooth(FSLCommand): input_spec = SmoothInputSpec output_spec = SmoothOutputSpec - _cmd = 'fslmaths' + _cmd = "fslmaths" def _format_arg(self, name, trait_spec, value): - if name == 'fwhm': + if name == "fwhm": sigma = float(value) / np.sqrt(8 * np.log(2)) return super(Smooth, self)._format_arg(name, trait_spec, sigma) return super(Smooth, self)._format_arg(name, trait_spec, value) class SliceInputSpec(FSLCommandInputSpec): - in_file = File(exists=True, argstr="%s", position=0, mandatory=True, - desc="input filename", copyfile=False) + in_file = File( + exists=True, + argstr="%s", + position=0, + mandatory=True, + desc="input filename", + copyfile=False, + ) out_base_name = traits.Str(argstr="%s", position=1, desc="outputs prefix") @@ -285,7 +298,7 @@ class Slice(FSLCommand): """ - _cmd = 'fslslice' + _cmd = "fslslice" input_spec = SliceInputSpec output_spec = SliceOutputSpec @@ -307,44 +320,50 @@ def _list_outputs(self): """ outputs = self._outputs().get() ext = Info.output_type_to_ext(self.inputs.output_type) - suffix = '_slice_*' + ext + suffix = "_slice_*" + ext if isdefined(self.inputs.out_base_name): - fname_template = os.path.abspath( - self.inputs.out_base_name + suffix) + fname_template = os.path.abspath(self.inputs.out_base_name + suffix) else: - fname_template = fname_presuffix(self.inputs.in_file, - suffix=suffix, use_ext=False) + fname_template = fname_presuffix( + self.inputs.in_file, suffix=suffix, use_ext=False + ) - outputs['out_files'] = sorted(glob(fname_template)) + outputs["out_files"] = 
sorted(glob(fname_template)) return outputs class MergeInputSpec(FSLCommandInputSpec): - in_files = traits.List( - File(exists=True), argstr="%s", position=2, mandatory=True) + in_files = traits.List(File(exists=True), argstr="%s", position=2, mandatory=True) dimension = traits.Enum( - 't', - 'x', - 'y', - 'z', - 'a', + "t", + "x", + "y", + "z", + "a", argstr="-%s", position=0, - desc=("dimension along which to merge, optionally " - "set tr input when dimension is t"), - mandatory=True) + desc=( + "dimension along which to merge, optionally " + "set tr input when dimension is t" + ), + mandatory=True, + ) tr = traits.Float( position=-1, - argstr='%.2f', - desc=('use to specify TR in seconds (default is 1.00 ' - 'sec), overrides dimension and sets it to tr')) + argstr="%.2f", + desc=( + "use to specify TR in seconds (default is 1.00 " + "sec), overrides dimension and sets it to tr" + ), + ) merged_file = File( argstr="%s", position=1, - name_source='in_files', - name_template='%s_merged', - hash_files=False) + name_source="in_files", + name_template="%s_merged", + hash_files=False, + ) class MergeOutputSpec(TraitedSpec): @@ -378,35 +397,29 @@ class Merge(FSLCommand): """ - _cmd = 'fslmerge' + _cmd = "fslmerge" input_spec = MergeInputSpec output_spec = MergeOutputSpec def _format_arg(self, name, spec, value): - if name == 'tr': - if self.inputs.dimension != 't': - raise ValueError('When TR is specified, dimension must be t') + if name == "tr": + if self.inputs.dimension != "t": + raise ValueError("When TR is specified, dimension must be t") return spec.argstr % value - if name == 'dimension': + if name == "dimension": if isdefined(self.inputs.tr): - return '-tr' + return "-tr" return spec.argstr % value return super(Merge, self)._format_arg(name, spec, value) class ExtractROIInputSpec(FSLCommandInputSpec): in_file = File( - exists=True, - argstr="%s", - position=0, - desc="input file", - mandatory=True) + exists=True, argstr="%s", position=0, desc="input file", mandatory=True + ) roi_file = File( - argstr="%s", - position=1, - desc="output file", - genfile=True, - hash_files=False) + argstr="%s", position=1, desc="output file", genfile=True, hash_files=False + ) x_min = traits.Int(argstr="%d", position=2) x_size = traits.Int(argstr="%d", position=3) y_min = traits.Int(argstr="%d", position=4) @@ -416,15 +429,22 @@ class ExtractROIInputSpec(FSLCommandInputSpec): t_min = traits.Int(argstr="%d", position=8) t_size = traits.Int(argstr="%d", position=9) _crop_xor = [ - 'x_min', 'x_size', 'y_min', 'y_size', 'z_min', 'z_size', 't_min', - 't_size' + "x_min", + "x_size", + "y_min", + "y_size", + "z_min", + "z_size", + "t_min", + "t_size", ] crop_list = traits.List( traits.Tuple(traits.Int, traits.Int), argstr="%s", position=2, xor=_crop_xor, - desc="list of two tuples specifying crop options") + desc="list of two tuples specifying crop options", + ) class ExtractROIOutputSpec(TraitedSpec): @@ -457,7 +477,7 @@ class ExtractROI(FSLCommand): """ - _cmd = 'fslroi' + _cmd = "fslroi" input_spec = ExtractROIInputSpec output_spec = ExtractROIOutputSpec @@ -485,36 +505,33 @@ def _list_outputs(self): """ outputs = self._outputs().get() - outputs['roi_file'] = self.inputs.roi_file - if not isdefined(outputs['roi_file']): - outputs['roi_file'] = self._gen_fname( - self.inputs.in_file, suffix='_roi') - outputs['roi_file'] = os.path.abspath(outputs['roi_file']) + outputs["roi_file"] = self.inputs.roi_file + if not isdefined(outputs["roi_file"]): + outputs["roi_file"] = self._gen_fname(self.inputs.in_file, 
suffix="_roi") + outputs["roi_file"] = os.path.abspath(outputs["roi_file"]) return outputs def _gen_filename(self, name): - if name == 'roi_file': + if name == "roi_file": return self._list_outputs()[name] return None class SplitInputSpec(FSLCommandInputSpec): in_file = File( - exists=True, - argstr="%s", - position=0, - mandatory=True, - desc="input filename") + exists=True, argstr="%s", position=0, mandatory=True, desc="input filename" + ) out_base_name = traits.Str(argstr="%s", position=1, desc="outputs prefix") dimension = traits.Enum( - 't', - 'x', - 'y', - 'z', + "t", + "x", + "y", + "z", argstr="-%s", position=2, mandatory=True, - desc="dimension along which the file will be split") + desc="dimension along which the file will be split", + ) class SplitOutputSpec(TraitedSpec): @@ -525,7 +542,8 @@ class Split(FSLCommand): """Uses FSL Fslsplit command to separate a volume into images in time, x, y or z dimension. """ - _cmd = 'fslsplit' + + _cmd = "fslsplit" input_spec = SplitInputSpec output_spec = SplitOutputSpec @@ -547,36 +565,37 @@ def _list_outputs(self): """ outputs = self._outputs().get() ext = Info.output_type_to_ext(self.inputs.output_type) - outbase = 'vol[0-9]*' + outbase = "vol[0-9]*" if isdefined(self.inputs.out_base_name): - outbase = '%s[0-9]*' % self.inputs.out_base_name - outputs['out_files'] = sorted( - glob(os.path.join(os.getcwd(), outbase + ext))) + outbase = "%s[0-9]*" % self.inputs.out_base_name + outputs["out_files"] = sorted(glob(os.path.join(os.getcwd(), outbase + ext))) return outputs class ImageMathsInputSpec(FSLCommandInputSpec): in_file = File(exists=True, argstr="%s", mandatory=True, position=1) in_file2 = File(exists=True, argstr="%s", position=3) - mask_file = File(exists=True, argstr='-mas %s', - desc='use (following image>0) to mask current image') + mask_file = File( + exists=True, + argstr="-mas %s", + desc="use (following image>0) to mask current image", + ) out_file = File(argstr="%s", position=-2, genfile=True, hash_files=False) op_string = traits.Str( - argstr="%s", - position=2, - desc="string defining the operation, i. e. -add") + argstr="%s", position=2, desc="string defining the operation, i. e. 
-add" + ) suffix = traits.Str(desc="out_file suffix") out_data_type = traits.Enum( - 'char', - 'short', - 'int', - 'float', - 'double', - 'input', + "char", + "short", + "int", + "float", + "double", + "input", argstr="-odt %s", position=-1, - desc=("output datatype, one of (char, short, " - "int, float, double, input)")) + desc=("output datatype, one of (char, short, " "int, float, double, input)"), + ) class ImageMathsOutputSpec(TraitedSpec): @@ -600,29 +619,29 @@ class ImageMaths(FSLCommand): """ + input_spec = ImageMathsInputSpec output_spec = ImageMathsOutputSpec - _cmd = 'fslmaths' + _cmd = "fslmaths" def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._list_outputs()[name] return None def _parse_inputs(self, skip=None): - return super(ImageMaths, self)._parse_inputs(skip=['suffix']) + return super(ImageMaths, self)._parse_inputs(skip=["suffix"]) def _list_outputs(self): - suffix = '_maths' # ohinds: build suffix + suffix = "_maths" # ohinds: build suffix if isdefined(self.inputs.suffix): suffix = self.inputs.suffix outputs = self._outputs().get() - outputs['out_file'] = self.inputs.out_file - if not isdefined(outputs['out_file']): - outputs['out_file'] = self._gen_fname( - self.inputs.in_file, suffix=suffix) - outputs['out_file'] = os.path.abspath(outputs['out_file']) + outputs["out_file"] = self.inputs.out_file + if not isdefined(outputs["out_file"]): + outputs["out_file"] = self._gen_fname(self.inputs.in_file, suffix=suffix) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs @@ -632,41 +651,46 @@ class FilterRegressorInputSpec(FSLCommandInputSpec): argstr="-i %s", desc="input file name (4D image)", mandatory=True, - position=1) + position=1, + ) out_file = File( argstr="-o %s", desc="output file name for the filtered data", genfile=True, position=2, - hash_files=False) + hash_files=False, + ) design_file = File( exists=True, argstr="-d %s", position=3, mandatory=True, - desc=("name of the matrix with time courses (e.g. GLM " - "design or MELODIC mixing matrix)")) + desc=( + "name of the matrix with time courses (e.g. 
GLM " + "design or MELODIC mixing matrix)" + ), + ) filter_columns = traits.List( traits.Int, argstr="-f '%s'", xor=["filter_all"], mandatory=True, position=4, - desc=("(1-based) column indices to filter out of the data")) + desc=("(1-based) column indices to filter out of the data"), + ) filter_all = traits.Bool( mandatory=True, argstr="-f '%s'", xor=["filter_columns"], position=4, - desc=("use all columns in the design file in " - "denoising")) + desc=("use all columns in the design file in " "denoising"), + ) mask = File(exists=True, argstr="-m %s", desc="mask image file name") - var_norm = traits.Bool( - argstr="--vn", desc="perform variance-normalization on data") + var_norm = traits.Bool(argstr="--vn", desc="perform variance-normalization on data") out_vnscales = traits.Bool( argstr="--out_vnscales", - desc=("output scaling factors for variance " - "normalization")) + desc=("output scaling factors for variance " "normalization"), + ) class FilterRegressorOutputSpec(TraitedSpec): @@ -678,12 +702,13 @@ class FilterRegressor(FSLCommand): Uses simple OLS regression on 4D images """ + input_spec = FilterRegressorInputSpec output_spec = FilterRegressorOutputSpec - _cmd = 'fsl_regfilt' + _cmd = "fsl_regfilt" def _format_arg(self, name, trait_spec, value): - if name == 'filter_columns': + if name == "filter_columns": return trait_spec.argstr % ",".join(map(str, value)) elif name == "filter_all": design = np.loadtxt(self.inputs.design_file) @@ -691,59 +716,62 @@ def _format_arg(self, name, trait_spec, value): n_cols = design.shape[1] except IndexError: n_cols = 1 - return trait_spec.argstr % ",".join( - map(str, list(range(1, n_cols + 1)))) - return super(FilterRegressor, self)._format_arg( - name, trait_spec, value) + return trait_spec.argstr % ",".join(map(str, list(range(1, n_cols + 1)))) + return super(FilterRegressor, self)._format_arg(name, trait_spec, value) def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_file - if not isdefined(outputs['out_file']): - outputs['out_file'] = self._gen_fname( - self.inputs.in_file, suffix='_regfilt') - outputs['out_file'] = os.path.abspath(outputs['out_file']) + outputs["out_file"] = self.inputs.out_file + if not isdefined(outputs["out_file"]): + outputs["out_file"] = self._gen_fname( + self.inputs.in_file, suffix="_regfilt" + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._list_outputs()[name] return None class ImageStatsInputSpec(FSLCommandInputSpec): split_4d = traits.Bool( - argstr='-t', + argstr="-t", position=1, - desc=('give a separate output line for each 3D ' - 'volume of a 4D timeseries')) + desc=("give a separate output line for each 3D " "volume of a 4D timeseries"), + ) in_file = File( exists=True, argstr="%s", mandatory=True, position=3, - desc='input file to generate stats of') + desc="input file to generate stats of", + ) op_string = traits.Str( argstr="%s", mandatory=True, position=4, - desc=("string defining the operation, options are " - "applied in order, e.g. -M -l 10 -M will " - "report the non-zero mean, apply a threshold " - "and then report the new nonzero mean")) - mask_file = File( - exists=True, argstr="", desc='mask file used for option -k %s') + desc=( + "string defining the operation, options are " + "applied in order, e.g. 
-M -l 10 -M will " + "report the non-zero mean, apply a threshold " + "and then report the new nonzero mean" + ), + ) + mask_file = File(exists=True, argstr="", desc="mask file used for option -k %s") index_mask_file = File( exists=True, argstr="-K %s", position=2, desc="generate seperate n submasks from indexMask, " - "for indexvalues 1..n where n is the maximum index " - "value in indexMask, and generate statistics for each submask") + "for indexvalues 1..n where n is the maximum index " + "value in indexMask, and generate statistics for each submask", + ) class ImageStatsOutputSpec(TraitedSpec): - out_stat = traits.Any(desc='stats output') + out_stat = traits.Any(desc="stats output") class ImageStats(FSLCommand): @@ -763,35 +791,35 @@ class ImageStats(FSLCommand): """ + input_spec = ImageStatsInputSpec output_spec = ImageStatsOutputSpec - _cmd = 'fslstats' + _cmd = "fslstats" def _format_arg(self, name, trait_spec, value): - if name == 'mask_file': - return '' - if name == 'op_string': - if '-k %s' in self.inputs.op_string: + if name == "mask_file": + return "" + if name == "op_string": + if "-k %s" in self.inputs.op_string: if isdefined(self.inputs.mask_file): return self.inputs.op_string % self.inputs.mask_file else: - raise ValueError( - '-k %s option in op_string requires mask_file') + raise ValueError("-k %s option in op_string requires mask_file") return super(ImageStats, self)._format_arg(name, trait_spec, value) def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() # local caching for backward compatibility - outfile = os.path.join(os.getcwd(), 'stat_result.json') + outfile = os.path.join(os.getcwd(), "stat_result.json") if runtime is None: try: - out_stat = load_json(outfile)['stat'] + out_stat = load_json(outfile)["stat"] except IOError: return self.run().outputs else: out_stat = [] - for line in runtime.stdout.split('\n'): + for line in runtime.stdout.split("\n"): if line: values = line.split() if len(values) > 1: @@ -806,31 +834,35 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): class AvScaleInputSpec(CommandLineInputSpec): - all_param = traits.Bool(False, argstr='--allparams') - mat_file = File( - exists=True, argstr='%s', desc='mat file to read', position=-2) + all_param = traits.Bool(False, argstr="--allparams") + mat_file = File(exists=True, argstr="%s", desc="mat file to read", position=-2) ref_file = File( exists=True, - argstr='%s', + argstr="%s", position=-1, - desc='reference file to get center of rotation') + desc="reference file to get center of rotation", + ) class AvScaleOutputSpec(TraitedSpec): rotation_translation_matrix = traits.List( - traits.List(traits.Float), desc='Rotation and Translation Matrix') - scales = traits.List(traits.Float, desc='Scales (x,y,z)') - skews = traits.List(traits.Float, desc='Skews') - average_scaling = traits.Float(desc='Average Scaling') - determinant = traits.Float(desc='Determinant') + traits.List(traits.Float), desc="Rotation and Translation Matrix" + ) + scales = traits.List(traits.Float, desc="Scales (x,y,z)") + skews = traits.List(traits.Float, desc="Skews") + average_scaling = traits.Float(desc="Average Scaling") + determinant = traits.Float(desc="Determinant") forward_half_transform = traits.List( - traits.List(traits.Float), desc='Forward Half Transform') + traits.List(traits.Float), desc="Forward Half Transform" + ) backward_half_transform = traits.List( - traits.List(traits.Float), desc='Backwards Half Transform') + traits.List(traits.Float), desc="Backwards Half 
Transform" + ) left_right_orientation_preserved = traits.Bool( - desc='True if LR orientation preserved') - rot_angles = traits.List(traits.Float, desc='rotation angles') - translations = traits.List(traits.Float, desc='translations') + desc="True if LR orientation preserved" + ) + rot_angles = traits.List(traits.Float, desc="rotation angles") + translations = traits.List(traits.Float, desc="translations") class AvScale(CommandLine): @@ -845,56 +877,60 @@ class AvScale(CommandLine): """ + input_spec = AvScaleInputSpec output_spec = AvScaleOutputSpec - _cmd = 'avscale' + _cmd = "avscale" def _run_interface(self, runtime): runtime = super(AvScale, self)._run_interface(runtime) expr = re.compile( - r'Rotation & Translation Matrix:\n(?P[0-9\. \n-]+)[\s\n]*' - r'(Rotation Angles \(x,y,z\) \[rads\] = (?P[0-9\. -]+))?[\s\n]*' - r'(Translations \(x,y,z\) \[mm\] = (?P[0-9\. -]+))?[\s\n]*' - r'Scales \(x,y,z\) = (?P[0-9\. -]+)[\s\n]*' - r'Skews \(xy,xz,yz\) = (?P[0-9\. -]+)[\s\n]*' - r'Average scaling = (?P[0-9\.-]+)[\s\n]*' - r'Determinant = (?P[0-9\.-]+)[\s\n]*' - r'Left-Right orientation: (?P[A-Za-z]+)[\s\n]*' - r'Forward half transform =[\s]*\n' - r'(?P[0-9\. \n-]+)[\s\n]*' - r'Backward half transform =[\s]*\n' - r'(?P[0-9\. \n-]+)[\s\n]*') + r"Rotation & Translation Matrix:\n(?P[0-9\. \n-]+)[\s\n]*" + r"(Rotation Angles \(x,y,z\) \[rads\] = (?P[0-9\. -]+))?[\s\n]*" + r"(Translations \(x,y,z\) \[mm\] = (?P[0-9\. -]+))?[\s\n]*" + r"Scales \(x,y,z\) = (?P[0-9\. -]+)[\s\n]*" + r"Skews \(xy,xz,yz\) = (?P[0-9\. -]+)[\s\n]*" + r"Average scaling = (?P[0-9\.-]+)[\s\n]*" + r"Determinant = (?P[0-9\.-]+)[\s\n]*" + r"Left-Right orientation: (?P[A-Za-z]+)[\s\n]*" + r"Forward half transform =[\s]*\n" + r"(?P[0-9\. \n-]+)[\s\n]*" + r"Backward half transform =[\s]*\n" + r"(?P[0-9\. 
\n-]+)[\s\n]*" + ) out = expr.search(runtime.stdout).groupdict() outputs = {} - outputs['rotation_translation_matrix'] = [[ - float(v) for v in r.strip().split(' ') - ] for r in out['rot_tran_mat'].strip().split('\n')] - outputs['scales'] = [ - float(s) for s in out['scales'].strip().split(' ') + outputs["rotation_translation_matrix"] = [ + [float(v) for v in r.strip().split(" ")] + for r in out["rot_tran_mat"].strip().split("\n") + ] + outputs["scales"] = [float(s) for s in out["scales"].strip().split(" ")] + outputs["skews"] = [float(s) for s in out["skews"].strip().split(" ")] + outputs["average_scaling"] = float(out["avg_scaling"].strip()) + outputs["determinant"] = float(out["determinant"].strip()) + outputs["left_right_orientation_preserved"] = ( + out["lr_orientation"].strip() == "preserved" + ) + outputs["forward_half_transform"] = [ + [float(v) for v in r.strip().split(" ")] + for r in out["fwd_half_xfm"].strip().split("\n") + ] + outputs["backward_half_transform"] = [ + [float(v) for v in r.strip().split(" ")] + for r in out["bwd_half_xfm"].strip().split("\n") ] - outputs['skews'] = [float(s) for s in out['skews'].strip().split(' ')] - outputs['average_scaling'] = float(out['avg_scaling'].strip()) - outputs['determinant'] = float(out['determinant'].strip()) - outputs['left_right_orientation_preserved'] = out[ - 'lr_orientation'].strip() == 'preserved' - outputs['forward_half_transform'] = [[ - float(v) for v in r.strip().split(' ') - ] for r in out['fwd_half_xfm'].strip().split('\n')] - outputs['backward_half_transform'] = [[ - float(v) for v in r.strip().split(' ') - ] for r in out['bwd_half_xfm'].strip().split('\n')] if self.inputs.all_param: - outputs['rot_angles'] = [ - float(r) for r in out['rot_angles'].strip().split(' ') + outputs["rot_angles"] = [ + float(r) for r in out["rot_angles"].strip().split(" ") ] - outputs['translations'] = [ - float(r) for r in out['translations'].strip().split(' ') + outputs["translations"] = [ + float(r) for r in out["translations"].strip().split(" ") ] - setattr(self, '_results', outputs) + setattr(self, "_results", outputs) return runtime def _list_outputs(self): @@ -903,90 +939,100 @@ def _list_outputs(self): class OverlayInputSpec(FSLCommandInputSpec): transparency = traits.Bool( - desc='make overlay colors semi-transparent', + desc="make overlay colors semi-transparent", position=1, - argstr='%s', + argstr="%s", usedefault=True, - default_value=True) + default_value=True, + ) out_type = traits.Enum( - 'float', - 'int', + "float", + "int", position=2, usedefault=True, - argstr='%s', - desc='write output with float or int') + argstr="%s", + desc="write output with float or int", + ) use_checkerboard = traits.Bool( - desc='use checkerboard mask for overlay', argstr='-c', position=3) + desc="use checkerboard mask for overlay", argstr="-c", position=3 + ) background_image = File( exists=True, position=4, mandatory=True, - argstr='%s', - desc='image to use as background') - _xor_inputs = ('auto_thresh_bg', 'full_bg_range', 'bg_thresh') + argstr="%s", + desc="image to use as background", + ) + _xor_inputs = ("auto_thresh_bg", "full_bg_range", "bg_thresh") auto_thresh_bg = traits.Bool( - desc=('automatically threshold the background image'), - argstr='-a', + desc=("automatically threshold the background image"), + argstr="-a", position=5, xor=_xor_inputs, - mandatory=True) + mandatory=True, + ) full_bg_range = traits.Bool( - desc='use full range of background image', - argstr='-A', + desc="use full range of background image", + argstr="-A", 
position=5, xor=_xor_inputs, - mandatory=True) + mandatory=True, + ) bg_thresh = traits.Tuple( traits.Float, traits.Float, - argstr='%.3f %.3f', + argstr="%.3f %.3f", position=5, - desc='min and max values for background intensity', + desc="min and max values for background intensity", xor=_xor_inputs, - mandatory=True) + mandatory=True, + ) stat_image = File( exists=True, position=6, mandatory=True, - argstr='%s', - desc='statistical image to overlay in color') + argstr="%s", + desc="statistical image to overlay in color", + ) stat_thresh = traits.Tuple( traits.Float, traits.Float, position=7, mandatory=True, - argstr='%.2f %.2f', - desc=('min and max values for the statistical ' - 'overlay')) + argstr="%.2f %.2f", + desc=("min and max values for the statistical " "overlay"), + ) show_negative_stats = traits.Bool( - desc=('display negative statistics in ' - 'overlay'), - xor=['stat_image2'], - argstr='%s', - position=8) + desc=("display negative statistics in " "overlay"), + xor=["stat_image2"], + argstr="%s", + position=8, + ) stat_image2 = File( exists=True, position=9, - xor=['show_negative_stats'], - argstr='%s', - desc='second statistical image to overlay in color') + xor=["show_negative_stats"], + argstr="%s", + desc="second statistical image to overlay in color", + ) stat_thresh2 = traits.Tuple( traits.Float, traits.Float, position=10, - desc=('min and max values for second ' - 'statistical overlay'), - argstr='%.2f %.2f') + desc=("min and max values for second " "statistical overlay"), + argstr="%.2f %.2f", + ) out_file = File( - desc='combined image volume', + desc="combined image volume", position=-1, - argstr='%s', + argstr="%s", genfile=True, - hash_files=False) + hash_files=False, + ) class OverlayOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='combined image volume') + out_file = File(exists=True, desc="combined image volume") class Overlay(FSLCommand): @@ -1008,25 +1054,28 @@ class Overlay(FSLCommand): """ - _cmd = 'overlay' + + _cmd = "overlay" input_spec = OverlayInputSpec output_spec = OverlayOutputSpec def _format_arg(self, name, spec, value): - if name == 'transparency': + if name == "transparency": if value: - return '1' + return "1" else: - return '0' - if name == 'out_type': - if value == 'float': - return '0' + return "0" + if name == "out_type": + if value == "float": + return "0" else: - return '1' - if name == 'show_negative_stats': - return '%s %.2f %.2f' % (self.inputs.stat_image, - self.inputs.stat_thresh[0] * -1, - self.inputs.stat_thresh[1] * -1) + return "1" + if name == "show_negative_stats": + return "%s %.2f %.2f" % ( + self.inputs.stat_image, + self.inputs.stat_thresh[0] * -1, + self.inputs.stat_thresh[1] * -1, + ) return super(Overlay, self)._format_arg(name, spec, value) def _list_outputs(self): @@ -1034,117 +1083,122 @@ def _list_outputs(self): out_file = self.inputs.out_file if not isdefined(out_file): if isdefined(self.inputs.stat_image2) and ( - not isdefined(self.inputs.show_negative_stats) - or not self.inputs.show_negative_stats): + not isdefined(self.inputs.show_negative_stats) + or not self.inputs.show_negative_stats + ): stem = "%s_and_%s" % ( split_filename(self.inputs.stat_image)[1], - split_filename(self.inputs.stat_image2)[1]) + split_filename(self.inputs.stat_image2)[1], + ) else: stem = split_filename(self.inputs.stat_image)[1] - out_file = self._gen_fname(stem, suffix='_overlay') - outputs['out_file'] = os.path.abspath(out_file) + out_file = self._gen_fname(stem, suffix="_overlay") + outputs["out_file"] = 
os.path.abspath(out_file) return outputs def _gen_filename(self, name): - if name == 'out_file': - return self._list_outputs()['out_file'] + if name == "out_file": + return self._list_outputs()["out_file"] return None class SlicerInputSpec(FSLCommandInputSpec): in_file = File( - exists=True, - position=1, - argstr='%s', - mandatory=True, - desc='input volume') + exists=True, position=1, argstr="%s", mandatory=True, desc="input volume" + ) image_edges = File( exists=True, position=2, - argstr='%s', - desc=('volume to display edge overlay for (useful for ' - 'checking registration')) + argstr="%s", + desc=( + "volume to display edge overlay for (useful for " "checking registration" + ), + ) label_slices = traits.Bool( position=3, - argstr='-L', - desc='display slice number', + argstr="-L", + desc="display slice number", usedefault=True, - default_value=True) + default_value=True, + ) colour_map = File( exists=True, position=4, - argstr='-l %s', - desc=('use different colour map from that stored in ' - 'nifti header')) + argstr="-l %s", + desc=("use different colour map from that stored in " "nifti header"), + ) intensity_range = traits.Tuple( traits.Float, traits.Float, position=5, - argstr='-i %.3f %.3f', - desc='min and max intensities to display') + argstr="-i %.3f %.3f", + desc="min and max intensities to display", + ) threshold_edges = traits.Float( - position=6, argstr='-e %.3f', desc='use threshold for edges') + position=6, argstr="-e %.3f", desc="use threshold for edges" + ) dither_edges = traits.Bool( - position=7, - argstr='-t', - desc=('produce semi-transparent (dithered) ' - 'edges')) + position=7, argstr="-t", desc=("produce semi-transparent (dithered) " "edges") + ) nearest_neighbour = traits.Bool( position=8, - argstr='-n', - desc=('use nearest neighbor interpolation ' - 'for output')) + argstr="-n", + desc=("use nearest neighbor interpolation " "for output"), + ) show_orientation = traits.Bool( position=9, - argstr='%s', + argstr="%s", usedefault=True, default_value=True, - desc='label left-right orientation') - _xor_options = ('single_slice', 'middle_slices', 'all_axial', - 'sample_axial') + desc="label left-right orientation", + ) + _xor_options = ("single_slice", "middle_slices", "all_axial", "sample_axial") single_slice = traits.Enum( - 'x', - 'y', - 'z', + "x", + "y", + "z", position=10, - argstr='-%s', + argstr="-%s", xor=_xor_options, - requires=['slice_number'], - desc=('output picture of single slice in the x, y, or z plane')) + requires=["slice_number"], + desc=("output picture of single slice in the x, y, or z plane"), + ) slice_number = traits.Int( - position=11, argstr='-%d', desc='slice number to save in picture') + position=11, argstr="-%d", desc="slice number to save in picture" + ) middle_slices = traits.Bool( position=10, - argstr='-a', + argstr="-a", xor=_xor_options, - desc=('output picture of mid-sagittal, axial, ' - 'and coronal slices')) + desc=("output picture of mid-sagittal, axial, " "and coronal slices"), + ) all_axial = traits.Bool( position=10, - argstr='-A', + argstr="-A", xor=_xor_options, - requires=['image_width'], - desc='output all axial slices into one picture') + requires=["image_width"], + desc="output all axial slices into one picture", + ) sample_axial = traits.Int( position=10, - argstr='-S %d', + argstr="-S %d", xor=_xor_options, - requires=['image_width'], - desc=('output every n axial slices into one ' - 'picture')) - image_width = traits.Int( - position=-2, argstr='%d', desc='max picture width') + requires=["image_width"], + 
desc=("output every n axial slices into one " "picture"), + ) + image_width = traits.Int(position=-2, argstr="%d", desc="max picture width") out_file = File( position=-1, genfile=True, - argstr='%s', - desc='picture to write', - hash_files=False) - scaling = traits.Float(position=0, argstr='-s %f', desc='image scale') + argstr="%s", + desc="picture to write", + hash_files=False, + ) + scaling = traits.Float(position=0, argstr="-s %f", desc="image scale") class SlicerOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='picture to write') + out_file = File(exists=True, desc="picture to write") class Slicer(FSLCommand): @@ -1164,34 +1218,35 @@ class Slicer(FSLCommand): """ - _cmd = 'slicer' + + _cmd = "slicer" input_spec = SlicerInputSpec output_spec = SlicerOutputSpec def _format_arg(self, name, spec, value): - if name == 'show_orientation': + if name == "show_orientation": if value: - return '' + return "" else: - return '-u' + return "-u" elif name == "label_slices": if value: - return '-L' + return "-L" else: - return '' + return "" return super(Slicer, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() out_file = self.inputs.out_file if not isdefined(out_file): - out_file = self._gen_fname(self.inputs.in_file, ext='.png') - outputs['out_file'] = os.path.abspath(out_file) + out_file = self._gen_fname(self.inputs.in_file, ext=".png") + outputs["out_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): - if name == 'out_file': - return self._list_outputs()['out_file'] + if name == "out_file": + return self._list_outputs()["out_file"] return None @@ -1203,61 +1258,58 @@ class PlotTimeSeriesInputSpec(FSLCommandInputSpec): mandatory=True, argstr="%s", position=1, - desc=("file or list of files with columns of " - "timecourse information")) + desc=("file or list of files with columns of " "timecourse information"), + ) plot_start = traits.Int( argstr="--start=%d", - xor=("plot_range", ), - desc="first column from in-file to plot") + xor=("plot_range",), + desc="first column from in-file to plot", + ) plot_finish = traits.Int( argstr="--finish=%d", - xor=("plot_range", ), - desc="final column from in-file to plot") + xor=("plot_range",), + desc="final column from in-file to plot", + ) plot_range = traits.Tuple( traits.Int, traits.Int, argstr="%s", xor=("plot_start", "plot_finish"), - desc=("first and last columns from the in-file " - "to plot")) + desc=("first and last columns from the in-file " "to plot"), + ) title = traits.Str(argstr="%s", desc="plot title") legend_file = File(exists=True, argstr="--legend=%s", desc="legend file") labels = traits.Either( - traits.Str, - traits.List(traits.Str), - argstr="%s", - desc="label or list of labels") - y_min = traits.Float( - argstr="--ymin=%.2f", desc="minumum y value", xor=("y_range", )) - y_max = traits.Float( - argstr="--ymax=%.2f", desc="maximum y value", xor=("y_range", )) + traits.Str, traits.List(traits.Str), argstr="%s", desc="label or list of labels" + ) + y_min = traits.Float(argstr="--ymin=%.2f", desc="minumum y value", xor=("y_range",)) + y_max = traits.Float(argstr="--ymax=%.2f", desc="maximum y value", xor=("y_range",)) y_range = traits.Tuple( traits.Float, traits.Float, argstr="%s", xor=("y_min", "y_max"), - desc="min and max y axis values") + desc="min and max y axis values", + ) x_units = traits.Int( argstr="-u %d", usedefault=True, default_value=1, - desc=("scaling units for x-axis (between 1 and length of in file)")) + desc=("scaling units for x-axis 
(between 1 and length of in file)"), + ) plot_size = traits.Tuple( - traits.Int, - traits.Int, - argstr="%s", - desc="plot image height and width") - x_precision = traits.Int( - argstr="--precision=%d", desc="precision of x-axis labels") - sci_notation = traits.Bool( - argstr="--sci", desc="switch on scientific notation") + traits.Int, traits.Int, argstr="%s", desc="plot image height and width" + ) + x_precision = traits.Int(argstr="--precision=%d", desc="precision of x-axis labels") + sci_notation = traits.Bool(argstr="--sci", desc="switch on scientific notation") out_file = File( - argstr="-o %s", genfile=True, desc="image to write", hash_files=False) + argstr="-o %s", genfile=True, desc="image to write", hash_files=False + ) class PlotTimeSeriesOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='image to write') + out_file = File(exists=True, desc="image to write") class PlotTimeSeries(FSLCommand): @@ -1275,6 +1327,7 @@ class PlotTimeSeries(FSLCommand): """ + _cmd = "fsl_tsplot" input_spec = PlotTimeSeriesInputSpec output_spec = PlotTimeSeriesOutputSpec @@ -1293,7 +1346,7 @@ def _format_arg(self, name, spec, value): else: return "-a %s" % value elif name == "title": - return "-t \'%s\'" % value + return "-t '%s'" % value elif name == "plot_range": return "--start=%d --finish=%d" % value elif name == "y_range": @@ -1310,13 +1363,13 @@ def _list_outputs(self): infile = self.inputs.in_file[0] else: infile = self.inputs.in_file - out_file = self._gen_fname(infile, ext='.png') - outputs['out_file'] = os.path.abspath(out_file) + out_file = self._gen_fname(infile, ext=".png") + outputs["out_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): - if name == 'out_file': - return self._list_outputs()['out_file'] + if name == "out_file": + return self._list_outputs()["out_file"] return None @@ -1328,33 +1381,33 @@ class PlotMotionParamsInputSpec(FSLCommandInputSpec): mandatory=True, argstr="%s", position=1, - desc="file with motion parameters") + desc="file with motion parameters", + ) in_source = traits.Enum( "spm", "fsl", mandatory=True, - desc=("which program generated the motion " - "parameter file - fsl, spm")) + desc=("which program generated the motion " "parameter file - fsl, spm"), + ) plot_type = traits.Enum( "rotations", "translations", "displacement", argstr="%s", mandatory=True, - desc=("which motion type to plot - rotations, " - "translations, displacement")) + desc=("which motion type to plot - rotations, " "translations, displacement"), + ) plot_size = traits.Tuple( - traits.Int, - traits.Int, - argstr="%s", - desc="plot image height and width") + traits.Int, traits.Int, argstr="%s", desc="plot image height and width" + ) out_file = File( - argstr="-o %s", genfile=True, desc="image to write", hash_files=False) + argstr="-o %s", genfile=True, desc="image to write", hash_files=False + ) class PlotMotionParamsOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='image to write') + out_file = File(exists=True, desc="image to write") class PlotMotionParams(FSLCommand): @@ -1384,7 +1437,8 @@ class PlotMotionParams(FSLCommand): more flexibilty, see the :class:`fsl.PlotTimeSeries` interface. 
""" - _cmd = 'fsl_tsplot' + + _cmd = "fsl_tsplot" input_spec = PlotMotionParamsInputSpec output_spec = PlotMotionParamsOutputSpec @@ -1393,24 +1447,27 @@ def _format_arg(self, name, spec, value): if name == "plot_type": source = self.inputs.in_source - if self.inputs.plot_type == 'displacement': - title = '-t \'MCFLIRT estimated mean displacement (mm)\'' - labels = '-a abs,rel' - return '%s %s' % (title, labels) + if self.inputs.plot_type == "displacement": + title = "-t 'MCFLIRT estimated mean displacement (mm)'" + labels = "-a abs,rel" + return "%s %s" % (title, labels) # Get the right starting and ending position depending on source # package sfdict = dict( - fsl_rot=(1, 3), fsl_tra=(4, 6), spm_rot=(4, 6), spm_tra=(1, 3)) + fsl_rot=(1, 3), fsl_tra=(4, 6), spm_rot=(4, 6), spm_tra=(1, 3) + ) # Format the title properly - sfstr = "--start=%d --finish=%d" % sfdict["%s_%s" % (source, - value[:3])] + sfstr = "--start=%d --finish=%d" % sfdict["%s_%s" % (source, value[:3])] titledict = dict(fsl="MCFLIRT", spm="Realign") unitdict = dict(rot="radians", tra="mm") - title = "\'%s estimated %s (%s)\'" % (titledict[source], value, - unitdict[value[:3]]) + title = "'%s estimated %s (%s)'" % ( + titledict[source], + value, + unitdict[value[:3]], + ) return "-t %s %s -a x,y,z" % (title, sfstr) elif name == "plot_size": @@ -1432,16 +1489,18 @@ def _list_outputs(self): infile = self.inputs.in_file[0] else: infile = self.inputs.in_file - plttype = dict( - rot="rot", tra="trans", dis="disp")[self.inputs.plot_type[:3]] + plttype = dict(rot="rot", tra="trans", dis="disp")[ + self.inputs.plot_type[:3] + ] out_file = fname_presuffix( - infile, suffix="_%s.png" % plttype, use_ext=False) - outputs['out_file'] = os.path.abspath(out_file) + infile, suffix="_%s.png" % plttype, use_ext=False + ) + outputs["out_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): - if name == 'out_file': - return self._list_outputs()['out_file'] + if name == "out_file": + return self._list_outputs()["out_file"] return None @@ -1451,38 +1510,39 @@ class ConvertXFMInputSpec(FSLCommandInputSpec): mandatory=True, argstr="%s", position=-1, - desc="input transformation matrix") + desc="input transformation matrix", + ) in_file2 = File( exists=True, argstr="%s", position=-2, - desc="second input matrix (for use with fix_scale_skew or concat_xfm)") + desc="second input matrix (for use with fix_scale_skew or concat_xfm)", + ) _options = ["invert_xfm", "concat_xfm", "fix_scale_skew"] invert_xfm = traits.Bool( - argstr="-inverse", - position=-3, - xor=_options, - desc="invert input transformation") + argstr="-inverse", position=-3, xor=_options, desc="invert input transformation" + ) concat_xfm = traits.Bool( argstr="-concat", position=-3, xor=_options, requires=["in_file2"], - desc=("write joint transformation of two input " - "matrices")) + desc=("write joint transformation of two input " "matrices"), + ) fix_scale_skew = traits.Bool( argstr="-fixscaleskew", position=-3, xor=_options, requires=["in_file2"], - desc=("use secondary matrix to fix scale and " - "skew")) + desc=("use secondary matrix to fix scale and " "skew"), + ) out_file = File( genfile=True, argstr="-omat %s", position=1, desc="final transformation matrix", - hash_files=False) + hash_files=False, + ) class ConvertXFMOutputSpec(TraitedSpec): @@ -1517,10 +1577,8 @@ def _list_outputs(self): _, infile1, _ = split_filename(self.inputs.in_file) if self.inputs.invert_xfm: outfile = fname_presuffix( - infile1, - suffix="_inv.mat", - newpath=os.getcwd(), - 
use_ext=False) + infile1, suffix="_inv.mat", newpath=os.getcwd(), use_ext=False + ) else: if self.inputs.concat_xfm: _, infile2, _ = split_filename(self.inputs.in_file2) @@ -1528,13 +1586,12 @@ def _list_outputs(self): "%s_%s" % (infile1, infile2), suffix=".mat", newpath=os.getcwd(), - use_ext=False) + use_ext=False, + ) else: outfile = fname_presuffix( - infile1, - suffix="_fix.mat", - newpath=os.getcwd(), - use_ext=False) + infile1, suffix="_fix.mat", newpath=os.getcwd(), use_ext=False + ) outputs["out_file"] = os.path.abspath(outfile) return outputs @@ -1547,23 +1604,18 @@ def _gen_filename(self, name): class SwapDimensionsInputSpec(FSLCommandInputSpec): in_file = File( - exists=True, - mandatory=True, - argstr="%s", - position="1", - desc="input image") - _dims = [ - "x", "-x", "y", "-y", "z", "-z", "RL", "LR", "AP", "PA", "IS", "SI" - ] + exists=True, mandatory=True, argstr="%s", position="1", desc="input image" + ) + _dims = ["x", "-x", "y", "-y", "z", "-z", "RL", "LR", "AP", "PA", "IS", "SI"] new_dims = traits.Tuple( traits.Enum(_dims), traits.Enum(_dims), traits.Enum(_dims), argstr="%s %s %s", mandatory=True, - desc="3-tuple of new dimension order") - out_file = File( - genfile=True, argstr="%s", desc="image to write", hash_files=False) + desc="3-tuple of new dimension order", + ) + out_file = File(genfile=True, argstr="%s", desc="image to write", hash_files=False) class SwapDimensionsOutputSpec(TraitedSpec): @@ -1580,6 +1632,7 @@ class SwapDimensions(FSLCommand): (RL, LR, AP, PA, IS, SI). """ + _cmd = "fslswapdim" input_spec = SwapDimensionsInputSpec output_spec = SwapDimensionsOutputSpec @@ -1589,7 +1642,8 @@ def _list_outputs(self): outputs["out_file"] = self.inputs.out_file if not isdefined(self.inputs.out_file): outputs["out_file"] = self._gen_fname( - self.inputs.in_file, suffix='_newdims') + self.inputs.in_file, suffix="_newdims" + ) outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs @@ -1605,20 +1659,21 @@ class PowerSpectrumInputSpec(FSLCommandInputSpec): in_file = File( exists=True, desc="input 4D file to estimate the power spectrum", - argstr='%s', + argstr="%s", position=0, - mandatory=True) + mandatory=True, + ) out_file = File( - desc='name of output 4D file for power spectrum', - argstr='%s', + desc="name of output 4D file for power spectrum", + argstr="%s", position=1, genfile=True, - hash_files=False) + hash_files=False, + ) class PowerSpectrumOutputSpec(TraitedSpec): - out_file = File( - exists=True, desc="path/name of the output 4D power spectrum file") + out_file = File(exists=True, desc="path/name of the output 4D power spectrum file") class PowerSpectrum(FSLCommand): @@ -1635,41 +1690,42 @@ class PowerSpectrum(FSLCommand): """ - _cmd = 'fslpspec' + _cmd = "fslpspec" input_spec = PowerSpectrumInputSpec output_spec = PowerSpectrumOutputSpec def _gen_outfilename(self): out_file = self.inputs.out_file if not isdefined(out_file) and isdefined(self.inputs.in_file): - out_file = self._gen_fname(self.inputs.in_file, suffix='_ps') + out_file = self._gen_fname(self.inputs.in_file, suffix="_ps") return out_file def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self._gen_outfilename()) + outputs["out_file"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._gen_outfilename() return None class SigLossInputSpec(FSLCommandInputSpec): - in_file = File( - mandatory=True, exists=True, argstr='-i %s', 
desc='b0 fieldmap file') + in_file = File(mandatory=True, exists=True, argstr="-i %s", desc="b0 fieldmap file") out_file = File( - argstr='-s %s', desc='output signal loss estimate file', genfile=True) + argstr="-s %s", desc="output signal loss estimate file", genfile=True + ) - mask_file = File(exists=True, argstr='-m %s', desc='brain mask file') - echo_time = traits.Float(argstr='--te=%f', desc='echo time in seconds') + mask_file = File(exists=True, argstr="-m %s", desc="brain mask file") + echo_time = traits.Float(argstr="--te=%f", desc="echo time in seconds") slice_direction = traits.Enum( - 'x', 'y', 'z', argstr='-d %s', desc='slicing direction') + "x", "y", "z", argstr="-d %s", desc="slicing direction" + ) class SigLossOuputSpec(TraitedSpec): - out_file = File(exists=True, desc='signal loss estimate file') + out_file = File(exists=True, desc="signal loss estimate file") class SigLoss(FSLCommand): @@ -1685,22 +1741,23 @@ class SigLoss(FSLCommand): """ + input_spec = SigLossInputSpec output_spec = SigLossOuputSpec - _cmd = 'sigloss' + _cmd = "sigloss" def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_file - if not isdefined(outputs['out_file']) and \ - isdefined(self.inputs.in_file): - outputs['out_file'] = self._gen_fname( - self.inputs.in_file, suffix='_sigloss') + outputs["out_file"] = self.inputs.out_file + if not isdefined(outputs["out_file"]) and isdefined(self.inputs.in_file): + outputs["out_file"] = self._gen_fname( + self.inputs.in_file, suffix="_sigloss" + ) return outputs def _gen_filename(self, name): - if name == 'out_file': - return self._list_outputs()['out_file'] + if name == "out_file": + return self._list_outputs()["out_file"] return None @@ -1727,95 +1784,118 @@ class Reorient2Std(FSLCommand): """ - _cmd = 'fslreorient2std' + + _cmd = "fslreorient2std" input_spec = Reorient2StdInputSpec output_spec = Reorient2StdOutputSpec def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": return self._gen_fname(self.inputs.in_file, suffix="_reoriented") return None def _list_outputs(self): outputs = self.output_spec().get() if not isdefined(self.inputs.out_file): - outputs['out_file'] = self._gen_filename('out_file') + outputs["out_file"] = self._gen_filename("out_file") else: - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class InvWarpInputSpec(FSLCommandInputSpec): warp = File( exists=True, - argstr='--warp=%s', + argstr="--warp=%s", mandatory=True, - desc=('Name of file containing warp-coefficients/fields. This ' - 'would typically be the output from the --cout switch of' - ' fnirt (but can also use fields, like the output from ' - '--fout).')) + desc=( + "Name of file containing warp-coefficients/fields. This " + "would typically be the output from the --cout switch of" + " fnirt (but can also use fields, like the output from " + "--fout)." + ), + ) reference = File( exists=True, - argstr='--ref=%s', + argstr="--ref=%s", mandatory=True, - desc=('Name of a file in target space. Note that the ' - 'target space is now different from the target ' - 'space that was used to create the --warp file. It ' - 'would typically be the file that was specified ' - 'with the --in argument when running fnirt.')) + desc=( + "Name of a file in target space. Note that the " + "target space is now different from the target " + "space that was used to create the --warp file. 
It " + "would typically be the file that was specified " + "with the --in argument when running fnirt." + ), + ) inverse_warp = File( - argstr='--out=%s', - name_source=['warp'], + argstr="--out=%s", + name_source=["warp"], hash_files=False, - name_template='%s_inverse', - desc=('Name of output file, containing warps that are ' - 'the "reverse" of those in --warp. This will be ' - 'a field-file (rather than a file of spline ' - 'coefficients), and it will have any affine ' - 'component included as part of the ' - 'displacements.')) + name_template="%s_inverse", + desc=( + "Name of output file, containing warps that are " + 'the "reverse" of those in --warp. This will be ' + "a field-file (rather than a file of spline " + "coefficients), and it will have any affine " + "component included as part of the " + "displacements." + ), + ) absolute = traits.Bool( - argstr='--abs', - xor=['relative'], - desc=('If set it indicates that the warps in --warp' - ' should be interpreted as absolute, provided' - ' that it is not created by fnirt (which ' - 'always uses relative warps). If set it also ' - 'indicates that the output --out should be ' - 'absolute.')) + argstr="--abs", + xor=["relative"], + desc=( + "If set it indicates that the warps in --warp" + " should be interpreted as absolute, provided" + " that it is not created by fnirt (which " + "always uses relative warps). If set it also " + "indicates that the output --out should be " + "absolute." + ), + ) relative = traits.Bool( - argstr='--rel', - xor=['absolute'], - desc=('If set it indicates that the warps in --warp' - ' should be interpreted as relative. I.e. the' - ' values in --warp are displacements from the' - ' coordinates in the --ref space. If set it ' - 'also indicates that the output --out should ' - 'be relative.')) + argstr="--rel", + xor=["absolute"], + desc=( + "If set it indicates that the warps in --warp" + " should be interpreted as relative. I.e. the" + " values in --warp are displacements from the" + " coordinates in the --ref space. If set it " + "also indicates that the output --out should " + "be relative." + ), + ) niter = traits.Int( - argstr='--niter=%d', - desc=('Determines how many iterations of the ' - 'gradient-descent search that should be run.')) + argstr="--niter=%d", + desc=( + "Determines how many iterations of the " + "gradient-descent search that should be run." + ), + ) regularise = traits.Float( - argstr='--regularise=%f', - desc='Regularization strength (deafult=1.0).') + argstr="--regularise=%f", desc="Regularization strength (deafult=1.0)." + ) noconstraint = traits.Bool( - argstr='--noconstraint', desc='Do not apply Jacobian constraint') + argstr="--noconstraint", desc="Do not apply Jacobian constraint" + ) jacobian_min = traits.Float( - argstr='--jmin=%f', - desc=('Minimum acceptable Jacobian value for ' - 'constraint (default 0.01)')) + argstr="--jmin=%f", + desc=("Minimum acceptable Jacobian value for " "constraint (default 0.01)"), + ) jacobian_max = traits.Float( - argstr='--jmax=%f', - desc=('Maximum acceptable Jacobian value for ' - 'constraint (default 100.0)')) + argstr="--jmax=%f", + desc=("Maximum acceptable Jacobian value for " "constraint (default 100.0)"), + ) class InvWarpOutputSpec(TraitedSpec): inverse_warp = File( exists=True, - desc=('Name of output file, containing warps that are ' - 'the "reverse" of those in --warp.')) + desc=( + "Name of output file, containing warps that are " + 'the "reverse" of those in --warp.' 
+ ), + ) class InvWarp(FSLCommand): @@ -1841,7 +1921,7 @@ class InvWarp(FSLCommand): input_spec = InvWarpInputSpec output_spec = InvWarpOutputSpec - _cmd = 'invwarp' + _cmd = "invwarp" class ComplexInputSpec(FSLCommandInputSpec): @@ -1851,82 +1931,68 @@ class ComplexInputSpec(FSLCommandInputSpec): real_in_file = File(exists=True, argstr="%s", position=2) imaginary_in_file = File(exists=True, argstr="%s", position=3) magnitude_in_file = File(exists=True, argstr="%s", position=2) - phase_in_file = File(exists=True, argstr='%s', position=3) + phase_in_file = File(exists=True, argstr="%s", position=3) _ofs = [ - 'complex_out_file', 'magnitude_out_file', 'phase_out_file', - 'real_out_file', 'imaginary_out_file' + "complex_out_file", + "magnitude_out_file", + "phase_out_file", + "real_out_file", + "imaginary_out_file", ] _conversion = [ - 'real_polar', - 'real_cartesian', - 'complex_cartesian', - 'complex_polar', - 'complex_split', - 'complex_merge', + "real_polar", + "real_cartesian", + "complex_cartesian", + "complex_polar", + "complex_split", + "complex_merge", ] complex_out_file = File( - genfile=True, argstr="%s", position=-3, xor=_ofs + _conversion[:2]) + genfile=True, argstr="%s", position=-3, xor=_ofs + _conversion[:2] + ) magnitude_out_file = File( genfile=True, argstr="%s", position=-4, - xor=_ofs[:1] + _ofs[3:] + _conversion[1:]) + xor=_ofs[:1] + _ofs[3:] + _conversion[1:], + ) phase_out_file = File( genfile=True, argstr="%s", position=-3, - xor=_ofs[:1] + _ofs[3:] + _conversion[1:]) + xor=_ofs[:1] + _ofs[3:] + _conversion[1:], + ) real_out_file = File( genfile=True, argstr="%s", position=-4, - xor=_ofs[:3] + _conversion[:1] + _conversion[2:]) + xor=_ofs[:3] + _conversion[:1] + _conversion[2:], + ) imaginary_out_file = File( genfile=True, argstr="%s", position=-3, - xor=_ofs[:3] + _conversion[:1] + _conversion[2:]) + xor=_ofs[:3] + _conversion[:1] + _conversion[2:], + ) - start_vol = traits.Int(position=-2, argstr='%d') - end_vol = traits.Int(position=-1, argstr='%d') + start_vol = traits.Int(position=-2, argstr="%d") + end_vol = traits.Int(position=-1, argstr="%d") - real_polar = traits.Bool( - argstr='-realpolar', - xor=_conversion, - position=1, - ) + real_polar = traits.Bool(argstr="-realpolar", xor=_conversion, position=1,) # requires=['complex_in_file','magnitude_out_file','phase_out_file']) - real_cartesian = traits.Bool( - argstr='-realcartesian', - xor=_conversion, - position=1, - ) + real_cartesian = traits.Bool(argstr="-realcartesian", xor=_conversion, position=1,) # requires=['complex_in_file','real_out_file','imaginary_out_file']) - complex_cartesian = traits.Bool( - argstr='-complex', - xor=_conversion, - position=1, - ) + complex_cartesian = traits.Bool(argstr="-complex", xor=_conversion, position=1,) # requires=['real_in_file','imaginary_in_file','complex_out_file']) - complex_polar = traits.Bool( - argstr='-complexpolar', - xor=_conversion, - position=1, - ) + complex_polar = traits.Bool(argstr="-complexpolar", xor=_conversion, position=1,) # requires=['magnitude_in_file','phase_in_file', # 'magnitude_out_file','phase_out_file']) - complex_split = traits.Bool( - argstr='-complexsplit', - xor=_conversion, - position=1, - ) + complex_split = traits.Bool(argstr="-complexsplit", xor=_conversion, position=1,) # requires=['complex_in_file','complex_out_file']) complex_merge = traits.Bool( - argstr='-complexmerge', - xor=_conversion + ['start_vol', 'end_vol'], - position=1, + argstr="-complexmerge", xor=_conversion + ["start_vol", "end_vol"], position=1, ) @@ -1954,7 +2020,8 
@@ class Complex(FSLCommand): """ - _cmd = 'fslcomplex' + + _cmd = "fslcomplex" input_spec = ComplexInputSpec output_spec = ComplexOuputSpec @@ -1970,7 +2037,7 @@ def _parse_inputs(self, skip=None): return super(Complex, self)._parse_inputs(skip) def _gen_filename(self, name): - if name == 'complex_out_file': + if name == "complex_out_file": if self.inputs.complex_cartesian: in_file = self.inputs.real_in_file elif self.inputs.complex_polar: @@ -1980,14 +2047,13 @@ def _gen_filename(self, name): else: return None return self._gen_fname(in_file, suffix="_cplx") - elif name == 'magnitude_out_file': + elif name == "magnitude_out_file": return self._gen_fname(self.inputs.complex_in_file, suffix="_mag") - elif name == 'phase_out_file': - return self._gen_fname( - self.inputs.complex_in_file, suffix="_phase") - elif name == 'real_out_file': + elif name == "phase_out_file": + return self._gen_fname(self.inputs.complex_in_file, suffix="_phase") + elif name == "real_out_file": return self._gen_fname(self.inputs.complex_in_file, suffix="_real") - elif name == 'imaginary_out_file': + elif name == "imaginary_out_file": return self._gen_fname(self.inputs.complex_in_file, suffix="_imag") return None @@ -1999,110 +2065,140 @@ def _get_output(self, name): def _list_outputs(self): outputs = self.output_spec().get() - if self.inputs.complex_cartesian or self.inputs.complex_polar or \ - self.inputs.complex_split or self.inputs.complex_merge: - outputs['complex_out_file'] = self._get_output('complex_out_file') + if ( + self.inputs.complex_cartesian + or self.inputs.complex_polar + or self.inputs.complex_split + or self.inputs.complex_merge + ): + outputs["complex_out_file"] = self._get_output("complex_out_file") elif self.inputs.real_cartesian: - outputs['real_out_file'] = self._get_output('real_out_file') - outputs['imaginary_out_file'] = self._get_output( - 'imaginary_out_file') + outputs["real_out_file"] = self._get_output("real_out_file") + outputs["imaginary_out_file"] = self._get_output("imaginary_out_file") elif self.inputs.real_polar: - outputs['magnitude_out_file'] = self._get_output( - 'magnitude_out_file') - outputs['phase_out_file'] = self._get_output('phase_out_file') + outputs["magnitude_out_file"] = self._get_output("magnitude_out_file") + outputs["phase_out_file"] = self._get_output("phase_out_file") return outputs class WarpUtilsInputSpec(FSLCommandInputSpec): in_file = File( exists=True, - argstr='--in=%s', + argstr="--in=%s", mandatory=True, - desc=('Name of file containing warp-coefficients/fields. This ' - 'would typically be the output from the --cout switch of ' - 'fnirt (but can also use fields, like the output from ' - '--fout).')) + desc=( + "Name of file containing warp-coefficients/fields. This " + "would typically be the output from the --cout switch of " + "fnirt (but can also use fields, like the output from " + "--fout)." + ), + ) reference = File( exists=True, - argstr='--ref=%s', + argstr="--ref=%s", mandatory=True, - desc=('Name of a file in target space. Note that the ' - 'target space is now different from the target ' - 'space that was used to create the --warp file. It ' - 'would typically be the file that was specified ' - 'with the --in argument when running fnirt.')) + desc=( + "Name of a file in target space. Note that the " + "target space is now different from the target " + "space that was used to create the --warp file. It " + "would typically be the file that was specified " + "with the --in argument when running fnirt." 
+ ), + ) out_format = traits.Enum( - 'spline', - 'field', - argstr='--outformat=%s', - desc=('Specifies the output format. If set to field (default) ' - 'the output will be a (4D) field-file. If set to spline ' - 'the format will be a (4D) file of spline coefficients.')) + "spline", + "field", + argstr="--outformat=%s", + desc=( + "Specifies the output format. If set to field (default) " + "the output will be a (4D) field-file. If set to spline " + "the format will be a (4D) file of spline coefficients." + ), + ) warp_resolution = traits.Tuple( traits.Float, traits.Float, traits.Float, - argstr='--warpres=%0.4f,%0.4f,%0.4f', - desc=('Specifies the resolution/knot-spacing of the splines pertaining' - ' to the coefficients in the --out file. This parameter is only ' - 'relevant if --outformat is set to spline. It should be noted ' - 'that if the --in file has a higher resolution, the resulting ' - 'coefficients will pertain to the closest (in a least-squares' - ' sense) file in the space of fields with the --warpres' - ' resolution. It should also be noted that the resolution ' - 'will always be an integer multiple of the voxel ' - 'size.')) + argstr="--warpres=%0.4f,%0.4f,%0.4f", + desc=( + "Specifies the resolution/knot-spacing of the splines pertaining" + " to the coefficients in the --out file. This parameter is only " + "relevant if --outformat is set to spline. It should be noted " + "that if the --in file has a higher resolution, the resulting " + "coefficients will pertain to the closest (in a least-squares" + " sense) file in the space of fields with the --warpres" + " resolution. It should also be noted that the resolution " + "will always be an integer multiple of the voxel " + "size." + ), + ) knot_space = traits.Tuple( traits.Int, traits.Int, traits.Int, - argstr='--knotspace=%d,%d,%d', - desc=('Alternative (to --warpres) specification of the resolution of ' - 'the output spline-field.')) + argstr="--knotspace=%d,%d,%d", + desc=( + "Alternative (to --warpres) specification of the resolution of " + "the output spline-field." + ), + ) out_file = File( - argstr='--out=%s', + argstr="--out=%s", position=-1, - name_source=['in_file'], - output_name='out_file', - desc=('Name of output file. The format of the output depends on what ' - 'other parameters are set. The default format is a (4D) ' - 'field-file. If the --outformat is set to spline the format ' - 'will be a (4D) file of spline coefficients.')) + name_source=["in_file"], + output_name="out_file", + desc=( + "Name of output file. The format of the output depends on what " + "other parameters are set. The default format is a (4D) " + "field-file. If the --outformat is set to spline the format " + "will be a (4D) file of spline coefficients." + ), + ) write_jacobian = traits.Bool( False, mandatory=True, usedefault=True, - desc='Switch on --jac flag with automatically generated filename') + desc="Switch on --jac flag with automatically generated filename", + ) out_jacobian = File( - argstr='--jac=%s', - desc=('Specifies that a (3D) file of Jacobian determinants ' - 'corresponding to --in should be produced and written to ' - 'filename.')) + argstr="--jac=%s", + desc=( + "Specifies that a (3D) file of Jacobian determinants " + "corresponding to --in should be produced and written to " + "filename." + ), + ) with_affine = traits.Bool( False, - argstr='--withaff', - desc=('Specifies that the affine transform (i.e. 
that which was ' - 'specified for the --aff parameter in fnirt) should be ' - 'included as displacements in the --out file. That can be ' - 'useful for interfacing with software that cannot decode ' - 'FSL/fnirt coefficient-files (where the affine transform is ' - 'stored separately from the displacements).')) + argstr="--withaff", + desc=( + "Specifies that the affine transform (i.e. that which was " + "specified for the --aff parameter in fnirt) should be " + "included as displacements in the --out file. That can be " + "useful for interfacing with software that cannot decode " + "FSL/fnirt coefficient-files (where the affine transform is " + "stored separately from the displacements)." + ), + ) class WarpUtilsOutputSpec(TraitedSpec): out_file = File( - desc=('Name of output file, containing the warp as field or ' - 'coefficients.')) + desc=("Name of output file, containing the warp as field or " "coefficients.") + ) out_jacobian = File( - desc=('Name of output file, containing the map of the determinant of ' - 'the Jacobian')) + desc=( + "Name of output file, containing the map of the determinant of " + "the Jacobian" + ) + ) class WarpUtils(FSLCommand): @@ -2130,160 +2226,175 @@ class WarpUtils(FSLCommand): input_spec = WarpUtilsInputSpec output_spec = WarpUtilsOutputSpec - _cmd = 'fnirtfileutils' + _cmd = "fnirtfileutils" def _parse_inputs(self, skip=None): if skip is None: skip = [] - suffix = 'field' - if (isdefined(self.inputs.out_format) - and self.inputs.out_format == 'spline'): - suffix = 'coeffs' + suffix = "field" + if isdefined(self.inputs.out_format) and self.inputs.out_format == "spline": + suffix = "coeffs" - trait_spec = self.inputs.trait('out_file') + trait_spec = self.inputs.trait("out_file") trait_spec.name_template = "%s_" + suffix if self.inputs.write_jacobian: if not isdefined(self.inputs.out_jacobian): - jac_spec = self.inputs.trait('out_jacobian') - jac_spec.name_source = ['in_file'] - jac_spec.name_template = '%s_jac' - jac_spec.output_name = 'out_jacobian' + jac_spec = self.inputs.trait("out_jacobian") + jac_spec.name_source = ["in_file"] + jac_spec.name_template = "%s_jac" + jac_spec.output_name = "out_jacobian" else: - skip += ['out_jacobian'] + skip += ["out_jacobian"] - skip += ['write_jacobian'] + skip += ["write_jacobian"] return super(WarpUtils, self)._parse_inputs(skip=skip) class ConvertWarpInputSpec(FSLCommandInputSpec): reference = File( exists=True, - argstr='--ref=%s', + argstr="--ref=%s", mandatory=True, position=1, - desc='Name of a file in target space of the full transform.') + desc="Name of a file in target space of the full transform.", + ) out_file = File( - argstr='--out=%s', + argstr="--out=%s", position=-1, - name_source=['reference'], - name_template='%s_concatwarp', - output_name='out_file', - desc=('Name of output file, containing warps that are the combination ' - 'of all those given as arguments. The format of this will be a ' - 'field-file (rather than spline coefficients) with any affine ' - 'components included.')) + name_source=["reference"], + name_template="%s_concatwarp", + output_name="out_file", + desc=( + "Name of output file, containing warps that are the combination " + "of all those given as arguments. The format of this will be a " + "field-file (rather than spline coefficients) with any affine " + "components included." 
+ ), + ) premat = File( exists=True, - argstr='--premat=%s', - desc='filename for pre-transform (affine matrix)') + argstr="--premat=%s", + desc="filename for pre-transform (affine matrix)", + ) warp1 = File( exists=True, - argstr='--warp1=%s', - desc='Name of file containing initial ' - 'warp-fields/coefficients (follows premat). This could ' - 'e.g. be a fnirt-transform from a subjects structural ' - 'scan to an average of a group of subjects.') + argstr="--warp1=%s", + desc="Name of file containing initial " + "warp-fields/coefficients (follows premat). This could " + "e.g. be a fnirt-transform from a subject's structural " + "scan to an average of a group of subjects.", + ) midmat = File( exists=True, argstr="--midmat=%s", - desc="Name of file containing mid-warp-affine transform") + desc="Name of file containing mid-warp-affine transform", + ) warp2 = File( exists=True, - argstr='--warp2=%s', - desc='Name of file containing secondary warp-fields/coefficients ' - '(after warp1/midmat but before postmat). This could e.g. be a ' - 'fnirt-transform from the average of a group of subjects to some ' - 'standard space (e.g. MNI152).') + argstr="--warp2=%s", + desc="Name of file containing secondary warp-fields/coefficients " + "(after warp1/midmat but before postmat). This could e.g. be a " + "fnirt-transform from the average of a group of subjects to some " + "standard space (e.g. MNI152).", + ) postmat = File( exists=True, - argstr='--postmat=%s', - desc='Name of file containing an affine transform (applied last). It ' - 'could e.g. be an affine transform that maps the MNI152-space ' - 'into a better approximation to the Talairach-space (if indeed ' - 'there is one).') + argstr="--postmat=%s", + desc="Name of file containing an affine transform (applied last). It " + "could e.g. be an affine transform that maps the MNI152-space " + "into a better approximation to the Talairach-space (if indeed " + "there is one).", + ) shift_in_file = File( exists=True, - argstr='--shiftmap=%s', + argstr="--shiftmap=%s", desc='Name of file containing a "shiftmap", a non-linear transform ' - 'with displacements only in one direction (applied first, before ' - 'premat). This would typically be a fieldmap that has been ' - 'pre-processed using fugue that maps a subjects functional (EPI) ' - 'data onto an undistorted space (i.e. a space that corresponds ' - 'to his/her true anatomy).') + "with displacements only in one direction (applied first, before " + "premat). This would typically be a fieldmap that has been " + "pre-processed using fugue that maps a subject's functional (EPI) " + "data onto an undistorted space (i.e. a space that corresponds " + "to his/her true anatomy).", + ) shift_direction = traits.Enum( - 'y-', - 'y', - 'x', - 'x-', - 'z', - 'z-', + "y-", + "y", + "x", + "x-", + "z", + "z-", argstr="--shiftdir=%s", - requires=['shift_in_file'], - desc='Indicates the direction that the distortions from ' - '--shiftmap goes. It depends on the direction and ' - 'polarity of the phase-encoding in the EPI sequence.') + requires=["shift_in_file"], + desc="Indicates the direction that the distortions from " + "--shiftmap go. 
It depends on the direction and " + "polarity of the phase-encoding in the EPI sequence.", + ) cons_jacobian = traits.Bool( False, - argstr='--constrainj', - desc='Constrain the Jacobian of the warpfield to lie within specified ' - 'min/max limits.') + argstr="--constrainj", + desc="Constrain the Jacobian of the warpfield to lie within specified " + "min/max limits.", + ) jacobian_min = traits.Float( - argstr='--jmin=%f', - desc='Minimum acceptable Jacobian value for ' - 'constraint (default 0.01)') + argstr="--jmin=%f", + desc="Minimum acceptable Jacobian value for " "constraint (default 0.01)", + ) jacobian_max = traits.Float( - argstr='--jmax=%f', - desc='Maximum acceptable Jacobian value for ' - 'constraint (default 100.0)') + argstr="--jmax=%f", + desc="Maximum acceptable Jacobian value for " "constraint (default 100.0)", + ) abswarp = traits.Bool( - argstr='--abs', - xor=['relwarp'], - desc='If set it indicates that the warps in --warp1 and --warp2 should' - ' be interpreted as absolute. I.e. the values in --warp1/2 are ' - 'the coordinates in the next space, rather than displacements. ' - 'This flag is ignored if --warp1/2 was created by fnirt, which ' - 'always creates relative displacements.') + argstr="--abs", + xor=["relwarp"], + desc="If set it indicates that the warps in --warp1 and --warp2 should" + " be interpreted as absolute. I.e. the values in --warp1/2 are " + "the coordinates in the next space, rather than displacements. " + "This flag is ignored if --warp1/2 was created by fnirt, which " + "always creates relative displacements.", + ) relwarp = traits.Bool( - argstr='--rel', - xor=['abswarp'], - desc='If set it indicates that the warps in --warp1/2 should be ' - 'interpreted as relative. I.e. the values in --warp1/2 are ' - 'displacements from the coordinates in the next space.') + argstr="--rel", + xor=["abswarp"], + desc="If set it indicates that the warps in --warp1/2 should be " + "interpreted as relative. I.e. the values in --warp1/2 are " + "displacements from the coordinates in the next space.", + ) out_abswarp = traits.Bool( - argstr='--absout', - xor=['out_relwarp'], - desc='If set it indicates that the warps in --out should be absolute, ' - 'i.e. the values in --out are displacements from the coordinates ' - 'in --ref.') + argstr="--absout", + xor=["out_relwarp"], + desc="If set it indicates that the warps in --out should be absolute, " + "i.e. the values in --out are displacements from the coordinates " + "in --ref.", + ) out_relwarp = traits.Bool( - argstr='--relout', - xor=['out_abswarp'], - desc='If set it indicates that the warps in --out should be relative, ' - 'i.e. the values in --out are displacements from the coordinates ' - 'in --ref.') + argstr="--relout", + xor=["out_abswarp"], + desc="If set it indicates that the warps in --out should be relative, " + "i.e. 
the values in --out are displacements from the coordinates " + "in --ref.", + ) class ConvertWarpOutputSpec(TraitedSpec): out_file = File( exists=True, - desc='Name of output file, containing the warp as field or ' - 'coefficients.') + desc="Name of output file, containing the warp as field or " "coefficients.", + ) class ConvertWarp(FSLCommand): @@ -2309,59 +2420,63 @@ class ConvertWarp(FSLCommand): input_spec = ConvertWarpInputSpec output_spec = ConvertWarpOutputSpec - _cmd = 'convertwarp' + _cmd = "convertwarp" class WarpPointsBaseInputSpec(CommandLineInputSpec): in_coords = File( exists=True, position=-1, - argstr='%s', + argstr="%s", mandatory=True, - desc='filename of file containing coordinates') + desc="filename of file containing coordinates", + ) xfm_file = File( exists=True, - argstr='-xfm %s', - xor=['warp_file'], - desc='filename of affine transform (e.g. source2dest.mat)') + argstr="-xfm %s", + xor=["warp_file"], + desc="filename of affine transform (e.g. source2dest.mat)", + ) warp_file = File( exists=True, - argstr='-warp %s', - xor=['xfm_file'], - desc='filename of warpfield (e.g. ' - 'intermediate2dest_warp.nii.gz)') + argstr="-warp %s", + xor=["xfm_file"], + desc="filename of warpfield (e.g. " "intermediate2dest_warp.nii.gz)", + ) coord_vox = traits.Bool( True, - argstr='-vox', - xor=['coord_mm'], - desc='all coordinates in voxels - default') + argstr="-vox", + xor=["coord_mm"], + desc="all coordinates in voxels - default", + ) coord_mm = traits.Bool( - False, argstr='-mm', xor=['coord_vox'], desc='all coordinates in mm') + False, argstr="-mm", xor=["coord_vox"], desc="all coordinates in mm" + ) out_file = File( - name_source='in_coords', - name_template='%s_warped', - output_name='out_file', - desc='output file name') + name_source="in_coords", + name_template="%s_warped", + output_name="out_file", + desc="output file name", + ) class WarpPointsInputSpec(WarpPointsBaseInputSpec): src_file = File( - exists=True, - argstr='-src %s', - mandatory=True, - desc='filename of source image') + exists=True, argstr="-src %s", mandatory=True, desc="filename of source image" + ) dest_file = File( exists=True, - argstr='-dest %s', + argstr="-dest %s", mandatory=True, - desc='filename of destination image') + desc="filename of destination image", + ) class WarpPointsOutputSpec(TraitedSpec): out_file = File( exists=True, - desc='Name of output file, containing the warp as field or ' - 'coefficients.') + desc="Name of output file, containing the warp as field or " "coefficients.", + ) class WarpPoints(CommandLine): @@ -2390,8 +2505,8 @@ class WarpPoints(CommandLine): input_spec = WarpPointsInputSpec output_spec = WarpPointsOutputSpec - _cmd = 'img2imgcoord' - _terminal_output = 'stream' + _cmd = "img2imgcoord" + _terminal_output = "stream" def __init__(self, command=None, **inputs): self._tmpfile = None @@ -2401,24 +2516,26 @@ def __init__(self, command=None, **inputs): super(WarpPoints, self).__init__(command=command, **inputs) def _format_arg(self, name, trait_spec, value): - if name == 'out_file': - return '' + if name == "out_file": + return "" return super(WarpPoints, self)._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): fname, ext = op.splitext(self.inputs.in_coords) - setattr(self, '_in_file', fname) - setattr(self, '_outformat', ext[1:]) - first_args = super(WarpPoints, - self)._parse_inputs(skip=['in_coords', 'out_file']) + setattr(self, "_in_file", fname) + setattr(self, "_outformat", ext[1:]) + first_args = super(WarpPoints, self)._parse_inputs( + 
skip=["in_coords", "out_file"] + ) - second_args = fname + '.txt' + second_args = fname + ".txt" - if ext in ['.vtk', '.trk']: + if ext in [".vtk", ".trk"]: if self._tmpfile is None: self._tmpfile = tempfile.NamedTemporaryFile( - suffix='.txt', dir=os.getcwd(), delete=False).name + suffix=".txt", dir=os.getcwd(), delete=False + ).name second_args = self._tmpfile return first_args + [second_args] @@ -2428,16 +2545,15 @@ def _vtk_to_coords(self, in_file, out_file=None): from ...interfaces import vtkbase as VTKInfo if VTKInfo.no_tvtk(): - raise ImportError( - 'TVTK is required and tvtk package was not found') + raise ImportError("TVTK is required and tvtk package was not found") - reader = tvtk.PolyDataReader(file_name=in_file + '.vtk') + reader = tvtk.PolyDataReader(file_name=in_file + ".vtk") reader.update() mesh = VTKInfo.vtk_output(reader) points = mesh.points if out_file is None: - out_file, _ = op.splitext(in_file) + '.txt' + out_file, _ = op.splitext(in_file) + ".txt" np.savetxt(out_file, points) return out_file @@ -2447,8 +2563,7 @@ def _coords_to_vtk(self, points, out_file): from ...interfaces import vtkbase as VTKInfo if VTKInfo.no_tvtk(): - raise ImportError( - 'TVTK is required and tvtk package was not found') + raise ImportError("TVTK is required and tvtk package was not found") reader = tvtk.PolyDataReader(file_name=self.inputs.in_file) reader.update() @@ -2462,37 +2577,37 @@ def _coords_to_vtk(self, points, out_file): def _trk_to_coords(self, in_file, out_file=None): from nibabel.trackvis import TrackvisFile + trkfile = TrackvisFile.from_file(in_file) streamlines = trkfile.streamlines if out_file is None: out_file, _ = op.splitext(in_file) - np.savetxt(streamlines, out_file + '.txt') - return out_file + '.txt' + np.savetxt(streamlines, out_file + ".txt") + return out_file + ".txt" def _coords_to_trk(self, points, out_file): - raise NotImplementedError('trk files are not yet supported') + raise NotImplementedError("trk files are not yet supported") def _overload_extension(self, value, name): - if name == 'out_file': - return '%s.%s' % (value, getattr(self, '_outformat')) + if name == "out_file": + return "%s.%s" % (value, getattr(self, "_outformat")) def _run_interface(self, runtime): - fname = getattr(self, '_in_file') - outformat = getattr(self, '_outformat') + fname = getattr(self, "_in_file") + outformat = getattr(self, "_outformat") tmpfile = None - if outformat == 'vtk': + if outformat == "vtk": tmpfile = self._tmpfile self._vtk_to_coords(fname, out_file=tmpfile) - elif outformat == 'trk': + elif outformat == "trk": tmpfile = self._tmpfile self._trk_to_coords(fname, out_file=tmpfile) runtime = super(WarpPoints, self)._run_interface(runtime) - newpoints = np.fromstring( - '\n'.join(runtime.stdout.split('\n')[1:]), sep=' ') + newpoints = np.fromstring("\n".join(runtime.stdout.split("\n")[1:]), sep=" ") if tmpfile is not None: try: @@ -2500,11 +2615,11 @@ def _run_interface(self, runtime): except: pass - out_file = self._filename_from_source('out_file') + out_file = self._filename_from_source("out_file") - if outformat == 'vtk': + if outformat == "vtk": self._coords_to_vtk(newpoints, out_file) - elif outformat == 'trk': + elif outformat == "trk": self._coords_to_trk(newpoints, out_file) else: np.savetxt(out_file, newpoints.reshape(-1, 3)) @@ -2514,20 +2629,21 @@ def _run_interface(self, runtime): class WarpPointsToStdInputSpec(WarpPointsBaseInputSpec): img_file = File( - exists=True, - argstr='-img %s', - mandatory=True, - desc=('filename of input image')) + exists=True, 
argstr="-img %s", mandatory=True, desc=("filename of input image") + ) std_file = File( exists=True, - argstr='-std %s', + argstr="-std %s", mandatory=True, - desc=('filename of destination image')) + desc=("filename of destination image"), + ) premat_file = File( exists=True, - argstr='-premat %s', - desc=('filename of pre-warp affine transform ' - '(e.g. example_func2highres.mat)')) + argstr="-premat %s", + desc=( + "filename of pre-warp affine transform " "(e.g. example_func2highres.mat)" + ), + ) class WarpPointsToStd(WarpPoints): @@ -2558,45 +2674,51 @@ class WarpPointsToStd(WarpPoints): input_spec = WarpPointsToStdInputSpec output_spec = WarpPointsOutputSpec - _cmd = 'img2stdcoord' - _terminal_output = 'file_split' + _cmd = "img2stdcoord" + _terminal_output = "file_split" class WarpPointsFromStdInputSpec(CommandLineInputSpec): img_file = File( exists=True, - argstr='-img %s', + argstr="-img %s", mandatory=True, - desc='filename of a destination image') + desc="filename of a destination image", + ) std_file = File( exists=True, - argstr='-std %s', + argstr="-std %s", mandatory=True, - desc='filename of the image in standard space') + desc="filename of the image in standard space", + ) in_coords = File( exists=True, position=-2, - argstr='%s', + argstr="%s", mandatory=True, - desc='filename of file containing coordinates') + desc="filename of file containing coordinates", + ) xfm_file = File( exists=True, - argstr='-xfm %s', - xor=['warp_file'], - desc='filename of affine transform (e.g. source2dest.mat)') + argstr="-xfm %s", + xor=["warp_file"], + desc="filename of affine transform (e.g. source2dest.mat)", + ) warp_file = File( exists=True, - argstr='-warp %s', - xor=['xfm_file'], - desc='filename of warpfield (e.g. ' - 'intermediate2dest_warp.nii.gz)') + argstr="-warp %s", + xor=["xfm_file"], + desc="filename of warpfield (e.g. 
" "intermediate2dest_warp.nii.gz)", + ) coord_vox = traits.Bool( True, - argstr='-vox', - xor=['coord_mm'], - desc='all coordinates in voxels - default') + argstr="-vox", + xor=["coord_mm"], + desc="all coordinates in voxels - default", + ) coord_mm = traits.Bool( - False, argstr='-mm', xor=['coord_vox'], desc='all coordinates in mm') + False, argstr="-mm", xor=["coord_vox"], desc="all coordinates in mm" + ) class WarpPointsFromStd(CommandLine): @@ -2625,63 +2747,69 @@ class WarpPointsFromStd(CommandLine): input_spec = WarpPointsFromStdInputSpec output_spec = WarpPointsOutputSpec - _cmd = 'std2imgcoord' + _cmd = "std2imgcoord" def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath('stdout.nipype') + outputs["out_file"] = op.abspath("stdout.nipype") return outputs class MotionOutliersInputSpec(FSLCommandInputSpec): in_file = File( - exists=True, - mandatory=True, - desc="unfiltered 4D image", - argstr="-i %s") + exists=True, mandatory=True, desc="unfiltered 4D image", argstr="-i %s" + ) out_file = File( argstr="-o %s", - name_source='in_file', - name_template='%s_outliers.txt', + name_source="in_file", + name_template="%s_outliers.txt", keep_extension=True, - desc='output outlier file name', - hash_files=False) - mask = File( - exists=True, argstr="-m %s", desc="mask image for calculating metric") + desc="output outlier file name", + hash_files=False, + ) + mask = File(exists=True, argstr="-m %s", desc="mask image for calculating metric") metric = traits.Enum( - 'refrms', ['refrms', 'dvars', 'refmse', 'fd', 'fdrms'], + "refrms", + ["refrms", "dvars", "refmse", "fd", "fdrms"], argstr="--%s", - desc='metrics: refrms - RMS intensity difference to reference volume ' - 'as metric [default metric], refmse - Mean Square Error version ' - 'of refrms (used in original version of fsl_motion_outliers), ' - 'dvars - DVARS, fd - frame displacement, fdrms - FD with RMS ' - 'matrix calculation') + desc="metrics: refrms - RMS intensity difference to reference volume " + "as metric [default metric], refmse - Mean Square Error version " + "of refrms (used in original version of fsl_motion_outliers), " + "dvars - DVARS, fd - frame displacement, fdrms - FD with RMS " + "matrix calculation", + ) threshold = traits.Float( argstr="--thresh=%g", - desc=("specify absolute threshold value " - "(otherwise use box-plot cutoff = P75 + " - "1.5*IQR)")) + desc=( + "specify absolute threshold value " + "(otherwise use box-plot cutoff = P75 + " + "1.5*IQR)" + ), + ) no_motion_correction = traits.Bool( - argstr="--nomoco", - desc="do not run motion correction (assumed already done)") + argstr="--nomoco", desc="do not run motion correction (assumed already done)" + ) dummy = traits.Int( argstr="--dummy=%d", - desc='number of dummy scans to delete (before running anything and ' - 'creating EVs)') + desc="number of dummy scans to delete (before running anything and " + "creating EVs)", + ) out_metric_values = File( argstr="-s %s", - name_source='in_file', - name_template='%s_metrics.txt', + name_source="in_file", + name_template="%s_metrics.txt", keep_extension=True, - desc='output metric values (DVARS etc.) file name', - hash_files=False) + desc="output metric values (DVARS etc.) file name", + hash_files=False, + ) out_metric_plot = File( argstr="-p %s", - name_source='in_file', - name_template='%s_metrics.png', + name_source="in_file", + name_template="%s_metrics.png", hash_files=False, keep_extension=True, - desc='output metric values plot (DVARS etc.) 
file name') + desc="output metric values plot (DVARS etc.) file name", + ) class MotionOutliersOutputSpec(TraitedSpec): @@ -2705,4 +2833,4 @@ class MotionOutliers(FSLCommand): input_spec = MotionOutliersInputSpec output_spec = MotionOutliersOutputSpec - _cmd = 'fsl_motion_outliers' + _cmd = "fsl_motion_outliers" diff --git a/nipype/interfaces/image.py b/nipype/interfaces/image.py index d72bb47c42..b3f3f433cd 100644 --- a/nipype/interfaces/image.py +++ b/nipype/interfaces/image.py @@ -3,28 +3,30 @@ # vi: set ft=python sts=4 ts=4 sw=4 et: from ..utils.filemanip import fname_presuffix -from .base import (SimpleInterface, TraitedSpec, BaseInterfaceInputSpec, - traits, File) +from .base import SimpleInterface, TraitedSpec, BaseInterfaceInputSpec, traits, File from .. import LooseVersion class RescaleInputSpec(BaseInterfaceInputSpec): - in_file = File(exists=True, mandatory=True, - desc='Skull-stripped image to rescale') - ref_file = File(exists=True, mandatory=True, - desc='Skull-stripped reference image') - invert = traits.Bool(desc='Invert contrast of rescaled image') - percentile = traits.Range(low=0., high=50., value=0., usedefault=True, - desc='Percentile to use for reference to allow ' - 'for outliers - 1 indicates the 1st and ' - '99th percentiles in the input file will ' - 'be mapped to the 99th and 1st percentiles ' - 'in the reference; 0 indicates minima and ' - 'maxima will be mapped') + in_file = File(exists=True, mandatory=True, desc="Skull-stripped image to rescale") + ref_file = File(exists=True, mandatory=True, desc="Skull-stripped reference image") + invert = traits.Bool(desc="Invert contrast of rescaled image") + percentile = traits.Range( + low=0.0, + high=50.0, + value=0.0, + usedefault=True, + desc="Percentile to use for reference to allow " + "for outliers - 1 indicates the 1st and " + "99th percentiles in the input file will " + "be mapped to the 99th and 1st percentiles " + "in the reference; 0 indicates minima and " + "maxima will be mapped", + ) class RescaleOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='Rescaled image') + out_file = File(exists=True, desc="Rescaled image") class Rescale(SimpleInterface): @@ -57,6 +59,7 @@ class Rescale(SimpleInterface): >>> res = invert_t1w.run() # doctest: +SKIP """ + input_spec = RescaleInputSpec output_spec = RescaleOutputSpec @@ -71,7 +74,7 @@ def _run_interface(self, runtime): in_mask = data > 0 ref_mask = ref_data > 0 - q = [self.inputs.percentile, 100. 
- self.inputs.percentile] + q = [self.inputs.percentile, 100.0 - self.inputs.percentile] in_low, in_high = np.percentile(data[in_mask], q) ref_low, ref_high = np.percentile(ref_data[ref_mask], q) scale_factor = (ref_high - ref_low) / (in_high - in_low) @@ -79,33 +82,41 @@ def _run_interface(self, runtime): signal = in_high - data if self.inputs.invert else data - in_low out_data = in_mask * (signal * scale_factor + ref_low) - suffix = '_inv' if self.inputs.invert else '_rescaled' - out_file = fname_presuffix(self.inputs.in_file, suffix=suffix, - newpath=runtime.cwd) + suffix = "_inv" if self.inputs.invert else "_rescaled" + out_file = fname_presuffix( + self.inputs.in_file, suffix=suffix, newpath=runtime.cwd + ) img.__class__(out_data, img.affine, img.header).to_filename(out_file) - self._results['out_file'] = out_file + self._results["out_file"] = out_file return runtime -_axes = ('RL', 'AP', 'SI') +_axes = ("RL", "AP", "SI") _orientations = tuple( - ''.join((x[i], y[j], z[k])) - for x in _axes for y in _axes for z in _axes + "".join((x[i], y[j], z[k])) + for x in _axes + for y in _axes + for z in _axes if x != y != z != x - for i in (0, 1) for j in (0, 1) for k in (0, 1)) + for i in (0, 1) + for j in (0, 1) + for k in (0, 1) +) class ReorientInputSpec(BaseInterfaceInputSpec): - in_file = File(exists=True, mandatory=True, desc='Input image') - orientation = traits.Enum(_orientations, usedefault=True, - desc='Target axis orientation') + in_file = File(exists=True, mandatory=True, desc="Input image") + orientation = traits.Enum( + _orientations, usedefault=True, desc="Target axis orientation" + ) class ReorientOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='Reoriented image') - transform = File(exists=True, - desc='Affine transform from input orientation to output') + out_file = File(exists=True, desc="Reoriented image") + transform = File( + exists=True, desc="Affine transform from input orientation to output" + ) class Reorient(SimpleInterface): @@ -168,14 +179,14 @@ class Reorient(SimpleInterface): >>> os.unlink(res.outputs.transform) """ + input_spec = ReorientInputSpec output_spec = ReorientOutputSpec def _run_interface(self, runtime): import numpy as np import nibabel as nb - from nibabel.orientations import ( - axcodes2ornt, ornt_transform, inv_ornt_aff) + from nibabel.orientations import axcodes2ornt, ornt_transform, inv_ornt_aff fname = self.inputs.in_file orig_img = nb.load(fname) @@ -188,26 +199,26 @@ def _run_interface(self, runtime): affine_xfm = inv_ornt_aff(transform, orig_img.shape) # Check can be eliminated when minimum nibabel version >= 2.4 - if LooseVersion(nb.__version__) >= LooseVersion('2.4.0'): + if LooseVersion(nb.__version__) >= LooseVersion("2.4.0"): reoriented = orig_img.as_reoriented(transform) else: reoriented = _as_reoriented_backport(orig_img, transform) # Image may be reoriented if reoriented is not orig_img: - suffix = '_' + self.inputs.orientation.lower() - out_name = fname_presuffix(fname, suffix=suffix, - newpath=runtime.cwd) + suffix = "_" + self.inputs.orientation.lower() + out_name = fname_presuffix(fname, suffix=suffix, newpath=runtime.cwd) reoriented.to_filename(out_name) else: out_name = fname - mat_name = fname_presuffix(fname, suffix='.mat', - newpath=runtime.cwd, use_ext=False) - np.savetxt(mat_name, affine_xfm, fmt='%.08f') + mat_name = fname_presuffix( + fname, suffix=".mat", newpath=runtime.cwd, use_ext=False + ) + np.savetxt(mat_name, affine_xfm, fmt="%.08f") - self._results['out_file'] = out_name - self._results['transform'] = 
mat_name + self._results["out_file"] = out_name + self._results["transform"] = mat_name return runtime @@ -217,6 +228,7 @@ def _as_reoriented_backport(img, ornt): import numpy as np import nibabel as nb from nibabel.orientations import inv_ornt_aff + if np.array_equal(ornt, [[0, 1], [1, 1], [2, 1]]): return img @@ -226,8 +238,10 @@ def _as_reoriented_backport(img, ornt): if isinstance(reoriented, nb.Nifti1Pair): # Also apply the transform to the dim_info fields - new_dim = [None if orig_dim is None else int(ornt[orig_dim, 0]) - for orig_dim in img.header.get_dim_info()] + new_dim = [ + None if orig_dim is None else int(ornt[orig_dim, 0]) + for orig_dim in img.header.get_dim_info() + ] reoriented.header.set_dim_info(*new_dim) diff --git a/nipype/interfaces/io.py b/nipype/interfaces/io.py index a5498f5c9f..071c834e14 100644 --- a/nipype/interfaces/io.py +++ b/nipype/interfaces/io.py @@ -27,15 +27,31 @@ from .. import config, logging from ..utils.filemanip import ( - copyfile, simplify_list, ensure_list, - get_related_files, split_filename) + copyfile, + simplify_list, + ensure_list, + get_related_files, + split_filename, +) from ..utils.misc import human_order_sorted, str2bool from .base import ( - TraitedSpec, traits, Str, File, Directory, BaseInterface, InputMultiPath, - isdefined, OutputMultiPath, DynamicTraitedSpec, Undefined, BaseInterfaceInputSpec, - LibraryBaseInterface, SimpleInterface) - -iflogger = logging.getLogger('nipype.interface') + TraitedSpec, + traits, + Str, + File, + Directory, + BaseInterface, + InputMultiPath, + isdefined, + OutputMultiPath, + DynamicTraitedSpec, + Undefined, + BaseInterfaceInputSpec, + LibraryBaseInterface, + SimpleInterface, +) + +iflogger = logging.getLogger("nipype.interface") def copytree(src, dst, use_hardlink=False): @@ -50,7 +66,7 @@ def copytree(src, dst, use_hardlink=False): try: os.makedirs(dst) except OSError as why: - if 'File exists' in why.strerror: + if "File exists" in why.strerror: pass else: raise why @@ -66,8 +82,9 @@ def copytree(src, dst, use_hardlink=False): srcname, dstname, True, - hashmethod='content', - use_hardlink=use_hardlink) + hashmethod="content", + use_hardlink=use_hardlink, + ) except (IOError, os.error) as why: errors.append((srcname, dstname, str(why))) # catch the Error from the recursive copytree so that we can @@ -107,21 +124,26 @@ def _get_head_bucket(s3_resource, bucket_name): try: s3_resource.meta.client.head_bucket(Bucket=bucket_name) except botocore.exceptions.ClientError as exc: - error_code = int(exc.response['Error']['Code']) + error_code = int(exc.response["Error"]["Code"]) if error_code == 403: - err_msg = 'Access to bucket: %s is denied; check credentials'\ - % bucket_name + err_msg = "Access to bucket: %s is denied; check credentials" % bucket_name raise Exception(err_msg) elif error_code == 404: - err_msg = 'Bucket: %s does not exist; check spelling and try '\ - 'again' % bucket_name + err_msg = ( + "Bucket: %s does not exist; check spelling and try " + "again" % bucket_name + ) raise Exception(err_msg) else: - err_msg = 'Unable to connect to bucket: %s. Error message:\n%s'\ - % (bucket_name, exc) + err_msg = "Unable to connect to bucket: %s. Error message:\n%s" % ( + bucket_name, + exc, + ) except Exception as exc: - err_msg = 'Unable to connect to bucket: %s. Error message:\n%s'\ - % (bucket_name, exc) + err_msg = "Unable to connect to bucket: %s. 
Error message:\n%s" % ( + bucket_name, + exc, + ) raise Exception(err_msg) @@ -141,14 +163,14 @@ def _add_output_traits(self, base): # Class to track percentage of S3 file upload class ProgressPercentage(object): - ''' + """ Callable class instsance (via __call__ method) that displays upload percentage of a file to S3 - ''' + """ def __init__(self, filename): - ''' - ''' + """ + """ # Import packages import threading @@ -160,8 +182,8 @@ def __init__(self, filename): self._lock = threading.Lock() def __call__(self, bytes_amount): - ''' - ''' + """ + """ # Import packages import sys @@ -173,8 +195,11 @@ def __call__(self, bytes_amount): percentage = (self._seen_so_far // self._size) * 100 else: percentage = 0 - progress_str = '%d / %d (%.2f%%)\r'\ - % (self._seen_so_far, self._size, percentage) + progress_str = "%d / %d (%.2f%%)\r" % ( + self._seen_so_far, + self._size, + percentage, + ) # Write to stdout sys.stdout.write(progress_str) @@ -183,43 +208,52 @@ def __call__(self, bytes_amount): # DataSink inputs class DataSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): - ''' - ''' + """ + """ # Init inputspec data attributes - base_directory = Directory( - desc='Path to the base directory for storing data.') - container = Str( - desc='Folder within base directory in which to store output') + base_directory = Directory(desc="Path to the base directory for storing data.") + container = Str(desc="Folder within base directory in which to store output") parameterization = traits.Bool( - True, usedefault=True, desc='store output in parametrized structure') - strip_dir = Directory(desc='path to strip out of filename') + True, usedefault=True, desc="store output in parametrized structure" + ) + strip_dir = Directory(desc="path to strip out of filename") substitutions = InputMultiPath( traits.Tuple(Str, Str), - desc=('List of 2-tuples reflecting string ' - 'to substitute and string to replace ' - 'it with')) - regexp_substitutions = \ - InputMultiPath(traits.Tuple(Str, Str), - desc=('List of 2-tuples reflecting a pair of a ' - 'Python regexp pattern and a replacement ' - 'string. Invoked after string `substitutions`')) + desc=( + "List of 2-tuples reflecting string " + "to substitute and string to replace " + "it with" + ), + ) + regexp_substitutions = InputMultiPath( + traits.Tuple(Str, Str), + desc=( + "List of 2-tuples reflecting a pair of a " + "Python regexp pattern and a replacement " + "string. 
Invoked after string `substitutions`" + ), + ) _outputs = traits.Dict(Str, value={}, usedefault=True) remove_dest_dir = traits.Bool( - False, usedefault=True, desc='remove dest directory when copying dirs') + False, usedefault=True, desc="remove dest directory when copying dirs" + ) # AWS S3 data attributes - creds_path = Str(desc='Filepath to AWS credentials file for S3 bucket ' - 'access; if not specified, the credentials will ' - 'be taken from the AWS_ACCESS_KEY_ID and ' - 'AWS_SECRET_ACCESS_KEY environment variables') - encrypt_bucket_keys = traits.Bool(desc='Flag indicating whether to use S3 ' - 'server-side AES-256 encryption') + creds_path = Str( + desc="Filepath to AWS credentials file for S3 bucket " + "access; if not specified, the credentials will " + "be taken from the AWS_ACCESS_KEY_ID and " + "AWS_SECRET_ACCESS_KEY environment variables" + ) + encrypt_bucket_keys = traits.Bool( + desc="Flag indicating whether to use S3 " "server-side AES-256 encryption" + ) # Set this if user wishes to override the bucket with their own - bucket = traits.Any(desc='Boto3 S3 bucket for manual override of bucket') + bucket = traits.Any(desc="Boto3 S3 bucket for manual override of bucket") # Set this if user wishes to have local copy of files as well - local_copy = Str(desc='Copy files locally as well as to S3 bucket') + local_copy = Str(desc="Copy files locally as well as to S3 bucket") # Set call-able inputs attributes def __setattr__(self, key, value): @@ -238,7 +272,7 @@ def __setattr__(self, key, value): class DataSinkOutputSpec(TraitedSpec): # Init out file - out_file = traits.Any(desc='datasink output') + out_file = traits.Any(desc="datasink output") # Custom DataSink class @@ -339,10 +373,9 @@ def _get_dst(self, src): if self.inputs.parameterization: dst = path if isdefined(self.inputs.strip_dir): - dst = dst.replace(self.inputs.strip_dir, '') + dst = dst.replace(self.inputs.strip_dir, "") folders = [ - folder for folder in dst.split(os.path.sep) - if folder.startswith('_') + folder for folder in dst.split(os.path.sep) if folder.startswith("_") ] dst = os.path.sep.join(folders) if fname: @@ -364,22 +397,32 @@ def _substitute(self, pathstr): oldpathstr = pathstr pathstr = pathstr.replace(key, val) if pathstr != oldpathstr: - iflogger.debug('sub.str: %s -> %s using %r -> %r', - oldpathstr, pathstr, key, val) + iflogger.debug( + "sub.str: %s -> %s using %r -> %r", + oldpathstr, + pathstr, + key, + val, + ) if isdefined(self.inputs.regexp_substitutions): for key, val in self.inputs.regexp_substitutions: oldpathstr = pathstr pathstr, _ = re.subn(key, val, pathstr) if pathstr != oldpathstr: - iflogger.debug('sub.regexp: %s -> %s using %r -> %r', - oldpathstr, pathstr, key, val) + iflogger.debug( + "sub.regexp: %s -> %s using %r -> %r", + oldpathstr, + pathstr, + key, + val, + ) if pathstr_ != pathstr: - iflogger.info('sub: %s -> %s', pathstr_, pathstr) + iflogger.info("sub: %s -> %s", pathstr_, pathstr) return pathstr # Check for s3 in base directory def _check_s3_base_dir(self): - ''' + """ Method to see if the datasink's base directory specifies an S3 bucket path; if it does, it parses the path for the bucket name in the form 's3://bucket_name/...' 
and returns it @@ -395,11 +438,11 @@ def _check_s3_base_dir(self): bucket_name : string name of the S3 bucket to connect to; if the base directory is not a valid S3 path, defaults to '' - ''' + """ # Init variables - s3_str = 's3://' - bucket_name = '' + s3_str = "s3://" + bucket_name = "" base_directory = self.inputs.base_directory if not isdefined(base_directory): @@ -408,14 +451,14 @@ def _check_s3_base_dir(self): # Explicitly lower-case the "s3" if base_directory.lower().startswith(s3_str): - base_dir_sp = base_directory.split('/') + base_dir_sp = base_directory.split("/") base_dir_sp[0] = base_dir_sp[0].lower() - base_directory = '/'.join(base_dir_sp) + base_directory = "/".join(base_dir_sp) # Check if 's3://' in base dir if base_directory.startswith(s3_str): # Expects bucket name to be 's3://bucket_name/base_dir/..' - bucket_name = base_directory.split(s3_str)[1].split('/')[0] + bucket_name = base_directory.split(s3_str)[1].split("/")[0] s3_flag = True # Otherwise it's just a normal datasink else: @@ -426,7 +469,7 @@ def _check_s3_base_dir(self): # Function to return AWS secure environment variables def _return_aws_keys(self): - ''' + """ Method to return AWS access key id and secret access key using credentials found in a local file. @@ -441,7 +484,7 @@ def _return_aws_keys(self): string of the AWS access key ID aws_secret_access_key : string string of the AWS secret access key - ''' + """ # Import packages import os @@ -451,40 +494,39 @@ def _return_aws_keys(self): # Check if creds exist if creds_path and os.path.exists(creds_path): - with open(creds_path, 'r') as creds_in: + with open(creds_path, "r") as creds_in: # Grab csv rows row1 = creds_in.readline() row2 = creds_in.readline() # Are they root or user keys - if 'User Name' in row1: + if "User Name" in row1: # And split out for keys - aws_access_key_id = row2.split(',')[1] - aws_secret_access_key = row2.split(',')[2] - elif 'AWSAccessKeyId' in row1: + aws_access_key_id = row2.split(",")[1] + aws_secret_access_key = row2.split(",")[2] + elif "AWSAccessKeyId" in row1: # And split out for keys - aws_access_key_id = row1.split('=')[1] - aws_secret_access_key = row2.split('=')[1] + aws_access_key_id = row1.split("=")[1] + aws_secret_access_key = row2.split("=")[1] else: - err_msg = 'Credentials file not recognized, check file is correct' + err_msg = "Credentials file not recognized, check file is correct" raise Exception(err_msg) # Strip any carriage return/line feeds - aws_access_key_id = aws_access_key_id.replace('\r', '').replace( - '\n', '') - aws_secret_access_key = aws_secret_access_key.replace('\r', - '').replace( - '\n', '') + aws_access_key_id = aws_access_key_id.replace("\r", "").replace("\n", "") + aws_secret_access_key = aws_secret_access_key.replace("\r", "").replace( + "\n", "" + ) else: - aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID') - aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY') + aws_access_key_id = os.getenv("AWS_ACCESS_KEY_ID") + aws_secret_access_key = os.getenv("AWS_SECRET_ACCESS_KEY") # Return keys return aws_access_key_id, aws_secret_access_key # Fetch bucket object def _fetch_bucket(self, bucket_name): - ''' + """ Method to return a bucket object which can be used to interact with an AWS S3 bucket using credentials found in a local file. 
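# A minimal, runnable sketch of the S3 base-directory handling that the
# DataSink hunks above reformat, assuming boto3/botocore are installed;
# `parse_s3_base_dir` and `head_bucket_or_raise` are illustrative names,
# not part of nipype's API.
from botocore.exceptions import ClientError

S3_STR = "s3://"


def parse_s3_base_dir(base_directory):
    """Return (s3_flag, bucket_name), mirroring DataSink._check_s3_base_dir."""
    # Lower-case only the scheme, so 'S3://...' is recognized as well
    parts = base_directory.split("/")
    parts[0] = parts[0].lower()
    base_directory = "/".join(parts)
    if base_directory.startswith(S3_STR):
        # 's3://bucket_name/base_dir/...' -> 'bucket_name'
        return True, base_directory[len(S3_STR):].split("/")[0]
    return False, ""


def head_bucket_or_raise(s3_resource, bucket_name):
    """Probe a bucket, translating botocore error codes as _get_head_bucket does."""
    try:
        s3_resource.meta.client.head_bucket(Bucket=bucket_name)
    except ClientError as exc:
        code = int(exc.response["Error"]["Code"])
        if code == 403:
            raise Exception(
                "Access to bucket: %s is denied; check credentials" % bucket_name
            )
        if code == 404:
            raise Exception(
                "Bucket: %s does not exist; check spelling and try again" % bucket_name
            )
        # also raise for unexpected error codes, so they are never swallowed
        raise Exception(
            "Unable to connect to bucket: %s. Error message:\n%s" % (bucket_name, exc)
        )


if __name__ == "__main__":
    print(parse_s3_base_dir("S3://my-bucket/derivatives"))  # (True, 'my-bucket')
    # With boto3 and network access one could then do, e.g.:
    # import boto3
    # session = boto3.session.Session()
    # head_bucket_or_raise(session.resource("s3", use_ssl=True), "my-bucket")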
@@ -500,15 +542,14 @@ def _fetch_bucket(self, bucket_name): bucket : boto3.resources.factory.s3.Bucket boto3 s3 Bucket object which is used to interact with files in an S3 bucket on AWS - ''' + """ # Import packages try: import boto3 import botocore except ImportError as exc: - err_msg = 'Boto3 package is not installed - install boto3 and '\ - 'try again.' + err_msg = "Boto3 package is not installed - install boto3 and " "try again." raise Exception(err_msg) # Init variables @@ -516,34 +557,35 @@ def _fetch_bucket(self, bucket_name): # Get AWS credentials try: - aws_access_key_id, aws_secret_access_key = \ - self._return_aws_keys() + aws_access_key_id, aws_secret_access_key = self._return_aws_keys() except Exception as exc: - err_msg = 'There was a problem extracting the AWS credentials '\ - 'from the credentials file provided: %s. Error:\n%s'\ - % (creds_path, exc) + err_msg = ( + "There was a problem extracting the AWS credentials " + "from the credentials file provided: %s. Error:\n%s" % (creds_path, exc) + ) raise Exception(err_msg) # Try and get AWS credentials if a creds_path is specified if aws_access_key_id and aws_secret_access_key: # Init connection - iflogger.info('Connecting to S3 bucket: %s with credentials...', - bucket_name) + iflogger.info( + "Connecting to S3 bucket: %s with credentials...", bucket_name + ) # Use individual session for each instance of DataSink # Better when datasinks are being used in multi-threading, see: # http://boto3.readthedocs.org/en/latest/guide/resources.html#multithreading session = boto3.session.Session( aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key) + aws_secret_access_key=aws_secret_access_key, + ) else: - iflogger.info('Connecting to S3 bucket: %s with IAM role...', - bucket_name) + iflogger.info("Connecting to S3 bucket: %s with IAM role...", bucket_name) # Lean on AWS environment / IAM role authentication and authorization session = boto3.session.Session() - s3_resource = session.resource('s3', use_ssl=True) + s3_resource = session.resource("s3", use_ssl=True) # And try fetch the bucket with the name argument try: @@ -552,9 +594,10 @@ def _fetch_bucket(self, bucket_name): # Try to connect anonymously s3_resource.meta.client.meta.events.register( - 'choose-signer.s3.*', botocore.handlers.disable_signing) + "choose-signer.s3.*", botocore.handlers.disable_signing + ) - iflogger.info('Connecting to AWS: %s anonymously...', bucket_name) + iflogger.info("Connecting to AWS: %s anonymously...", bucket_name) _get_head_bucket(s3_resource, bucket_name) # Explicitly declare a secure SSL connection for bucket object @@ -563,12 +606,11 @@ def _fetch_bucket(self, bucket_name): # Return the bucket return bucket - # Send up to S3 method def _upload_to_s3(self, bucket, src, dst): - ''' + """ Method to upload outputs to S3 bucket instead of on local disk - ''' + """ # Import packages import hashlib @@ -577,12 +619,12 @@ def _upload_to_s3(self, bucket, src, dst): from botocore.exceptions import ClientError # Init variables - s3_str = 's3://' + s3_str = "s3://" s3_prefix = s3_str + bucket.name # Explicitly lower-case the "s3" - if dst[:len(s3_str)].lower() == s3_str: - dst = s3_str + dst[len(s3_str):] + if dst[: len(s3_str)].lower() == s3_str: + dst = s3_str + dst[len(s3_str) :] # If src is a directory, collect files (this assumes dst is a dir too) if os.path.isdir(src): @@ -590,10 +632,7 @@ def _upload_to_s3(self, bucket, src, dst): for root, dirs, files in os.walk(src): src_files.extend([os.path.join(root, fil) for fil in 
files]) # Make the dst files have the dst folder as base dir - dst_files = [ - os.path.join(dst, - src_f.split(src)[1]) for src_f in src_files - ] + dst_files = [os.path.join(dst, src_f.split(src)[1]) for src_f in src_files] else: src_files = [src] dst_files = [dst] @@ -602,7 +641,7 @@ def _upload_to_s3(self, bucket, src, dst): for src_idx, src_f in enumerate(src_files): # Get destination filename/keyname dst_f = dst_files[src_idx] - dst_k = dst_f.replace(s3_prefix, '').lstrip('/') + dst_k = dst_f.replace(s3_prefix, "").lstrip("/") # See if same file is already up there try: @@ -610,31 +649,29 @@ def _upload_to_s3(self, bucket, src, dst): dst_md5 = dst_obj.e_tag.strip('"') # See if same file is already there - src_read = open(src_f, 'rb').read() + src_read = open(src_f, "rb").read() src_md5 = hashlib.md5(src_read).hexdigest() # Move to next loop iteration if dst_md5 == src_md5: - iflogger.info('File %s already exists on S3, skipping...', - dst_f) + iflogger.info("File %s already exists on S3, skipping...", dst_f) continue else: - iflogger.info('Overwriting previous S3 file...') + iflogger.info("Overwriting previous S3 file...") except ClientError: - iflogger.info('New file to S3') + iflogger.info("New file to S3") # Copy file up to S3 (either encrypted or not) - iflogger.info('Uploading %s to S3 bucket, %s, as %s...', src_f, - bucket.name, dst_f) + iflogger.info( + "Uploading %s to S3 bucket, %s, as %s...", src_f, bucket.name, dst_f + ) if self.inputs.encrypt_bucket_keys: - extra_args = {'ServerSideEncryption': 'AES256'} + extra_args = {"ServerSideEncryption": "AES256"} else: extra_args = {} bucket.upload_file( - src_f, - dst_k, - ExtraArgs=extra_args, - Callback=ProgressPercentage(src_f)) + src_f, dst_k, ExtraArgs=extra_args, Callback=ProgressPercentage(src_f) + ) # List outputs, main run routine def _list_outputs(self): @@ -645,8 +682,7 @@ def _list_outputs(self): outputs = self.output_spec().get() out_files = [] # Use hardlink - use_hardlink = str2bool( - config.get('execution', 'try_hard_link_datasink')) + use_hardlink = str2bool(config.get("execution", "try_hard_link_datasink")) # Set local output directory if specified if isdefined(self.inputs.local_copy): @@ -655,7 +691,7 @@ def _list_outputs(self): outdir = self.inputs.base_directory # If base directory isn't given, assume current directory if not isdefined(outdir): - outdir = '.' + outdir = "." # Check if base directory reflects S3 bucket upload s3_flag, bucket_name = self._check_s3_base_dir() @@ -671,18 +707,21 @@ def _list_outputs(self): # If encountering an exception during bucket access, set output # base directory to a local folder except Exception as exc: - s3dir = '' + s3dir = "" if not isdefined(self.inputs.local_copy): local_out_exception = os.path.join( - os.path.expanduser('~'), - 's3_datasink_' + bucket_name) + os.path.expanduser("~"), "s3_datasink_" + bucket_name + ) outdir = local_out_exception # Log local copying directory iflogger.info( - 'Access to S3 failed! Storing outputs locally at: ' - '%s\nError: %s', outdir, exc) + "Access to S3 failed! 
Storing outputs locally at: " + "%s\nError: %s", + outdir, + exc, + ) else: - s3dir = '' + s3dir = "" # If container input is given, append that to outdir if isdefined(self.inputs.container): @@ -697,7 +736,7 @@ def _list_outputs(self): try: os.makedirs(outdir) except OSError as inst: - if 'File exists' in inst.strerror: + if "File exists" in inst.strerror: pass else: raise (inst) @@ -711,8 +750,8 @@ def _list_outputs(self): tempoutdir = outdir if s3_flag: s3tempoutdir = s3dir - for d in key.split('.'): - if d[0] == '@': + for d in key.split("."): + if d[0] == "@": continue tempoutdir = os.path.join(tempoutdir, d) if s3_flag: @@ -728,7 +767,7 @@ def _list_outputs(self): # Format src and dst files src = os.path.abspath(src) if not os.path.isfile(src): - src = os.path.join(src, '') + src = os.path.join(src, "") dst = self._get_dst(src) if s3_flag: s3dst = os.path.join(s3tempoutdir, dst) @@ -748,31 +787,32 @@ def _list_outputs(self): try: os.makedirs(path) except OSError as inst: - if 'File exists' in inst.strerror: + if "File exists" in inst.strerror: pass else: raise (inst) # If src is a file, copy it to dst if os.path.isfile(src): - iflogger.debug('copyfile: %s %s', src, dst) + iflogger.debug("copyfile: %s %s", src, dst) copyfile( src, dst, copy=True, - hashmethod='content', - use_hardlink=use_hardlink) + hashmethod="content", + use_hardlink=use_hardlink, + ) out_files.append(dst) # If src is a directory, copy entire contents to dst dir elif os.path.isdir(src): if os.path.exists(dst) and self.inputs.remove_dest_dir: - iflogger.debug('removing: %s', dst) + iflogger.debug("removing: %s", dst) shutil.rmtree(dst) - iflogger.debug('copydir: %s %s', src, dst) + iflogger.debug("copydir: %s %s", src, dst) copytree(src, dst) out_files.append(dst) # Return outputs dictionary - outputs['out_file'] = out_files + outputs["out_file"] = out_files return outputs @@ -781,36 +821,37 @@ class S3DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): anon = traits.Bool( False, usedefault=True, - desc= - 'Use anonymous connection to s3. If this is set to True, boto may print' - + - ' a urlopen error, but this does not prevent data from being downloaded.' - ) - region = Str('us-east-1', usedefault=True, desc='Region of s3 bucket') - bucket = Str( - mandatory=True, desc='Amazon S3 bucket where your data is stored') + desc="Use anonymous connection to s3. If this is set to True, boto may print" + + " a urlopen error, but this does not prevent data from being downloaded.", + ) + region = Str("us-east-1", usedefault=True, desc="Region of s3 bucket") + bucket = Str(mandatory=True, desc="Amazon S3 bucket where your data is stored") bucket_path = Str( - '', - usedefault=True, - desc='Location within your bucket for subject data.') + "", usedefault=True, desc="Location within your bucket for subject data." + ) local_directory = Directory( exists=True, - desc='Path to the local directory for subject data to be downloaded ' - 'and accessed. Should be on HDFS for Spark jobs.') + desc="Path to the local directory for subject data to be downloaded " + "and accessed. 
Should be on HDFS for Spark jobs.", + ) raise_on_empty = traits.Bool( True, usedefault=True, - desc='Generate exception if list is empty for a given field') + desc="Generate exception if list is empty for a given field", + ) sort_filelist = traits.Bool( - mandatory=True, desc='Sort the filelist that matches the template') + mandatory=True, desc="Sort the filelist that matches the template" + ) template = Str( mandatory=True, - desc='Layout used to get files. Relative to bucket_path if defined.' - 'Uses regex rather than glob style formatting.') + desc="Layout used to get files. Relative to bucket_path if defined. " + "Uses regex rather than glob style formatting.", + ) template_args = traits.Dict( key_trait=Str, value_trait=traits.List(traits.List), - desc='Information to plug into template') + desc="Information to plug into template", + ) class S3DataGrabber(LibraryBaseInterface, IOBase): @@ -846,7 +887,7 @@ class S3DataGrabber(LibraryBaseInterface, IOBase): input_spec = S3DataGrabberInputSpec output_spec = DynamicTraitedSpec _always_run = True - _pkg = 'boto' + _pkg = "boto" def __init__(self, infields=None, outfields=None, **kwargs): """ @@ -862,7 +903,7 @@ def __init__(self, infields=None, outfields=None, **kwargs): """ if not outfields: - outfields = ['outfiles'] + outfields = ["outfiles"] super(S3DataGrabber, self).__init__(**kwargs) undefined_traits = {} # used for mandatory inputs check @@ -873,11 +914,13 @@ def __init__(self, infields=None, outfields=None, **kwargs): self.inputs.add_trait(key, traits.Any) undefined_traits[key] = Undefined # add ability to insert field specific templates - self.inputs.add_trait('field_template', - traits.Dict( - traits.Enum(outfields), - desc="arguments that fit into template")) - undefined_traits['field_template'] = Undefined + self.inputs.add_trait( + "field_template", + traits.Dict( + traits.Enum(outfields), desc="arguments that fit into template" + ), + ) + undefined_traits["field_template"] = Undefined if not isdefined(self.inputs.template_args): self.inputs.template_args = {} for key in outfields: @@ -902,30 +945,35 @@ def _list_outputs(self): # infields are mandatory, however I could not figure out how to set 'mandatory' flag dynamically # hence manual check import boto + if self._infields: for key in self._infields: value = getattr(self.inputs, key) if not isdefined(value): - msg = "%s requires a value for input '%s' because it was listed in 'infields'" % \ - (self.__class__.__name__, key) + msg = ( + "%s requires a value for input '%s' because it was listed in 'infields'" + % (self.__class__.__name__, key) + ) raise ValueError(msg) outputs = {} # get list of all files in s3 bucket conn = boto.connect_s3(anon=self.inputs.anon) bkt = conn.get_bucket(self.inputs.bucket) - bkt_files = list( - k.key for k in bkt.list(prefix=self.inputs.bucket_path)) + bkt_files = list(k.key for k in bkt.list(prefix=self.inputs.bucket_path)) # keys are outfields, args are template args for the outfield for key, args in list(self.inputs.template_args.items()): outputs[key] = [] template = self.inputs.template - if hasattr(self.inputs, 'field_template') and \ - isdefined(self.inputs.field_template) and \ - key in self.inputs.field_template: + if ( + hasattr(self.inputs, "field_template") + and isdefined(self.inputs.field_template) + and key in self.inputs.field_template + ): template = self.inputs.field_template[ - key] # template override for multiple outfields + key + ] # template override for multiple outfields if isdefined(self.inputs.bucket_path): template = 
os.path.join(self.inputs.bucket_path, template) if not args: @@ -934,8 +982,10 @@ def _list_outputs(self): if re.match(template, fname): filelist.append(fname) if len(filelist) == 0: - msg = 'Output key: %s Template: %s returned no files' % ( - key, template) + msg = "Output key: %s Template: %s returned no files" % ( + key, + template, + ) if self.inputs.raise_on_empty: raise IOError(msg) else: @@ -947,22 +997,20 @@ def _list_outputs(self): for argnum, arglist in enumerate(args): maxlen = 1 for arg in arglist: - if isinstance(arg, - (str, bytes)) and hasattr(self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): if (maxlen > 1) and (len(arg) != maxlen): raise ValueError( - 'incompatible number of arguments for %s' % - key) + "incompatible number of arguments for %s" % key + ) if len(arg) > maxlen: maxlen = len(arg) outfiles = [] for i in range(maxlen): argtuple = [] for arg in arglist: - if isinstance(arg, (str, bytes)) and hasattr( - self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): argtuple.append(arg[i]) @@ -974,16 +1022,19 @@ def _list_outputs(self): filledtemplate = template % tuple(argtuple) except TypeError as e: raise TypeError( - e.message + - ": Template %s failed to convert with args %s" - % (template, str(tuple(argtuple)))) + e.message + + ": Template %s failed to convert with args %s" + % (template, str(tuple(argtuple))) + ) outfiles = [] for fname in bkt_files: if re.match(filledtemplate, fname): outfiles.append(fname) if len(outfiles) == 0: - msg = 'Output key: %s Template: %s returned no files' % ( - key, filledtemplate) + msg = "Output key: %s Template: %s returned no files" % ( + key, + filledtemplate, + ) if self.inputs.raise_on_empty: raise IOError(msg) else: @@ -1019,19 +1070,19 @@ def _list_outputs(self): # directory, returning the local path. def s3tolocal(self, s3path, bkt): import boto + # path formatting local_directory = str(self.inputs.local_directory) bucket_path = str(self.inputs.bucket_path) template = str(self.inputs.template) - if not os.path.basename(local_directory) == '': - local_directory += '/' - if not os.path.basename(bucket_path) == '': - bucket_path += '/' - if template[0] == '/': + if not os.path.basename(local_directory) == "": + local_directory += "/" + if not os.path.basename(bucket_path) == "": + bucket_path += "/" + if template[0] == "/": template = template[1:] - localpath = s3path.replace(bucket_path, - local_directory) + localpath = s3path.replace(bucket_path, local_directory) localdir = os.path.split(localpath)[0] if not os.path.exists(localdir): os.makedirs(localdir) @@ -1043,25 +1094,28 @@ def s3tolocal(self, s3path, bkt): class DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): base_directory = Directory( - exists=True, - desc='Path to the base directory consisting of subject data.') + exists=True, desc="Path to the base directory consisting of subject data." 
+ ) raise_on_empty = traits.Bool( True, usedefault=True, - desc='Generate exception if list is empty for a given field') + desc="Generate exception if list is empty for a given field", + ) drop_blank_outputs = traits.Bool( - False, usedefault=True, - desc="Remove ``None`` entries from output lists" - ) + False, usedefault=True, desc="Remove ``None`` entries from output lists" + ) sort_filelist = traits.Bool( - mandatory=True, desc='Sort the filelist that matches the template') + mandatory=True, desc="Sort the filelist that matches the template" + ) template = Str( mandatory=True, - desc='Layout used to get files. relative to base directory if defined') + desc="Layout used to get files. relative to base directory if defined", + ) template_args = traits.Dict( key_trait=Str, value_trait=traits.List(traits.List), - desc='Information to plug into template') + desc="Information to plug into template", + ) class DataGrabber(IOBase): @@ -1115,6 +1169,7 @@ class DataGrabber(IOBase): >>> dg.inputs.template_args['struct'] = [['sid']] """ + input_spec = DataGrabberInputSpec output_spec = DynamicTraitedSpec _always_run = True @@ -1133,7 +1188,7 @@ def __init__(self, infields=None, outfields=None, **kwargs): """ if not outfields: - outfields = ['outfiles'] + outfields = ["outfiles"] super(DataGrabber, self).__init__(**kwargs) undefined_traits = {} # used for mandatory inputs check @@ -1144,11 +1199,13 @@ def __init__(self, infields=None, outfields=None, **kwargs): self.inputs.add_trait(key, traits.Any) undefined_traits[key] = Undefined # add ability to insert field specific templates - self.inputs.add_trait('field_template', - traits.Dict( - traits.Enum(outfields), - desc="arguments that fit into template")) - undefined_traits['field_template'] = Undefined + self.inputs.add_trait( + "field_template", + traits.Dict( + traits.Enum(outfields), desc="arguments that fit into template" + ), + ) + undefined_traits["field_template"] = Undefined if not isdefined(self.inputs.template_args): self.inputs.template_args = {} for key in outfields: @@ -1175,28 +1232,35 @@ def _list_outputs(self): for key in self._infields: value = getattr(self.inputs, key) if not isdefined(value): - msg = "%s requires a value for input '%s' because it was listed in 'infields'" % \ - (self.__class__.__name__, key) + msg = ( + "%s requires a value for input '%s' because it was listed in 'infields'" + % (self.__class__.__name__, key) + ) raise ValueError(msg) outputs = {} for key, args in list(self.inputs.template_args.items()): outputs[key] = [] template = self.inputs.template - if hasattr(self.inputs, 'field_template') and \ - isdefined(self.inputs.field_template) and \ - key in self.inputs.field_template: + if ( + hasattr(self.inputs, "field_template") + and isdefined(self.inputs.field_template) + and key in self.inputs.field_template + ): template = self.inputs.field_template[key] if isdefined(self.inputs.base_directory): template = os.path.join( - os.path.abspath(self.inputs.base_directory), template) + os.path.abspath(self.inputs.base_directory), template + ) else: template = os.path.abspath(template) if not args: filelist = glob.glob(template) if len(filelist) == 0: - msg = 'Output key: %s Template: %s returned no files' % ( - key, template) + msg = "Output key: %s Template: %s returned no files" % ( + key, + template, + ) if self.inputs.raise_on_empty: raise IOError(msg) else: @@ -1208,22 +1272,20 @@ def _list_outputs(self): for argnum, arglist in enumerate(args): maxlen = 1 for arg in arglist: - if isinstance(arg, - (str, bytes)) 
and hasattr(self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): if (maxlen > 1) and (len(arg) != maxlen): raise ValueError( - 'incompatible number of arguments for %s' % - key) + "incompatible number of arguments for %s" % key + ) if len(arg) > maxlen: maxlen = len(arg) outfiles = [] for i in range(maxlen): argtuple = [] for arg in arglist: - if isinstance(arg, (str, bytes)) and hasattr( - self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): argtuple.append(arg[i]) @@ -1235,13 +1297,16 @@ def _list_outputs(self): filledtemplate = template % tuple(argtuple) except TypeError as e: raise TypeError( - e.message + - ": Template %s failed to convert with args %s" - % (template, str(tuple(argtuple)))) + e.message + + ": Template %s failed to convert with args %s" + % (template, str(tuple(argtuple))) + ) outfiles = glob.glob(filledtemplate) if len(outfiles) == 0: - msg = 'Output key: %s Template: %s returned no files' % ( - key, filledtemplate) + msg = "Output key: %s Template: %s returned no files" % ( + key, + filledtemplate, + ) if self.inputs.raise_on_empty: raise IOError(msg) else: @@ -1265,28 +1330,30 @@ def _list_outputs(self): class SelectFilesInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): - base_directory = Directory( - exists=True, desc="Root path common to templates.") + base_directory = Directory(exists=True, desc="Root path common to templates.") sort_filelist = traits.Bool( True, usedefault=True, - desc="When matching mutliple files, return them" - " in sorted order.") + desc="When matching multiple files, return them" " in sorted order.", + ) raise_on_empty = traits.Bool( True, usedefault=True, - desc="Raise an exception if a template pattern " - "matches no files.") + desc="Raise an exception if a template pattern " "matches no files.", + ) force_lists = traits.Either( traits.Bool(), traits.List(Str()), default=False, usedefault=True, - desc=("Whether to return outputs as a list even" - " when only one file matches the template. " - "Either a boolean that applies to all output " - "fields or a list of output field names to " - "coerce to a list")) + desc=( + "Whether to return outputs as a list even" + " when only one file matches the template. " + "Either a boolean that applies to all output " - "fields or a list of output field names to " + "fields or a list of output field names to " + "coerce to a list" + ), + ) class SelectFiles(IOBase): @@ -1320,6 +1387,7 @@ class SelectFiles(IOBase): >>> dg.inputs.run = [2, 4] """ + input_spec = SelectFilesInputSpec output_spec = DynamicTraitedSpec _always_run = True @@ -1369,8 +1437,13 @@ def _add_output_traits(self, base): def _list_outputs(self): """Find the files and expose them as interface outputs.""" outputs = {} - info = dict([(k, v) for k, v in list(self.inputs.__dict__.items()) - if k in self._infields]) + info = dict( + [ + (k, v) + for k, v in list(self.inputs.__dict__.items()) + if k in self._infields + ] + ) force_lists = self.inputs.force_lists if isinstance(force_lists, bool): @@ -1380,8 +1453,9 @@ def _list_outputs(self): bad_fields = ", ".join(list(bad_fields)) plural = "s" if len(bad_fields) > 1 else "" verb = "were" if len(bad_fields) > 1 else "was" - msg = ("The field%s '%s' %s set in 'force_lists' and not in " - "'templates'.") % (plural, bad_fields, verb) + msg = ( + "The field%s '%s' %s set in 'force_lists' and not in " "'templates'." 
+ ) % (plural, bad_fields, verb) raise ValueError(msg) for field, template in list(self._templates.items()): @@ -1390,8 +1464,7 @@ def _list_outputs(self): # Build the full template path if isdefined(self.inputs.base_directory): - template = op.abspath( - op.join(self.inputs.base_directory, template)) + template = op.abspath(op.join(self.inputs.base_directory, template)) else: template = op.abspath(template) @@ -1406,7 +1479,9 @@ def _list_outputs(self): # Handle the case where nothing matched if not filelist: msg = "No files were found matching %s template: %s" % ( - field, filled_template) + field, + filled_template, + ) if self.inputs.raise_on_empty: raise IOError(msg) else: @@ -1426,25 +1501,22 @@ def _list_outputs(self): class DataFinderInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): - root_paths = traits.Either( - traits.List(), - Str(), - mandatory=True, - ) + root_paths = traits.Either(traits.List(), Str(), mandatory=True,) match_regex = Str( - '(.+)', - usedefault=True, - desc=("Regular expression for matching paths.")) + "(.+)", usedefault=True, desc=("Regular expression for matching paths.") + ) ignore_regexes = traits.List( - desc=("List of regular expressions, " - "if any match the path it will be " - "ignored.")) - max_depth = traits.Int(desc="The maximum depth to search beneath " - "the root_paths") - min_depth = traits.Int(desc="The minimum depth to search beneath " - "the root paths") + desc=( + "List of regular expressions, " + "if any match the path it will be " + "ignored." + ) + ) + max_depth = traits.Int(desc="The maximum depth to search beneath " "the root_paths") + min_depth = traits.Int(desc="The minimum depth to search beneath " "the root paths") unpack_single = traits.Bool( - False, usedefault=True, desc="Unpack single results from list") + False, usedefault=True, desc="Unpack single results from list" + ) class DataFinder(IOBase): @@ -1496,10 +1568,10 @@ def _match_path(self, target_path): if match is not None: match_dict = match.groupdict() if self.result is None: - self.result = {'out_paths': []} + self.result = {"out_paths": []} for key in list(match_dict.keys()): self.result[key] = [] - self.result['out_paths'].append(target_path) + self.result["out_paths"].append(target_path) for key, val in list(match_dict.items()): self.result[key].append(val) @@ -1519,14 +1591,15 @@ def _run_interface(self, runtime): if self.inputs.ignore_regexes is Undefined: self.ignore_regexes = [] else: - self.ignore_regexes = \ - [re.compile(regex) - for regex in self.inputs.ignore_regexes] + self.ignore_regexes = [ + re.compile(regex) for regex in self.inputs.ignore_regexes + ] self.result = None for root_path in self.inputs.root_paths: # Handle tilde/env variables and remove extra separators root_path = os.path.normpath( - os.path.expandvars(os.path.expanduser(root_path))) + os.path.expandvars(os.path.expanduser(root_path)) + ) # Check if the root_path is a file if os.path.isfile(root_path): if min_depth == 0: @@ -1535,7 +1608,7 @@ def _run_interface(self, runtime): # Walk through directory structure checking paths for curr_dir, sub_dirs, files in os.walk(root_path): # Determine the current depth from the root_path - curr_depth = (curr_dir.count(os.sep) - root_path.count(os.sep)) + curr_depth = curr_dir.count(os.sep) - root_path.count(os.sep) # If the max path depth has been reached, clear sub_dirs # and files if max_depth is not None and curr_depth >= max_depth: @@ -1548,7 +1621,7 @@ def _run_interface(self, runtime): for infile in files: full_path = os.path.join(curr_dir, 
infile) self._match_path(full_path) - if (self.inputs.unpack_single and len(self.result['out_paths']) == 1): + if self.inputs.unpack_single and len(self.result["out_paths"]) == 1: for key, vals in list(self.result.items()): self.result[key] = vals[0] else: @@ -1557,10 +1630,10 @@ def _run_interface(self, runtime): if key == "out_paths": continue sort_tuples = human_order_sorted( - list(zip(self.result["out_paths"], self.result[key]))) + list(zip(self.result["out_paths"], self.result[key])) + ) self.result[key] = [x for (_, x) in sort_tuples] - self.result["out_paths"] = human_order_sorted( - self.result["out_paths"]) + self.result["out_paths"] = human_order_sorted(self.result["out_paths"]) if not self.result: raise RuntimeError("Regular expression did not match any files!") @@ -1574,149 +1647,159 @@ def _list_outputs(self): class FSSourceInputSpec(BaseInterfaceInputSpec): - subjects_dir = Directory(exists=True, mandatory=True, - desc='Freesurfer subjects directory.') - subject_id = Str(mandatory=True, - desc='Subject name for whom to retrieve data') - hemi = traits.Enum('both', 'lh', 'rh', usedefault=True, - desc='Selects hemisphere specific outputs') + subjects_dir = Directory( + exists=True, mandatory=True, desc="Freesurfer subjects directory." + ) + subject_id = Str(mandatory=True, desc="Subject name for whom to retrieve data") + hemi = traits.Enum( + "both", "lh", "rh", usedefault=True, desc="Selects hemisphere specific outputs" + ) class FSSourceOutputSpec(TraitedSpec): - T1 = File( - exists=True, desc='Intensity normalized whole-head volume', loc='mri') + T1 = File(exists=True, desc="Intensity normalized whole-head volume", loc="mri") aseg = File( exists=True, - loc='mri', - desc='Volumetric map of regions from automatic segmentation') - brain = File( - exists=True, desc='Intensity normalized brain-only volume', loc='mri') - brainmask = File( - exists=True, desc='Skull-stripped (brain-only) volume', loc='mri') - filled = File(exists=True, desc='Subcortical mass volume', loc='mri') - norm = File( - exists=True, desc='Normalized skull-stripped volume', loc='mri') - nu = File( - exists=True, - desc='Non-uniformity corrected whole-head volume', - loc='mri') - orig = File( - exists=True, - desc='Base image conformed to Freesurfer space', - loc='mri') + loc="mri", + desc="Volumetric map of regions from automatic segmentation", + ) + brain = File(exists=True, desc="Intensity normalized brain-only volume", loc="mri") + brainmask = File(exists=True, desc="Skull-stripped (brain-only) volume", loc="mri") + filled = File(exists=True, desc="Subcortical mass volume", loc="mri") + norm = File(exists=True, desc="Normalized skull-stripped volume", loc="mri") + nu = File(exists=True, desc="Non-uniformity corrected whole-head volume", loc="mri") + orig = File(exists=True, desc="Base image conformed to Freesurfer space", loc="mri") rawavg = File( - exists=True, desc='Volume formed by averaging input images', loc='mri') + exists=True, desc="Volume formed by averaging input images", loc="mri" + ) ribbon = OutputMultiPath( File(exists=True), - desc='Volumetric maps of cortical ribbons', - loc='mri', - altkey='*ribbon') - wm = File(exists=True, desc='Segmented white-matter volume', loc='mri') + desc="Volumetric maps of cortical ribbons", + loc="mri", + altkey="*ribbon", + ) + wm = File(exists=True, desc="Segmented white-matter volume", loc="mri") wmparc = File( exists=True, - loc='mri', - desc='Aparc parcellation projected into subcortical white matter') + loc="mri", + desc="Aparc parcellation projected 
into subcortical white matter", + ) curv = OutputMultiPath( - File(exists=True), desc='Maps of surface curvature', loc='surf') + File(exists=True), desc="Maps of surface curvature", loc="surf" + ) avg_curv = OutputMultiPath( File(exists=True), - desc='Average atlas curvature, sampled to subject', - loc='surf') + desc="Average atlas curvature, sampled to subject", + loc="surf", + ) inflated = OutputMultiPath( - File(exists=True), desc='Inflated surface meshes', loc='surf') + File(exists=True), desc="Inflated surface meshes", loc="surf" + ) pial = OutputMultiPath( - File(exists=True), - desc='Gray matter/pia mater surface meshes', - loc='surf') + File(exists=True), desc="Gray matter/pia mater surface meshes", loc="surf" + ) area_pial = OutputMultiPath( File(exists=True), - desc='Mean area of triangles each vertex on the pial surface is ' - 'associated with', - loc='surf', - altkey='area.pial') + desc="Mean area of triangles each vertex on the pial surface is " + "associated with", + loc="surf", + altkey="area.pial", + ) curv_pial = OutputMultiPath( File(exists=True), - desc='Curvature of pial surface', - loc='surf', - altkey='curv.pial') + desc="Curvature of pial surface", + loc="surf", + altkey="curv.pial", + ) smoothwm = OutputMultiPath( - File(exists=True), loc='surf', desc='Smoothed original surface meshes') + File(exists=True), loc="surf", desc="Smoothed original surface meshes" + ) sphere = OutputMultiPath( - File(exists=True), desc='Spherical surface meshes', loc='surf') + File(exists=True), desc="Spherical surface meshes", loc="surf" + ) sulc = OutputMultiPath( - File(exists=True), desc='Surface maps of sulcal depth', loc='surf') + File(exists=True), desc="Surface maps of sulcal depth", loc="surf" + ) thickness = OutputMultiPath( - File(exists=True), - loc='surf', - desc='Surface maps of cortical thickness') + File(exists=True), loc="surf", desc="Surface maps of cortical thickness" + ) volume = OutputMultiPath( - File(exists=True), desc='Surface maps of cortical volume', loc='surf') + File(exists=True), desc="Surface maps of cortical volume", loc="surf" + ) white = OutputMultiPath( - File(exists=True), desc='White/gray matter surface meshes', loc='surf') + File(exists=True), desc="White/gray matter surface meshes", loc="surf" + ) jacobian_white = OutputMultiPath( File(exists=True), - desc='Distortion required to register to spherical atlas', - loc='surf') + desc="Distortion required to register to spherical atlas", + loc="surf", + ) graymid = OutputMultiPath( File(exists=True), - desc='Graymid/midthickness surface meshes', - loc='surf', - altkey=['graymid', 'midthickness']) + desc="Graymid/midthickness surface meshes", + loc="surf", + altkey=["graymid", "midthickness"], + ) label = OutputMultiPath( File(exists=True), - desc='Volume and surface label files', - loc='label', - altkey='*label') + desc="Volume and surface label files", + loc="label", + altkey="*label", + ) annot = OutputMultiPath( - File(exists=True), - desc='Surface annotation files', - loc='label', - altkey='*annot') + File(exists=True), desc="Surface annotation files", loc="label", altkey="*annot" + ) aparc_aseg = OutputMultiPath( File(exists=True), - loc='mri', - altkey='aparc*aseg', - desc='Aparc parcellation projected into aseg volume') + loc="mri", + altkey="aparc*aseg", + desc="Aparc parcellation projected into aseg volume", + ) sphere_reg = OutputMultiPath( File(exists=True), - loc='surf', - altkey='sphere.reg', - desc='Spherical registration file') + loc="surf", + altkey="sphere.reg", + desc="Spherical 
registration file", + ) aseg_stats = OutputMultiPath( File(exists=True), - loc='stats', - altkey='aseg', - desc='Automated segmentation statistics file') + loc="stats", + altkey="aseg", + desc="Automated segmentation statistics file", + ) wmparc_stats = OutputMultiPath( File(exists=True), - loc='stats', - altkey='wmparc', - desc='White matter parcellation statistics file') + loc="stats", + altkey="wmparc", + desc="White matter parcellation statistics file", + ) aparc_stats = OutputMultiPath( File(exists=True), - loc='stats', - altkey='aparc', - desc='Aparc parcellation statistics files') + loc="stats", + altkey="aparc", + desc="Aparc parcellation statistics files", + ) BA_stats = OutputMultiPath( File(exists=True), - loc='stats', - altkey='BA', - desc='Brodmann Area statistics files') + loc="stats", + altkey="BA", + desc="Brodmann Area statistics files", + ) aparc_a2009s_stats = OutputMultiPath( File(exists=True), - loc='stats', - altkey='aparc.a2009s', - desc='Aparc a2009s parcellation statistics files') + loc="stats", + altkey="aparc.a2009s", + desc="Aparc a2009s parcellation statistics files", + ) curv_stats = OutputMultiPath( - File(exists=True), - loc='stats', - altkey='curv', - desc='Curvature statistics files') + File(exists=True), loc="stats", altkey="curv", desc="Curvature statistics files" + ) entorhinal_exvivo_stats = OutputMultiPath( File(exists=True), - loc='stats', - altkey='entorhinal_exvivo', - desc='Entorhinal exvivo statistics files') + loc="stats", + altkey="entorhinal_exvivo", + desc="Entorhinal exvivo statistics files", + ) class FreeSurferSource(IOBase): @@ -1735,36 +1818,35 @@ class FreeSurferSource(IOBase): >>> res = fs.run() # doctest: +SKIP """ + input_spec = FSSourceInputSpec output_spec = FSSourceOutputSpec _always_run = True - _additional_metadata = ['loc', 'altkey'] + _additional_metadata = ["loc", "altkey"] def _get_files(self, path, key, dirval, altkey=None): - globsuffix = '' - if dirval == 'mri': - globsuffix = '.mgz' - elif dirval == 'stats': - globsuffix = '.stats' - globprefix = '' - if dirval in ('surf', 'label', 'stats'): - if self.inputs.hemi != 'both': - globprefix = self.inputs.hemi + '.' + globsuffix = "" + if dirval == "mri": + globsuffix = ".mgz" + elif dirval == "stats": + globsuffix = ".stats" + globprefix = "" + if dirval in ("surf", "label", "stats"): + if self.inputs.hemi != "both": + globprefix = self.inputs.hemi + "." else: - globprefix = '?h.' - if key in ('aseg_stats', 'wmparc_stats'): - globprefix = '' - elif key == 'ribbon': - if self.inputs.hemi != 'both': - globprefix = self.inputs.hemi + '.' + globprefix = "?h." + if key in ("aseg_stats", "wmparc_stats"): + globprefix = "" + elif key == "ribbon": + if self.inputs.hemi != "both": + globprefix = self.inputs.hemi + "." 
else: - globprefix = '*' + globprefix = "*" keys = ensure_list(altkey) if altkey else [key] - globfmt = os.path.join(path, dirval, ''.join((globprefix, '{}', - globsuffix))) + globfmt = os.path.join(path, dirval, "".join((globprefix, "{}", globsuffix))) return [ - os.path.abspath(f) for key in keys - for f in glob.glob(globfmt.format(key)) + os.path.abspath(f) for key in keys for f in glob.glob(globfmt.format(key)) ] def _list_outputs(self): @@ -1773,9 +1855,12 @@ def _list_outputs(self): output_traits = self._outputs() outputs = output_traits.get() for k in list(outputs.keys()): - val = self._get_files(subject_path, k, - output_traits.traits()[k].loc, - output_traits.traits()[k].altkey) + val = self._get_files( + subject_path, + k, + output_traits.traits()[k].loc, + output_traits.traits()[k].altkey, + ) if val: outputs[k] = simplify_list(val) return outputs @@ -1785,23 +1870,24 @@ class XNATSourceInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): query_template = Str( mandatory=True, - desc=('Layout used to get files. Relative to base ' - 'directory if defined')) + desc=("Layout used to get files. Relative to base " "directory if defined"), + ) query_template_args = traits.Dict( Str, traits.List(traits.List), value=dict(outfiles=[]), usedefault=True, - desc='Information to plug into template') + desc="Information to plug into template", + ) - server = Str(mandatory=True, requires=['user', 'pwd'], xor=['config']) + server = Str(mandatory=True, requires=["user", "pwd"], xor=["config"]) user = Str() pwd = traits.Password() - config = File(mandatory=True, xor=['server']) + config = File(mandatory=True, xor=["server"]) - cache_dir = Directory(desc='Cache directory') + cache_dir = Directory(desc="Cache directory") class XNATSource(LibraryBaseInterface, IOBase): @@ -1837,9 +1923,10 @@ class XNATSource(LibraryBaseInterface, IOBase): """ + input_spec = XNATSourceInputSpec output_spec = DynamicTraitedSpec - _pkg = 'pyxnat' + _pkg = "pyxnat" def __init__(self, infields=None, outfields=None, **kwargs): """ @@ -1862,15 +1949,17 @@ def __init__(self, infields=None, outfields=None, **kwargs): for key in infields: self.inputs.add_trait(key, traits.Any) undefined_traits[key] = Undefined - self.inputs.query_template_args['outfiles'] = [infields] + self.inputs.query_template_args["outfiles"] = [infields] if outfields: # add ability to insert field specific templates self.inputs.add_trait( - 'field_template', + "field_template", traits.Dict( traits.Enum(outfields), - desc="arguments that fit into query_template")) - undefined_traits['field_template'] = Undefined + desc="arguments that fit into query_template", + ), + ) + undefined_traits["field_template"] = Undefined # self.inputs.remove_trait('query_template_args') outdict = {} for key in outfields: @@ -1896,52 +1985,59 @@ def _list_outputs(self): if self.inputs.config: xnat = pyxnat.Interface(config=self.inputs.config) else: - xnat = pyxnat.Interface(self.inputs.server, self.inputs.user, - self.inputs.pwd, cache_dir) + xnat = pyxnat.Interface( + self.inputs.server, self.inputs.user, self.inputs.pwd, cache_dir + ) if self._infields: for key in self._infields: value = getattr(self.inputs, key) if not isdefined(value): - msg = ("%s requires a value for input '%s' " - "because it was listed in 'infields'" % - (self.__class__.__name__, key)) + msg = ( + "%s requires a value for input '%s' " + "because it was listed in 'infields'" + % (self.__class__.__name__, key) + ) raise ValueError(msg) outputs = {} for key, args in 
list(self.inputs.query_template_args.items()): outputs[key] = [] template = self.inputs.query_template - if hasattr(self.inputs, 'field_template') and \ - isdefined(self.inputs.field_template) and \ - key in self.inputs.field_template: + if ( + hasattr(self.inputs, "field_template") + and isdefined(self.inputs.field_template) + and key in self.inputs.field_template + ): template = self.inputs.field_template[key] if not args: - file_objects = xnat.select(template).get('obj') + file_objects = xnat.select(template).get("obj") if file_objects == []: - raise IOError('Template %s returned no files' % template) - outputs[key] = simplify_list([ - str(file_object.get()) for file_object in file_objects - if file_object.exists() - ]) + raise IOError("Template %s returned no files" % template) + outputs[key] = simplify_list( + [ + str(file_object.get()) + for file_object in file_objects + if file_object.exists() + ] + ) for argnum, arglist in enumerate(args): maxlen = 1 for arg in arglist: - if isinstance(arg, - (str, bytes)) and hasattr(self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): if (maxlen > 1) and (len(arg) != maxlen): - raise ValueError('incompatible number ' - 'of arguments for %s' % key) + raise ValueError( + "incompatible number " "of arguments for %s" % key + ) if len(arg) > maxlen: maxlen = len(arg) outfiles = [] for i in range(maxlen): argtuple = [] for arg in arglist: - if isinstance(arg, (str, bytes)) and \ - hasattr(self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): argtuple.append(arg[i]) @@ -1949,29 +2045,31 @@ def _list_outputs(self): argtuple.append(arg) if argtuple: target = template % tuple(argtuple) - file_objects = xnat.select(target).get('obj') + file_objects = xnat.select(target).get("obj") if file_objects == []: - raise IOError('Template %s ' - 'returned no files' % target) - - outfiles = simplify_list([ - str(file_object.get()) - for file_object in file_objects - if file_object.exists() - ]) + raise IOError("Template %s " "returned no files" % target) + + outfiles = simplify_list( + [ + str(file_object.get()) + for file_object in file_objects + if file_object.exists() + ] + ) else: - file_objects = xnat.select(template).get('obj') + file_objects = xnat.select(template).get("obj") if file_objects == []: - raise IOError('Template %s ' - 'returned no files' % template) + raise IOError("Template %s " "returned no files" % template) - outfiles = simplify_list([ - str(file_object.get()) - for file_object in file_objects - if file_object.exists() - ]) + outfiles = simplify_list( + [ + str(file_object.get()) + for file_object in file_objects + if file_object.exists() + ] + ) outputs[key].insert(i, outfiles) if len(outputs[key]) == 0: @@ -1985,36 +2083,44 @@ class XNATSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): _outputs = traits.Dict(Str, value={}, usedefault=True) - server = Str(mandatory=True, requires=['user', 'pwd'], xor=['config']) + server = Str(mandatory=True, requires=["user", "pwd"], xor=["config"]) user = Str() pwd = traits.Password() - config = File(mandatory=True, xor=['server']) - cache_dir = Directory(desc='') + config = File(mandatory=True, xor=["server"]) + cache_dir = Directory(desc="") - project_id = Str( - desc='Project in which to store the outputs', mandatory=True) + project_id = Str(desc="Project in which to store the outputs", mandatory=True) - subject_id = 
Str(desc='Set to subject id', mandatory=True) + subject_id = Str(desc="Set to subject id", mandatory=True) - experiment_id = Str(desc='Set to workflow name', mandatory=True) + experiment_id = Str(desc="Set to workflow name", mandatory=True) assessor_id = Str( - desc=('Option to customize ouputs representation in XNAT - ' - 'assessor level will be used with specified id'), - xor=['reconstruction_id']) + desc=( + "Option to customize outputs representation in XNAT - " + "assessor level will be used with specified id" + ), + xor=["reconstruction_id"], + ) reconstruction_id = Str( - desc=('Option to customize ouputs representation in XNAT - ' - 'reconstruction level will be used with specified id'), - xor=['assessor_id']) + desc=( + "Option to customize outputs representation in XNAT - " + "reconstruction level will be used with specified id" + ), + xor=["assessor_id"], + ) share = traits.Bool( False, - desc=('Option to share the subjects from the original project' - 'instead of creating new ones when possible - the created ' - 'experiments are then shared back to the original project'), - usedefault=True) + desc=( + "Option to share the subjects from the original project " + "instead of creating new ones when possible - the created " + "experiments are then shared back to the original project" + ), + usedefault=True, + ) def __setattr__(self, key, value): if key not in self.copyable_trait_names(): @@ -2028,8 +2134,9 @@ class XNATSink(LibraryBaseInterface, IOBase): list of nifti files and provides a set of structured output fields. """ + input_spec = XNATSinkInputSpec - _pkg = 'pyxnat' + _pkg = "pyxnat" def _list_outputs(self): """Execute this module. @@ -2042,34 +2149,36 @@ def _list_outputs(self): if self.inputs.config: xnat = pyxnat.Interface(config=self.inputs.config) else: - xnat = pyxnat.Interface(self.inputs.server, self.inputs.user, - self.inputs.pwd, cache_dir) + xnat = pyxnat.Interface( + self.inputs.server, self.inputs.user, self.inputs.pwd, cache_dir + ) # if possible share the subject from the original project if self.inputs.share: subject_id = self.inputs.subject_id result = xnat.select( - 'xnat:subjectData', - ['xnat:subjectData/PROJECT', 'xnat:subjectData/SUBJECT_ID' - ]).where('xnat:subjectData/SUBJECT_ID = %s AND' % subject_id) + "xnat:subjectData", + ["xnat:subjectData/PROJECT", "xnat:subjectData/SUBJECT_ID"], + ).where("xnat:subjectData/SUBJECT_ID = %s AND" % subject_id) # subject containing raw data exists on the server - if (result.data and isinstance(result.data[0], dict)): + if result.data and isinstance(result.data[0], dict): result = result.data[0] - shared = xnat.select('/project/%s/subject/%s' % - (self.inputs.project_id, - self.inputs.subject_id)) + shared = xnat.select( + "/project/%s/subject/%s" + % (self.inputs.project_id, self.inputs.subject_id) + ) if not shared.exists(): # subject not in share project - share_project = xnat.select( - '/project/%s' % self.inputs.project_id) + share_project = xnat.select("/project/%s" % self.inputs.project_id) if not share_project.exists(): # check project exists share_project.insert() - subject = xnat.select('/project/%(project)s' - '/subject/%(subject_id)s' % result) + subject = xnat.select( + "/project/%(project)s" "/subject/%(subject_id)s" % result + ) subject.share(str(self.inputs.project_id)) @@ -2077,17 +2186,18 @@ def _list_outputs(self): uri_template_args = dict( project_id=quote_id(self.inputs.project_id), subject_id=self.inputs.subject_id, - experiment_id=quote_id(self.inputs.experiment_id)) + 
experiment_id=quote_id(self.inputs.experiment_id), + ) if self.inputs.share: - uri_template_args['original_project'] = result['project'] + uri_template_args["original_project"] = result["project"] if self.inputs.assessor_id: - uri_template_args['assessor_id'] = quote_id( - self.inputs.assessor_id) + uri_template_args["assessor_id"] = quote_id(self.inputs.assessor_id) elif self.inputs.reconstruction_id: - uri_template_args['reconstruction_id'] = quote_id( - self.inputs.reconstruction_id) + uri_template_args["reconstruction_id"] = quote_id( + self.inputs.reconstruction_id + ) # gather outputs and upload them for key, files in list(self.inputs._outputs.items()): @@ -2096,27 +2206,29 @@ def _list_outputs(self): if isinstance(name, list): for i, file_name in enumerate(name): - push_file(self, xnat, file_name, '%s_' % i + key, - uri_template_args) + push_file( + self, xnat, file_name, "%s_" % i + key, uri_template_args + ) else: push_file(self, xnat, name, key, uri_template_args) def quote_id(string): - return str(string).replace('_', '---') + return str(string).replace("_", "---") def unquote_id(string): - return str(string).replace('---', '_') + return str(string).replace("---", "_") def push_file(self, xnat, file_name, out_key, uri_template_args): # grab info from output file names val_list = [ - unquote_id(val) for part in os.path.split(file_name)[0].split(os.sep) - for val in part.split('_')[1:] - if part.startswith('_') and len(part.split('_')) % 2 + unquote_id(val) + for part in os.path.split(file_name)[0].split(os.sep) + for val in part.split("_")[1:] + if part.startswith("_") and len(part.split("_")) % 2 ] keymap = dict(list(zip(val_list[1::2], val_list[2::2]))) @@ -2127,38 +2239,41 @@ def push_file(self, xnat, file_name, out_key, uri_template_args): _label.extend([key, val]) # select and define container level - uri_template_args['container_type'] = None + uri_template_args["container_type"] = None - for container in ['assessor_id', 'reconstruction_id']: + for container in ["assessor_id", "reconstruction_id"]: if getattr(self.inputs, container): - uri_template_args['container_type'] = container.split('_id')[0] - uri_template_args['container_id'] = uri_template_args[container] + uri_template_args["container_type"] = container.split("_id")[0] + uri_template_args["container_id"] = uri_template_args[container] - if uri_template_args['container_type'] is None: - uri_template_args['container_type'] = 'reconstruction' + if uri_template_args["container_type"] is None: + uri_template_args["container_type"] = "reconstruction" - uri_template_args['container_id'] = unquote_id( - uri_template_args['experiment_id']) + uri_template_args["container_id"] = unquote_id( + uri_template_args["experiment_id"] + ) if _label: - uri_template_args['container_id'] += ( - '_results_%s' % '_'.join(_label)) + uri_template_args["container_id"] += "_results_%s" % "_".join(_label) else: - uri_template_args['container_id'] += '_results' + uri_template_args["container_id"] += "_results" # define resource level - uri_template_args['resource_label'] = ('%s_%s' % - (uri_template_args['container_id'], - out_key.split('.')[0])) + uri_template_args["resource_label"] = "%s_%s" % ( + uri_template_args["container_id"], + out_key.split(".")[0], + ) # define file level - uri_template_args['file_name'] = os.path.split( - os.path.abspath(unquote_id(file_name)))[1] + uri_template_args["file_name"] = os.path.split( + os.path.abspath(unquote_id(file_name)) + )[1] uri_template = ( - '/project/%(project_id)s/subject/%(subject_id)s' - 
'/experiment/%(experiment_id)s/%(container_type)s/%(container_id)s' - '/out/resource/%(resource_label)s/file/%(file_name)s') + "/project/%(project_id)s/subject/%(subject_id)s" + "/experiment/%(experiment_id)s/%(container_type)s/%(container_id)s" + "/out/resource/%(resource_label)s/file/%(file_name)s" + ) # unquote values before uploading for key in list(uri_template_args.keys()): @@ -2166,18 +2281,19 @@ def push_file(self, xnat, file_name, out_key, uri_template_args): # upload file remote_file = xnat.select(uri_template % uri_template_args) - remote_file.insert( - file_name, experiments='xnat:imageSessionData', use_label=True) + remote_file.insert(file_name, experiments="xnat:imageSessionData", use_label=True) # shares the experiment back to the original project if relevant - if 'original_project' in uri_template_args: + if "original_project" in uri_template_args: experiment_template = ( - '/project/%(original_project)s' - '/subject/%(subject_id)s/experiment/%(experiment_id)s') + "/project/%(original_project)s" + "/subject/%(subject_id)s/experiment/%(experiment_id)s" + ) xnat.select(experiment_template % uri_template_args).share( - uri_template_args['original_project']) + uri_template_args["original_project"] + ) def capture_provenance(): @@ -2212,8 +2328,9 @@ class SQLiteSink(LibraryBaseInterface, IOBase): >>> sql.run() # doctest: +SKIP """ + input_spec = SQLiteSinkInputSpec - _pkg = 'sqlite3' + _pkg = "sqlite3" def __init__(self, input_names, **inputs): @@ -2226,13 +2343,17 @@ def _list_outputs(self): """Execute this module. """ import sqlite3 - conn = sqlite3.connect( - self.inputs.database_file, check_same_thread=False) + + conn = sqlite3.connect(self.inputs.database_file, check_same_thread=False) c = conn.cursor() - c.execute("INSERT OR REPLACE INTO %s (" % self.inputs.table_name + - ",".join(self._input_names) + ") VALUES (" + - ",".join(["?"] * len(self._input_names)) + ")", - [getattr(self.inputs, name) for name in self._input_names]) + c.execute( + "INSERT OR REPLACE INTO %s (" % self.inputs.table_name + + ",".join(self._input_names) + + ") VALUES (" + + ",".join(["?"] * len(self._input_names)) + + ")", + [getattr(self.inputs, name) for name in self._input_names], + ) conn.commit() c.close() return None @@ -2240,17 +2361,16 @@ def _list_outputs(self): class MySQLSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): host = Str( - 'localhost', + "localhost", mandatory=True, - requires=['username', 'password'], - xor=['config'], - usedefault=True) + requires=["username", "password"], + xor=["config"], + usedefault=True, + ) config = File( - mandatory=True, - xor=['host'], - desc="MySQL Options File (same format as my.cnf)") - database_name = Str( - mandatory=True, desc='Otherwise known as the schema name') + mandatory=True, xor=["host"], desc="MySQL Options File (same format as my.cnf)" + ) + database_name = Str(mandatory=True, desc="Otherwise known as the schema name") table_name = Str(mandatory=True) username = Str() password = Str() @@ -2272,6 +2392,7 @@ class MySQLSink(IOBase): >>> sql.run() # doctest: +SKIP """ + input_spec = MySQLSinkInputSpec def __init__(self, input_names, **inputs): @@ -2285,45 +2406,52 @@ def _list_outputs(self): """Execute this module. 
""" import MySQLdb + if isdefined(self.inputs.config): conn = MySQLdb.connect( - db=self.inputs.database_name, - read_default_file=self.inputs.config) + db=self.inputs.database_name, read_default_file=self.inputs.config + ) else: conn = MySQLdb.connect( host=self.inputs.host, user=self.inputs.username, passwd=self.inputs.password, - db=self.inputs.database_name) + db=self.inputs.database_name, + ) c = conn.cursor() - c.execute("REPLACE INTO %s (" % self.inputs.table_name + - ",".join(self._input_names) + ") VALUES (" + - ",".join(["%s"] * len(self._input_names)) + ")", - [getattr(self.inputs, name) for name in self._input_names]) + c.execute( + "REPLACE INTO %s (" % self.inputs.table_name + + ",".join(self._input_names) + + ") VALUES (" + + ",".join(["%s"] * len(self._input_names)) + + ")", + [getattr(self.inputs, name) for name in self._input_names], + ) conn.commit() c.close() return None class SSHDataGrabberInputSpec(DataGrabberInputSpec): - hostname = Str(mandatory=True, desc='Server hostname.') - username = Str(desc='Server username.') - password = traits.Password(desc='Server password.') + hostname = Str(mandatory=True, desc="Server hostname.") + username = Str(desc="Server username.") + password = traits.Password(desc="Server password.") download_files = traits.Bool( True, usedefault=True, - desc='If false it will return the file names without downloading them') + desc="If false it will return the file names without downloading them", + ) base_directory = Str( - mandatory=True, - desc='Path to the base directory consisting of subject data.') + mandatory=True, desc="Path to the base directory consisting of subject data." + ) template_expression = traits.Enum( - ['fnmatch', 'regexp'], + ["fnmatch", "regexp"], usedefault=True, - desc='Use either fnmatch or regexp to express templates') + desc="Use either fnmatch or regexp to express templates", + ) ssh_log_to_file = Str( - '', - usedefault=True, - desc='If set SSH commands will be logged to the given file') + "", usedefault=True, desc="If set SSH commands will be logged to the given file" + ) class SSHDataGrabber(LibraryBaseInterface, DataGrabber): @@ -2387,10 +2515,11 @@ class SSHDataGrabber(LibraryBaseInterface, DataGrabber): >>> dg.inputs.template_args['struct'] = [['sid']] """ + input_spec = SSHDataGrabberInputSpec output_spec = DynamicTraitedSpec _always_run = False - _pkg = 'paramiko' + _pkg = "paramiko" def __init__(self, infields=None, outfields=None, **kwargs): """ @@ -2406,18 +2535,21 @@ def __init__(self, infields=None, outfields=None, **kwargs): """ if not outfields: - outfields = ['outfiles'] + outfields = ["outfiles"] kwargs = kwargs.copy() - kwargs['infields'] = infields - kwargs['outfields'] = outfields + kwargs["infields"] = infields + kwargs["outfields"] = outfields super(SSHDataGrabber, self).__init__(**kwargs) - if (None in (self.inputs.username, self.inputs.password)): - raise ValueError("either both username and password " - "are provided or none of them") + if None in (self.inputs.username, self.inputs.password): + raise ValueError( + "either both username and password " "are provided or none of them" + ) - if (self.inputs.template_expression == 'regexp' - and self.inputs.template[-1] != '$'): - self.inputs.template += '$' + if ( + self.inputs.template_expression == "regexp" + and self.inputs.template[-1] != "$" + ): + self.inputs.template += "$" def _get_files_over_ssh(self, template): """Get the files matching template over an SSH connection.""" @@ -2430,17 +2562,17 @@ def _get_files_over_ssh(self, template): 
template_dir = os.path.dirname(template) template_base = os.path.basename(template) every_file_in_dir = sftp.listdir(template_dir) - if self.inputs.template_expression == 'fnmatch': + if self.inputs.template_expression == "fnmatch": outfiles = fnmatch.filter(every_file_in_dir, template_base) - elif self.inputs.template_expression == 'regexp': + elif self.inputs.template_expression == "regexp": regexp = re.compile(template_base) outfiles = list(filter(regexp.match, every_file_in_dir)) else: - raise ValueError('template_expression value invalid') + raise ValueError("template_expression value invalid") if len(outfiles) == 0: # no files - msg = 'Output template: %s returned no files' % template + msg = "Output template: %s returned no files" % template if self.inputs.raise_on_empty: raise IOError(msg) else: @@ -2456,22 +2588,25 @@ def _get_files_over_ssh(self, template): # actually download the files, if desired if self.inputs.download_files: - files_to_download = copy.copy(outfiles) # make sure new list! + files_to_download = copy.copy(outfiles) # make sure new list! # check to see if there are any related files to download for file_to_download in files_to_download: related_to_current = get_related_files( - file_to_download, include_this_file=False) + file_to_download, include_this_file=False + ) existing_related_not_downloading = [ - f for f in related_to_current - if f in every_file_in_dir and f not in files_to_download] + f + for f in related_to_current + if f in every_file_in_dir and f not in files_to_download + ] files_to_download.extend(existing_related_not_downloading) for f in files_to_download: try: sftp.get(os.path.join(template_dir, f), f) except IOError: - iflogger.info('remote file %s not found' % f) + iflogger.info("remote file %s not found" % f) # return value outfiles = simplify_list(outfiles) @@ -2489,17 +2624,21 @@ def _list_outputs(self): for key in self._infields: value = getattr(self.inputs, key) if not isdefined(value): - msg = "%s requires a value for input '%s' because it was listed in 'infields'" % \ - (self.__class__.__name__, key) + msg = ( + "%s requires a value for input '%s' because it was listed in 'infields'" + % (self.__class__.__name__, key) + ) raise ValueError(msg) outputs = {} for key, args in list(self.inputs.template_args.items()): outputs[key] = [] template = self.inputs.template - if hasattr(self.inputs, 'field_template') and \ - isdefined(self.inputs.field_template) and \ - key in self.inputs.field_template: + if ( + hasattr(self.inputs, "field_template") + and isdefined(self.inputs.field_template) + and key in self.inputs.field_template + ): template = self.inputs.field_template[key] if not args: @@ -2508,22 +2647,20 @@ def _list_outputs(self): for argnum, arglist in enumerate(args): maxlen = 1 for arg in arglist: - if isinstance(arg, - (str, bytes)) and hasattr(self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): if (maxlen > 1) and (len(arg) != maxlen): raise ValueError( - 'incompatible number of arguments for %s' % - key) + "incompatible number of arguments for %s" % key + ) if len(arg) > maxlen: maxlen = len(arg) outfiles = [] for i in range(maxlen): argtuple = [] for arg in arglist: - if isinstance(arg, (str, bytes)) and hasattr( - self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): argtuple.append(arg[i]) @@ -2535,9 +2672,10 @@ def _list_outputs(self): filledtemplate 
= template % tuple(argtuple) except TypeError as e: raise TypeError( - e.message + - ": Template %s failed to convert with args %s" - % (template, str(tuple(argtuple)))) + str(e) + + ": Template %s failed to convert with args %s" + % (template, str(tuple(argtuple))) + ) outputs[key].append(self._get_files_over_ssh(filledtemplate)) @@ -2560,29 +2698,33 @@ def _list_outputs(self): def _get_ssh_client(self): import paramiko + config = paramiko.SSHConfig() - config.parse(open(os.path.expanduser('~/.ssh/config'))) + config.parse(open(os.path.expanduser("~/.ssh/config"))) host = config.lookup(self.inputs.hostname) - if 'proxycommand' in host: + if "proxycommand" in host: proxy = paramiko.ProxyCommand( - subprocess.check_output([ - os.environ['SHELL'], '-c', - 'echo %s' % host['proxycommand'] - ]).strip()) + subprocess.check_output( + [os.environ["SHELL"], "-c", "echo %s" % host["proxycommand"]] + ).strip() + ) else: proxy = None client = paramiko.SSHClient() client.load_system_host_keys() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) - client.connect(host['hostname'], username=host['user'], sock=proxy) + client.connect(host["hostname"], username=host["user"], sock=proxy) return client class JSONFileGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): - in_file = File(exists=True, desc='JSON source file') + in_file = File(exists=True, desc="JSON source file") defaults = traits.Dict( - desc=('JSON dictionary that sets default output' - 'values, overridden by values found in in_file')) + desc=( + "JSON dictionary that sets default output " + "values, overridden by values found in in_file" + ) + ) class JSONFileGrabber(IOBase): @@ -2605,6 +2747,7 @@ class JSONFileGrabber(IOBase): >>> pprint.pprint(res.outputs.get()) # doctest: +ELLIPSIS {'param1': 'exampleStr', 'param2': 4, 'param3': 1.0} """ + input_spec = JSONFileGrabberInputSpec output_spec = DynamicTraitedSpec _always_run = True @@ -2614,11 +2757,11 @@ def _list_outputs(self): outputs = {} if isdefined(self.inputs.in_file): - with open(self.inputs.in_file, 'r') as f: + with open(self.inputs.in_file, "r") as f: data = simplejson.load(f) if not isinstance(data, dict): - raise RuntimeError('JSON input has no dictionary structure') + raise RuntimeError("JSON input has no dictionary structure") for key, value in list(data.items()): outputs[key] = value @@ -2633,9 +2776,8 @@ def _list_outputs(self): class JSONFileSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): - out_file = File(desc='JSON sink file') - in_dict = traits.Dict( - value={}, usedefault=True, desc='input JSON dictionary') + out_file = File(desc="JSON sink file") + in_dict = traits.Dict(value={}, usedefault=True, desc="input JSON dictionary") _outputs = traits.Dict(value={}, usedefault=True) def __setattr__(self, key, value): @@ -2650,7 +2792,7 @@ def __setattr__(self, key, value): class JSONFileSinkOutputSpec(TraitedSpec): - out_file = File(desc='JSON sink file') + out_file = File(desc="JSON sink file") class JSONFileSink(IOBase): @@ -2681,6 +2823,7 @@ class JSONFileSink(IOBase): >>> dictsink.run() # doctest: +SKIP """ + input_spec = JSONFileSinkInputSpec output_spec = JSONFileSinkOutputSpec @@ -2699,8 +2842,8 @@ def __init__(self, infields=[], force_run=True, **inputs): self._always_run = True def _process_name(self, name, val): - if '.' in name: - newkeys = name.split('.') + if "." 
in name: + newkeys = name.split(".") name = newkeys.pop(0) nested_dict = {newkeys.pop(): val} @@ -2715,7 +2858,7 @@ def _list_outputs(self): import os.path as op if not isdefined(self.inputs.out_file): - out_file = op.abspath('datasink.json') + out_file = op.abspath("datasink.json") else: out_file = op.abspath(self.inputs.out_file) @@ -2723,37 +2866,35 @@ def _list_outputs(self): # Overwrite in_dict entries automatically for key, val in list(self.inputs._outputs.items()): - if not isdefined(val) or key == 'trait_added': + if not isdefined(val) or key == "trait_added": continue key, val = self._process_name(key, val) out_dict[key] = val - with open(out_file, 'w') as f: + with open(out_file, "w") as f: f.write(str(simplejson.dumps(out_dict, ensure_ascii=False))) outputs = self.output_spec().get() - outputs['out_file'] = out_file + outputs["out_file"] = out_file return outputs class BIDSDataGrabberInputSpec(DynamicTraitedSpec): - base_dir = Directory( - exists=True, - desc='Path to BIDS Directory.', - mandatory=True) + base_dir = Directory(exists=True, desc="Path to BIDS Directory.", mandatory=True) output_query = traits.Dict( - key_trait=Str, - value_trait=traits.Dict, - desc='Queries for outfield outputs') + key_trait=Str, value_trait=traits.Dict, desc="Queries for outfield outputs" + ) raise_on_empty = traits.Bool( - True, usedefault=True, - desc='Generate exception if list is empty for a given field') + True, + usedefault=True, + desc="Generate exception if list is empty for a given field", + ) index_derivatives = traits.Bool( - False, mandatory=True, usedefault=True, - desc='Index derivatives/ sub-directory') + False, mandatory=True, usedefault=True, desc="Index derivatives/ sub-directory" + ) extra_derivatives = traits.List( - Directory(exists=True), - desc='Additional derivative directories to index') + Directory(exists=True), desc="Additional derivative directories to index" + ) class BIDSDataGrabber(LibraryBaseInterface, IOBase): @@ -2786,10 +2927,11 @@ class BIDSDataGrabber(LibraryBaseInterface, IOBase): >>> results = bg.run() # doctest: +SKIP """ + input_spec = BIDSDataGrabberInputSpec output_spec = DynamicTraitedSpec _always_run = True - _pkg = 'bids' + _pkg = "bids" def __init__(self, infields=None, **kwargs): """ @@ -2802,19 +2944,25 @@ def __init__(self, infields=None, **kwargs): if not isdefined(self.inputs.output_query): self.inputs.output_query = { - "bold": {"datatype": "func", "suffix": "bold", - "extensions": ["nii", ".nii.gz"]}, - "T1w": {"datatype": "anat", "suffix": "T1w", - "extensions": ["nii", ".nii.gz"]}, - } + "bold": { + "datatype": "func", + "suffix": "bold", + "extensions": ["nii", ".nii.gz"], + }, + "T1w": { + "datatype": "anat", + "suffix": "T1w", + "extensions": ["nii", ".nii.gz"], + }, + } # If infields is empty, use all BIDS entities if infields is None: from bids import layout as bidslayout - bids_config = join( - dirname(bidslayout.__file__), 'config', 'bids.json') - bids_config = json.load(open(bids_config, 'r')) - infields = [i['name'] for i in bids_config['entities']] + + bids_config = join(dirname(bidslayout.__file__), "config", "bids.json") + bids_config = json.load(open(bids_config, "r")) + infields = [i["name"] for i in bids_config["entities"]] self._infields = infields or [] @@ -2828,8 +2976,10 @@ def __init__(self, infields=None, **kwargs): def _list_outputs(self): from bids import BIDSLayout - layout = BIDSLayout(self.inputs.base_dir, - derivatives=self.inputs.index_derivatives) + + layout = BIDSLayout( + self.inputs.base_dir, 
derivatives=self.inputs.index_derivatives + ) if isdefined(self.inputs.extra_derivatives): layout.add_derivatives(self.inputs.extra_derivatives) @@ -2845,9 +2995,9 @@ def _list_outputs(self): for key, query in self.inputs.output_query.items(): args = query.copy() args.update(filters) - filelist = layout.get(return_type='file', **args) + filelist = layout.get(return_type="file", **args) if len(filelist) == 0: - msg = 'Output key: %s returned no files' % key + msg = "Output key: %s returned no files" % key if self.inputs.raise_on_empty: raise IOError(msg) else: @@ -2862,14 +3012,16 @@ def _add_output_traits(self, base): class ExportFileInputSpec(BaseInterfaceInputSpec): - in_file = File(exists=True, mandatory=True, desc='Input file name') - out_file = File(mandatory=True, desc='Output file name') - check_extension = traits.Bool(True, desc='Ensure that the input and output file extensions match') - clobber = traits.Bool(desc='Permit overwriting existing files') + in_file = File(exists=True, mandatory=True, desc="Input file name") + out_file = File(mandatory=True, desc="Output file name") + check_extension = traits.Bool( + True, desc="Ensure that the input and output file extensions match" + ) + clobber = traits.Bool(desc="Permit overwriting existing files") class ExportFileOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='Output file name') + out_file = File(exists=True, desc="Output file name") class ExportFile(SimpleInterface): @@ -2893,6 +3045,7 @@ class ExportFile(SimpleInterface): True """ + input_spec = ExportFileInputSpec output_spec = ExportFileOutputSpec @@ -2900,10 +3053,16 @@ def _run_interface(self, runtime): if not self.inputs.clobber and op.exists(self.inputs.out_file): raise FileExistsError(self.inputs.out_file) if not op.isabs(self.inputs.out_file): - raise ValueError('Out_file must be an absolute path.') - if (self.inputs.check_extension and - split_filename(self.inputs.in_file)[2] != split_filename(self.inputs.out_file)[2]): - raise RuntimeError('%s and %s have different extensions' % (self.inputs.in_file, self.inputs.out_file)) + raise ValueError("Out_file must be an absolute path.") + if ( + self.inputs.check_extension + and split_filename(self.inputs.in_file)[2] + != split_filename(self.inputs.out_file)[2] + ): + raise RuntimeError( + "%s and %s have different extensions" + % (self.inputs.in_file, self.inputs.out_file) + ) shutil.copy(str(self.inputs.in_file), str(self.inputs.out_file)) - self._results['out_file'] = self.inputs.out_file + self._results["out_file"] = self.inputs.out_file return runtime diff --git a/nipype/interfaces/matlab.py b/nipype/interfaces/matlab.py index 71fc7ab0e5..488635843e 100644 --- a/nipype/interfaces/matlab.py +++ b/nipype/interfaces/matlab.py @@ -5,25 +5,33 @@ import os from .. 
import config -from .base import (CommandLineInputSpec, InputMultiPath, isdefined, - CommandLine, traits, File, Directory) +from .base import ( + CommandLineInputSpec, + InputMultiPath, + isdefined, + CommandLine, + traits, + File, + Directory, +) def get_matlab_command(): - if 'NIPYPE_NO_MATLAB' in os.environ: + if "NIPYPE_NO_MATLAB" in os.environ: return None try: - matlab_cmd = os.environ['MATLABCMD'] + matlab_cmd = os.environ["MATLABCMD"] except: - matlab_cmd = 'matlab' + matlab_cmd = "matlab" try: res = CommandLine( - command='which', + command="which", args=matlab_cmd, resource_monitor=False, - terminal_output='allatonce').run() + terminal_output="allatonce", + ).run() matlab_path = res.runtime.stdout.strip() except Exception: return None @@ -37,49 +45,51 @@ class MatlabInputSpec(CommandLineInputSpec): """ Basic expected inputs to Matlab interface """ script = traits.Str( - argstr='-r \"%s;exit\"', - desc='m-code to run', - mandatory=True, - position=-1) + argstr='-r "%s;exit"', desc="m-code to run", mandatory=True, position=-1 + ) uses_mcr = traits.Bool( - desc='use MCR interface', - xor=['nodesktop', 'nosplash', 'single_comp_thread'], - nohash=True) + desc="use MCR interface", + xor=["nodesktop", "nosplash", "single_comp_thread"], + nohash=True, + ) nodesktop = traits.Bool( True, - argstr='-nodesktop', + argstr="-nodesktop", usedefault=True, - desc='Switch off desktop mode on unix platforms', - nohash=True) + desc="Switch off desktop mode on unix platforms", + nohash=True, + ) nosplash = traits.Bool( True, - argstr='-nosplash', + argstr="-nosplash", usedefault=True, - desc='Switch of splash screen', - nohash=True) - logfile = File(argstr='-logfile %s', desc='Save matlab output to log') + desc="Switch off splash screen", + nohash=True, + ) + logfile = File(argstr="-logfile %s", desc="Save matlab output to log") single_comp_thread = traits.Bool( - argstr="-singleCompThread", - desc="force single threaded operation", - nohash=True) + argstr="-singleCompThread", desc="force single threaded operation", nohash=True + ) # non-commandline options - mfile = traits.Bool(True, desc='Run m-code using m-file', usedefault=True) + mfile = traits.Bool(True, desc="Run m-code using m-file", usedefault=True) script_file = File( - 'pyscript.m', usedefault=True, desc='Name of file to write m-code to') - paths = InputMultiPath(Directory(), desc='Paths to add to matlabpath') + "pyscript.m", usedefault=True, desc="Name of file to write m-code to" + ) + paths = InputMultiPath(Directory(), desc="Paths to add to matlabpath") prescript = traits.List( - ["ver,", "try,"], - usedefault=True, - desc='prescript to be added before code') + ["ver,", "try,"], usedefault=True, desc="prescript to be added before code" + ) postscript = traits.List( [ - "\n,catch ME,", "fprintf(2,'MATLAB code threw an exception:\\n');", + "\n,catch ME,", + "fprintf(2,'MATLAB code threw an exception:\\n');", "fprintf(2,'%s\\n',ME.message);", "if length(ME.stack) ~= 0, fprintf(2,'File:%s\\nName:%s\\nLine:%d\\n',ME.stack.file,ME.stack.name,ME.stack.line);, end;", - "end;" + "end;", ], - desc='script added after code', - usedefault=True) + desc="script added after code", + usedefault=True, + ) class MatlabCommand(CommandLine): @@ -91,7 +101,7 @@ class MatlabCommand(CommandLine): >>> out = mlab.run() # doctest: +SKIP """ - _cmd = 'matlab' + _cmd = "matlab" _default_matlab_cmd = None _default_mfile = None _default_paths = None @@ -113,13 +123,14 @@ def __init__(self, matlab_cmd=None, **inputs): if self._default_paths and not 
isdefined(self.inputs.paths): self.inputs.paths = self._default_paths - if not isdefined(self.inputs.single_comp_thread) and \ - not isdefined(self.inputs.uses_mcr): - if config.getboolean('execution', 'single_thread_matlab'): + if not isdefined(self.inputs.single_comp_thread) and not isdefined( + self.inputs.uses_mcr + ): + if config.getboolean("execution", "single_thread_matlab"): self.inputs.single_comp_thread = True # For matlab commands force all output to be returned since matlab # does not have a clean way of notifying an error - self.terminal_output = 'allatonce' + self.terminal_output = "allatonce" @classmethod def set_default_matlab_cmd(cls, matlab_cmd): @@ -155,23 +166,23 @@ def set_default_paths(cls, paths): cls._default_paths = paths def _run_interface(self, runtime): - self.terminal_output = 'allatonce' + self.terminal_output = "allatonce" runtime = super(MatlabCommand, self)._run_interface(runtime) try: # Matlab can leave the terminal in a garbled state - os.system('stty sane') + os.system("stty sane") except: # We might be on a system where stty doesn't exist pass - if 'MATLAB code threw an exception' in runtime.stderr: + if "MATLAB code threw an exception" in runtime.stderr: self.raise_exception(runtime) return runtime def _format_arg(self, name, trait_spec, value): - if name in ['script']: + if name in ["script"]: argstr = trait_spec.argstr if self.inputs.uses_mcr: - argstr = '%s' + argstr = "%s" return self._gen_matlab_command(argstr, value) return super(MatlabCommand, self)._format_arg(name, trait_spec, value) @@ -189,33 +200,34 @@ def _gen_matlab_command(self, argstr, script_lines): # prescript takes different default value depending on the mfile argument if mfile: prescript.insert( - 0, - "fprintf(1,'Executing %s at %s:\\n',mfilename(),datestr(now));" + 0, "fprintf(1,'Executing %s at %s:\\n',mfilename(),datestr(now));" ) else: - prescript.insert( - 0, "fprintf(1,'Executing code at %s:\\n',datestr(now));") + prescript.insert(0, "fprintf(1,'Executing code at %s:\\n',datestr(now));") for path in paths: prescript.append("addpath('%s');\n" % path) if not mfile: # clean up the code of comments and replace newlines with commas - script_lines = ','.join([ - line for line in script_lines.split("\n") - if not line.strip().startswith("%") - ]) + script_lines = ",".join( + [ + line + for line in script_lines.split("\n") + if not line.strip().startswith("%") + ] + ) - script_lines = '\n'.join(prescript) + script_lines + '\n'.join( - postscript) + script_lines = "\n".join(prescript) + script_lines + "\n".join(postscript) if mfile: - with open(os.path.join(cwd, self.inputs.script_file), - 'wt') as mfile: + with open(os.path.join(cwd, self.inputs.script_file), "wt") as mfile: mfile.write(script_lines) if self.inputs.uses_mcr: - script = '%s' % (os.path.join(cwd, self.inputs.script_file)) + script = "%s" % (os.path.join(cwd, self.inputs.script_file)) else: script = "addpath('%s');%s" % ( - cwd, self.inputs.script_file.split('.')[0]) + cwd, + self.inputs.script_file.split(".")[0], + ) else: - script = ''.join(script_lines.split('\n')) + script = "".join(script_lines.split("\n")) return argstr % script diff --git a/nipype/interfaces/meshfix.py b/nipype/interfaces/meshfix.py index 53c7f56cfe..d1689ad9b4 100644 --- a/nipype/interfaces/meshfix.py +++ b/nipype/interfaces/meshfix.py @@ -6,145 +6,163 @@ import os.path as op from ..utils.filemanip import split_filename -from .base import (CommandLine, CommandLineInputSpec, traits, TraitedSpec, - isdefined, File) +from .base import ( + 
CommandLine, + CommandLineInputSpec, + traits, + TraitedSpec, + isdefined, + File, +) class MeshFixInputSpec(CommandLineInputSpec): number_of_biggest_shells = traits.Int( - argstr='--shells %d', desc="Only the N biggest shells are kept") + argstr="--shells %d", desc="Only the N biggest shells are kept" + ) epsilon_angle = traits.Range( - argstr='-a %f', + argstr="-a %f", low=0.0, high=2.0, - desc="Epsilon angle in degrees (must be between 0 and 2)") + desc="Epsilon angle in degrees (must be between 0 and 2)", + ) join_overlapping_largest_components = traits.Bool( - argstr='-j', - xor=['join_closest_components'], - desc='Join 2 biggest components if they overlap, remove the rest.') + argstr="-j", + xor=["join_closest_components"], + desc="Join 2 biggest components if they overlap, remove the rest.", + ) join_closest_components = traits.Bool( - argstr='-jc', - xor=['join_closest_components'], - desc='Join the closest pair of components.') + argstr="-jc", + xor=["join_overlapping_largest_components"], + desc="Join the closest pair of components.", + ) quiet_mode = traits.Bool( - argstr='-q', desc="Quiet mode, don't write much to stdout.") + argstr="-q", desc="Quiet mode, don't write much to stdout." + ) - dont_clean = traits.Bool(argstr='--no-clean', desc="Don't Clean") + dont_clean = traits.Bool(argstr="--no-clean", desc="Don't Clean") save_as_stl = traits.Bool( - xor=['save_as_vrml', 'save_as_freesurfer_mesh'], - argstr='--stl', - desc="Result is saved in stereolithographic format (.stl)") + xor=["save_as_vrml", "save_as_freesurfer_mesh"], + argstr="--stl", + desc="Result is saved in stereolithographic format (.stl)", + ) save_as_vrml = traits.Bool( - argstr='--wrl', - xor=['save_as_stl', 'save_as_freesurfer_mesh'], - desc="Result is saved in VRML1.0 format (.wrl)") + argstr="--wrl", + xor=["save_as_stl", "save_as_freesurfer_mesh"], + desc="Result is saved in VRML1.0 format (.wrl)", + ) save_as_freesurfer_mesh = traits.Bool( - argstr='--fsmesh', - xor=['save_as_vrml', 'save_as_stl'], - desc="Result is saved in freesurfer mesh format") + argstr="--fsmesh", + xor=["save_as_vrml", "save_as_stl"], + desc="Result is saved in freesurfer mesh format", + ) - remove_handles = traits.Bool( - argstr='--remove-handles', desc="Remove handles") + remove_handles = traits.Bool(argstr="--remove-handles", desc="Remove handles") uniform_remeshing_steps = traits.Int( - argstr='-u %d', - requires=['uniform_remeshing_vertices'], - desc="Number of steps for uniform remeshing of the whole mesh") + argstr="-u %d", + requires=["uniform_remeshing_vertices"], + desc="Number of steps for uniform remeshing of the whole mesh", + ) uniform_remeshing_vertices = traits.Int( - argstr='--vertices %d', - requires=['uniform_remeshing_steps'], + argstr="--vertices %d", + requires=["uniform_remeshing_steps"], desc="Constrains the number of vertices." - "Must be used with uniform_remeshing_steps") + " Must be used with uniform_remeshing_steps", + ) laplacian_smoothing_steps = traits.Int( - argstr='--smooth %d', - desc="The number of laplacian smoothing steps to apply") + argstr="--smooth %d", desc="The number of laplacian smoothing steps to apply" + ) x_shift = traits.Int( - argstr='--smooth %d', - desc="Shifts the coordinates of the vertices when saving. Output must be in FreeSurfer format" + argstr="--smooth %d", + desc="Shifts the coordinates of the vertices when saving. 
Output must be in FreeSurfer format", ) # Cutting, decoupling, dilation cut_outer = traits.Int( - argstr='--cut-outer %d', - desc="Remove triangles of 1st that are outside of the 2nd shell.") + argstr="--cut-outer %d", + desc="Remove triangles of 1st that are outside of the 2nd shell.", + ) cut_inner = traits.Int( - argstr='--cut-inner %d', - desc="Remove triangles of 1st that are inside of the 2nd shell. Dilate 2nd by N; Fill holes and keep only 1st afterwards." + argstr="--cut-inner %d", + desc="Remove triangles of 1st that are inside of the 2nd shell. Dilate 2nd by N; Fill holes and keep only 1st afterwards.", ) decouple_inin = traits.Int( - argstr='--decouple-inin %d', + argstr="--decouple-inin %d", desc="Treat 1st file as inner, 2nd file as outer component." - "Resolve overlaps by moving inners triangles inwards. Constrain the min distance between the components > d." + " Resolve overlaps by moving inner triangles inwards. Constrain the min distance between the components > d.", ) decouple_outin = traits.Int( - argstr='--decouple-outin %d', + argstr="--decouple-outin %d", desc="Treat 1st file as outer, 2nd file as inner component." - "Resolve overlaps by moving outers triangles inwards. Constrain the min distance between the components > d." + " Resolve overlaps by moving outer triangles inwards. Constrain the min distance between the components > d.", ) decouple_outout = traits.Int( - argstr='--decouple-outout %d', + argstr="--decouple-outout %d", desc="Treat 1st file as outer, 2nd file as inner component." - "Resolve overlaps by moving outers triangles outwards. Constrain the min distance between the components > d." + " Resolve overlaps by moving outer triangles outwards. Constrain the min distance between the components > d.", ) finetuning_inwards = traits.Bool( - argstr='--fineTuneIn ', - requires=['finetuning_distance', 'finetuning_substeps'], + argstr="--fineTuneIn ", + requires=["finetuning_distance", "finetuning_substeps"], position=-3, - desc="Used to fine-tune the minimal distance between surfaces." + desc="Used to fine-tune the minimal distance between surfaces.", ) finetuning_outwards = traits.Bool( - argstr='--fineTuneOut ', - requires=['finetuning_distance', 'finetuning_substeps'], + argstr="--fineTuneOut ", + requires=["finetuning_distance", "finetuning_substeps"], position=-3, - xor=['finetuning_inwards'], - desc='Similar to finetuning_inwards, but ensures minimal distance in the other direction' + xor=["finetuning_inwards"], + desc="Similar to finetuning_inwards, but ensures minimal distance in the other direction", ) finetuning_distance = traits.Float( - argstr='%f', - requires=['finetuning_substeps'], + argstr="%f", + requires=["finetuning_substeps"], position=-2, desc="Used to fine-tune the minimal distance between surfaces." - "A minimal distance d is ensured, and reached in n substeps. When using the surfaces for subsequent volume meshing by gmsh, this step prevent too flat tetrahedra2)" + " A minimal distance d is ensured, and reached in n substeps. When using the surfaces for subsequent volume meshing by gmsh, this step prevents too flat tetrahedra.", ) finetuning_substeps = traits.Int( - argstr='%d', - requires=['finetuning_distance'], + argstr="%d", + requires=["finetuning_distance"], position=-1, desc="Used to fine-tune the minimal distance between surfaces." - "A minimal distance d is ensured, and reached in n substeps. 
When using the surfaces for subsequent volume meshing by gmsh, this step prevent too flat tetrahedra2)" + " A minimal distance d is ensured, and reached in n substeps. When using the surfaces for subsequent volume meshing by gmsh, this step prevents too flat tetrahedra.", ) dilation = traits.Int( - argstr='--dilate %d', - desc="Dilate the surface by d. d < 0 means shrinking.") + argstr="--dilate %d", desc="Dilate the surface by d. d < 0 means shrinking." + ) set_intersections_to_one = traits.Bool( - argstr='--intersect', + argstr="--intersect", desc="If the mesh contains intersections, return value = 1." - "If saved in gmsh format, intersections will be highlighted.") + " If saved in gmsh format, intersections will be highlighted.", + ) in_file1 = File(exists=True, argstr="%s", position=1, mandatory=True) in_file2 = File(exists=True, argstr="%s", position=2) output_type = traits.Enum( - 'off', ['stl', 'msh', 'wrl', 'vrml', 'fs', 'off'], + "off", + ["stl", "msh", "wrl", "vrml", "fs", "off"], usedefault=True, - desc='The output type to save the file as.') + desc="The output type to save the file as.", + ) out_filename = File( - genfile=True, - argstr="-o %s", - desc='The output filename for the fixed mesh file') + genfile=True, argstr="-o %s", desc="The output filename for the fixed mesh file" + ) class MeshFixOutputSpec(TraitedSpec): - mesh_file = File(exists=True, desc='The output mesh file') + mesh_file = File(exists=True, desc="The output mesh file") class MeshFix(CommandLine): @@ -177,7 +195,8 @@ class MeshFix(CommandLine): >>> fix.cmdline 'meshfix lh-pial.stl rh-pial.stl -o lh-pial_fixed.off' """ - _cmd = 'meshfix' + + _cmd = "meshfix" input_spec = MeshFixInputSpec output_spec = MeshFixOutputSpec @@ -185,33 +204,32 @@ def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_filename): path, name, ext = split_filename(self.inputs.out_filename) - ext = ext.replace('.', '') - out_types = ['stl', 'msh', 'wrl', 'vrml', 'fs', 'off'] + ext = ext.replace(".", "") + out_types = ["stl", "msh", "wrl", "vrml", "fs", "off"] # Make sure that the output filename uses one of the possible file types if any(ext == out_type.lower() for out_type in out_types): - outputs['mesh_file'] = op.abspath(self.inputs.out_filename) + outputs["mesh_file"] = op.abspath(self.inputs.out_filename) else: - outputs['mesh_file'] = op.abspath( - name + '.' + self.inputs.output_type) + outputs["mesh_file"] = op.abspath(name + "." 
+ self.inputs.output_type) else: - outputs['mesh_file'] = op.abspath(self._gen_outfilename()) + outputs["mesh_file"] = op.abspath(self._gen_outfilename()) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file1) - if self.inputs.save_as_freesurfer_mesh or self.inputs.output_type == 'fs': - self.inputs.output_type = 'fs' + if self.inputs.save_as_freesurfer_mesh or self.inputs.output_type == "fs": + self.inputs.output_type = "fs" self.inputs.save_as_freesurfer_mesh = True - if self.inputs.save_as_stl or self.inputs.output_type == 'stl': - self.inputs.output_type = 'stl' + if self.inputs.save_as_stl or self.inputs.output_type == "stl": + self.inputs.output_type = "stl" self.inputs.save_as_stl = True - if self.inputs.save_as_vrml or self.inputs.output_type == 'vrml': - self.inputs.output_type = 'vrml' + if self.inputs.save_as_vrml or self.inputs.output_type == "vrml": + self.inputs.output_type = "vrml" self.inputs.save_as_vrml = True - return name + '_fixed.' + self.inputs.output_type + return name + "_fixed." + self.inputs.output_type diff --git a/nipype/interfaces/minc/__init__.py b/nipype/interfaces/minc/__init__.py index 1ebea58b64..c593ea998b 100644 --- a/nipype/interfaces/minc/__init__.py +++ b/nipype/interfaces/minc/__init__.py @@ -9,7 +9,7 @@ http://carlo-hamalainen.net """ -from .base import (Info) +from .base import Info from .minc import ( Average, diff --git a/nipype/interfaces/minc/base.py b/nipype/interfaces/minc/base.py index 9436c2c3d1..5aca3e434e 100644 --- a/nipype/interfaces/minc/base.py +++ b/nipype/interfaces/minc/base.py @@ -14,7 +14,7 @@ from ..base import CommandLine -warnings.filterwarnings('always', category=UserWarning) +warnings.filterwarnings("always", category=UserWarning) def check_minc(): @@ -52,47 +52,46 @@ def version(): """ try: clout = CommandLine( - command='mincinfo', - args='-version', - terminal_output='allatonce').run() + command="mincinfo", args="-version", terminal_output="allatonce" + ).run() except IOError: return None out = clout.runtime.stdout def read_program_version(s): - if 'program' in s: - return s.split(':')[1].strip() + if "program" in s: + return s.split(":")[1].strip() return None def read_libminc_version(s): - if 'libminc' in s: - return s.split(':')[1].strip() + if "libminc" in s: + return s.split(":")[1].strip() return None def read_netcdf_version(s): - if 'netcdf' in s: - return ' '.join(s.split(':')[1:]).strip() + if "netcdf" in s: + return " ".join(s.split(":")[1:]).strip() return None def read_hdf5_version(s): - if 'HDF5' in s: - return s.split(':')[1].strip() + if "HDF5" in s: + return s.split(":")[1].strip() return None versions = { - 'minc': None, - 'libminc': None, - 'netcdf': None, - 'hdf5': None, + "minc": None, + "libminc": None, + "netcdf": None, + "hdf5": None, } - for l in out.split('\n'): + for l in out.split("\n"): for (name, f) in [ - ('minc', read_program_version), - ('libminc', read_libminc_version), - ('netcdf', read_netcdf_version), - ('hdf5', read_hdf5_version), + ("minc", read_program_version), + ("libminc", read_libminc_version), + ("netcdf", read_netcdf_version), + ("hdf5", read_hdf5_version), ]: if f(l) is not None: versions[name] = f(l) @@ -126,11 +125,13 @@ def aggregate_filename(files, new_suffix): path = os.getcwd() - if common_prefix == '': + if common_prefix == "": return os.path.abspath( os.path.join( - path, - 
os.path.splitext(files[0])[0] + '_' + new_suffix + '.mnc')) + path, os.path.splitext(files[0])[0] + "_" + new_suffix + ".mnc" + ) + ) else: return os.path.abspath( - os.path.join(path, common_prefix + '_' + new_suffix + '.mnc')) + os.path.join(path, common_prefix + "_" + new_suffix + ".mnc") + ) diff --git a/nipype/interfaces/minc/minc.py b/nipype/interfaces/minc/minc.py index 791c7bbc64..b4dfa1dac8 100644 --- a/nipype/interfaces/minc/minc.py +++ b/nipype/interfaces/minc/minc.py @@ -14,203 +14,233 @@ import re import warnings -from ..base import (TraitedSpec, CommandLineInputSpec, CommandLine, - StdOutCommandLineInputSpec, StdOutCommandLine, File, - Directory, InputMultiPath, OutputMultiPath, traits, - isdefined) +from ..base import ( + TraitedSpec, + CommandLineInputSpec, + CommandLine, + StdOutCommandLineInputSpec, + StdOutCommandLine, + File, + Directory, + InputMultiPath, + OutputMultiPath, + traits, + isdefined, +) from .base import aggregate_filename -warnings.filterwarnings('always', category=UserWarning) +warnings.filterwarnings("always", category=UserWarning) class ExtractInputSpec(StdOutCommandLineInputSpec): input_file = File( - desc='input file', - exists=True, - mandatory=True, - argstr='%s', - position=-2, + desc="input file", exists=True, mandatory=True, argstr="%s", position=-2, ) output_file = File( - desc='output file', + desc="output file", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s.raw', - keep_extension=False) + name_template="%s.raw", + keep_extension=False, + ) _xor_write = ( - 'write_ascii', - 'write_ascii', - 'write_byte', - 'write_short', - 'write_int', - 'write_long', - 'write_float', - 'write_double', - 'write_signed', - 'write_unsigned', + "write_ascii", + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + "write_signed", + "write_unsigned", ) write_ascii = traits.Bool( - desc='Write out data as ascii strings (default).', - argstr='-ascii', - xor=_xor_write) + desc="Write out data as ascii strings (default).", + argstr="-ascii", + xor=_xor_write, + ) write_byte = traits.Bool( - desc='Write out data as bytes.', argstr='-byte', xor=_xor_write) + desc="Write out data as bytes.", argstr="-byte", xor=_xor_write + ) write_short = traits.Bool( - desc='Write out data as short integers.', - argstr='-short', - xor=_xor_write) + desc="Write out data as short integers.", argstr="-short", xor=_xor_write + ) write_int = traits.Bool( - desc='Write out data as 32-bit integers.', - argstr='-int', - xor=_xor_write) + desc="Write out data as 32-bit integers.", argstr="-int", xor=_xor_write + ) write_long = traits.Bool( - desc='Superseded by write_int.', argstr='-long', xor=_xor_write) + desc="Superseded by write_int.", argstr="-long", xor=_xor_write + ) write_float = traits.Bool( - desc='Write out data as single precision floating-point values.', - argstr='-float', - xor=_xor_write) + desc="Write out data as single precision floating-point values.", + argstr="-float", + xor=_xor_write, + ) write_double = traits.Bool( - desc='Write out data as double precision floating-point values.', - argstr='-double', - xor=_xor_write) + desc="Write out data as double precision floating-point values.", + argstr="-double", + xor=_xor_write, + ) - _xor_signed = ('write_signed', 'write_unsigned') + _xor_signed = ("write_signed", "write_unsigned") write_signed = traits.Bool( - desc='Write out signed 
data.", argstr="-signed", xor=_xor_signed + ) write_unsigned = traits.Bool( - desc='Write out unsigned data.', argstr='-unsigned', xor=_xor_signed) + desc="Write out unsigned data.", argstr="-unsigned", xor=_xor_signed + ) write_range = traits.Tuple( traits.Float, traits.Float, - argstr='-range %s %s', - desc= - 'Specify the range of output values\nDefault value: 1.79769e+308 1.79769e+308.', + argstr="-range %s %s", + desc="Specify the range of output values\nDefault value: 1.79769e+308 1.79769e+308.", ) _xor_normalize = ( - 'normalize', - 'nonormalize', + "normalize", + "nonormalize", ) normalize = traits.Bool( - desc='Normalize integer pixel values to file max and min.', - argstr='-normalize', - xor=_xor_normalize) + desc="Normalize integer pixel values to file max and min.", + argstr="-normalize", + xor=_xor_normalize, + ) nonormalize = traits.Bool( - desc='Turn off pixel normalization.', - argstr='-nonormalize', - xor=_xor_normalize) + desc="Turn off pixel normalization.", argstr="-nonormalize", xor=_xor_normalize + ) image_range = traits.Tuple( traits.Float, traits.Float, - desc='Specify the range of real image values for normalization.', - argstr='-image_range %s %s') + desc="Specify the range of real image values for normalization.", + argstr="-image_range %s %s", + ) image_minimum = traits.Float( - desc=('Specify the minimum real image value for normalization.' - 'Default value: 1.79769e+308.'), - argstr='-image_minimum %s') + desc=( + "Specify the minimum real image value for normalization." + "Default value: 1.79769e+308." + ), + argstr="-image_minimum %s", + ) image_maximum = traits.Float( - desc=('Specify the maximum real image value for normalization.' - 'Default value: 1.79769e+308.'), - argstr='-image_maximum %s') + desc=( + "Specify the maximum real image value for normalization." + "Default value: 1.79769e+308." + ), + argstr="-image_maximum %s", + ) start = InputMultiPath( traits.Int, - desc='Specifies corner of hyperslab (C conventions for indices).', - sep=',', - argstr='-start %s', + desc="Specifies corner of hyperslab (C conventions for indices).", + sep=",", + argstr="-start %s", ) count = InputMultiPath( traits.Int, - desc='Specifies edge lengths of hyperslab to read.', - sep=',', - argstr='-count %s', + desc="Specifies edge lengths of hyperslab to read.", + sep=",", + argstr="-count %s", ) # FIXME Can we make sure that len(start) == len(count)? 
- _xor_flip = ('flip_positive_direction', 'flip_negative_direction', - 'flip_any_direction') + _xor_flip = ( + "flip_positive_direction", + "flip_negative_direction", + "flip_any_direction", + ) flip_positive_direction = traits.Bool( - desc='Flip images to always have positive direction.', - argstr='-positive_direction', - xor=_xor_flip) + desc="Flip images to always have positive direction.", + argstr="-positive_direction", + xor=_xor_flip, + ) flip_negative_direction = traits.Bool( - desc='Flip images to always have negative direction.', - argstr='-negative_direction', - xor=_xor_flip) + desc="Flip images to always have negative direction.", + argstr="-negative_direction", + xor=_xor_flip, + ) flip_any_direction = traits.Bool( - desc='Do not flip images (Default).', - argstr='-any_direction', - xor=_xor_flip) + desc="Do not flip images (Default).", argstr="-any_direction", xor=_xor_flip + ) - _xor_x_flip = ('flip_x_positive', 'flip_x_negative', 'flip_x_any') + _xor_x_flip = ("flip_x_positive", "flip_x_negative", "flip_x_any") flip_x_positive = traits.Bool( - desc='Flip images to give positive xspace:step value (left-to-right).', - argstr='+xdirection', - xor=_xor_x_flip) + desc="Flip images to give positive xspace:step value (left-to-right).", + argstr="+xdirection", + xor=_xor_x_flip, + ) flip_x_negative = traits.Bool( - desc='Flip images to give negative xspace:step value (right-to-left).', - argstr='-xdirection', - xor=_xor_x_flip) + desc="Flip images to give negative xspace:step value (right-to-left).", + argstr="-xdirection", + xor=_xor_x_flip, + ) flip_x_any = traits.Bool( - desc='Don\'t flip images along x-axis (default).', - argstr='-xanydirection', - xor=_xor_x_flip) + desc="Don't flip images along x-axis (default).", + argstr="-xanydirection", + xor=_xor_x_flip, + ) - _xor_y_flip = ('flip_y_positive', 'flip_y_negative', 'flip_y_any') + _xor_y_flip = ("flip_y_positive", "flip_y_negative", "flip_y_any") flip_y_positive = traits.Bool( - desc='Flip images to give positive yspace:step value (post-to-ant).', - argstr='+ydirection', - xor=_xor_y_flip) + desc="Flip images to give positive yspace:step value (post-to-ant).", + argstr="+ydirection", + xor=_xor_y_flip, + ) flip_y_negative = traits.Bool( - desc='Flip images to give negative yspace:step value (ant-to-post).', - argstr='-ydirection', - xor=_xor_y_flip) + desc="Flip images to give negative yspace:step value (ant-to-post).", + argstr="-ydirection", + xor=_xor_y_flip, + ) flip_y_any = traits.Bool( - desc='Don\'t flip images along y-axis (default).', - argstr='-yanydirection', - xor=_xor_y_flip) + desc="Don't flip images along y-axis (default).", + argstr="-yanydirection", + xor=_xor_y_flip, + ) - _xor_z_flip = ('flip_z_positive', 'flip_z_negative', 'flip_z_any') + _xor_z_flip = ("flip_z_positive", "flip_z_negative", "flip_z_any") flip_z_positive = traits.Bool( - desc='Flip images to give positive zspace:step value (inf-to-sup).', - argstr='+zdirection', - xor=_xor_z_flip) + desc="Flip images to give positive zspace:step value (inf-to-sup).", + argstr="+zdirection", + xor=_xor_z_flip, + ) flip_z_negative = traits.Bool( - desc='Flip images to give negative zspace:step value (sup-to-inf).', - argstr='-zdirection', - xor=_xor_z_flip) + desc="Flip images to give negative zspace:step value (sup-to-inf).", + argstr="-zdirection", + xor=_xor_z_flip, + ) flip_z_any = traits.Bool( - desc='Don\'t flip images along z-axis (default).', - argstr='-zanydirection', - xor=_xor_z_flip) + desc="Don't flip images along z-axis (default).", + 
argstr="-zanydirection", + xor=_xor_z_flip, + ) class ExtractOutputSpec(TraitedSpec): - output_file = File(desc='output file in raw/text format', exists=True) + output_file = File(desc="output file in raw/text format", exists=True) class Extract(StdOutCommandLine): @@ -231,89 +261,98 @@ class Extract(StdOutCommandLine): input_spec = ExtractInputSpec output_spec = ExtractOutputSpec - _cmd = 'mincextract' + _cmd = "mincextract" class ToRawInputSpec(StdOutCommandLineInputSpec): input_file = File( - desc='input file', - exists=True, - mandatory=True, - argstr='%s', - position=-2, + desc="input file", exists=True, mandatory=True, argstr="%s", position=-2, ) output_file = File( - desc='output file', + desc="output file", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s.raw', - keep_extension=False) + name_template="%s.raw", + keep_extension=False, + ) - _xor_write = ('write_byte', 'write_short', 'write_int', 'write_long', - 'write_float', 'write_double') + _xor_write = ( + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + ) write_byte = traits.Bool( - desc='Write out data as bytes.', argstr='-byte', xor=_xor_write) + desc="Write out data as bytes.", argstr="-byte", xor=_xor_write + ) write_short = traits.Bool( - desc='Write out data as short integers.', - argstr='-short', - xor=_xor_write) + desc="Write out data as short integers.", argstr="-short", xor=_xor_write + ) write_int = traits.Bool( - desc='Write out data as 32-bit integers.', - argstr='-int', - xor=_xor_write) + desc="Write out data as 32-bit integers.", argstr="-int", xor=_xor_write + ) write_long = traits.Bool( - desc='Superseded by write_int.', argstr='-long', xor=_xor_write) + desc="Superseded by write_int.", argstr="-long", xor=_xor_write + ) write_float = traits.Bool( - desc='Write out data as single precision floating-point values.', - argstr='-float', - xor=_xor_write) + desc="Write out data as single precision floating-point values.", + argstr="-float", + xor=_xor_write, + ) write_double = traits.Bool( - desc='Write out data as double precision floating-point values.', - argstr='-double', - xor=_xor_write) + desc="Write out data as double precision floating-point values.", + argstr="-double", + xor=_xor_write, + ) - _xor_signed = ('write_signed', 'write_unsigned') + _xor_signed = ("write_signed", "write_unsigned") write_signed = traits.Bool( - desc='Write out signed data.', argstr='-signed', xor=_xor_signed) + desc="Write out signed data.", argstr="-signed", xor=_xor_signed + ) write_unsigned = traits.Bool( - desc='Write out unsigned data.', argstr='-unsigned', xor=_xor_signed) + desc="Write out unsigned data.", argstr="-unsigned", xor=_xor_signed + ) write_range = traits.Tuple( traits.Float, traits.Float, - argstr='-range %s %s', - desc=('Specify the range of output values.' - 'Default value: 1.79769e+308 1.79769e+308.'), + argstr="-range %s %s", + desc=( + "Specify the range of output values." + "Default value: 1.79769e+308 1.79769e+308." 
+ ), ) _xor_normalize = ( - 'normalize', - 'nonormalize', + "normalize", + "nonormalize", ) normalize = traits.Bool( - desc='Normalize integer pixel values to file max and min.', - argstr='-normalize', - xor=_xor_normalize) + desc="Normalize integer pixel values to file max and min.", + argstr="-normalize", + xor=_xor_normalize, + ) nonormalize = traits.Bool( - desc='Turn off pixel normalization.', - argstr='-nonormalize', - xor=_xor_normalize) + desc="Turn off pixel normalization.", argstr="-nonormalize", xor=_xor_normalize + ) class ToRawOutputSpec(TraitedSpec): - output_file = File(desc='output file in raw format', exists=True) + output_file = File(desc="output file in raw format", exists=True) class ToRaw(StdOutCommandLine): @@ -335,40 +374,42 @@ class ToRaw(StdOutCommandLine): input_spec = ToRawInputSpec output_spec = ToRawOutputSpec - _cmd = 'minctoraw' + _cmd = "minctoraw" class ConvertInputSpec(CommandLineInputSpec): input_file = File( - desc='input file for converting', + desc="input file for converting", exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-2, ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_convert_output.mnc') + name_template="%s_convert_output.mnc", + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) - two = traits.Bool(desc='Create a MINC 2 output file.', argstr='-2') + default_value=True, + ) + two = traits.Bool(desc="Create a MINC 2 output file.", argstr="-2") template = traits.Bool( - desc= - ('Create a template file. The dimensions, variables, and' - 'attributes of the input file are preserved but all data it set to zero.' - ), - argstr='-template', + desc=( + "Create a template file. The dimensions, variables, and " + "attributes of the input file are preserved but all data is set to zero."
+ ), + argstr="-template", ) compression = traits.Enum( @@ -382,20 +423,19 @@ class ConvertInputSpec(CommandLineInputSpec): 7, 8, 9, - argstr='-compress %s', - desc='Set the compression level, from 0 (disabled) to 9 (maximum).', + argstr="-compress %s", + desc="Set the compression level, from 0 (disabled) to 9 (maximum).", ) chunk = traits.Range( low=0, - desc= - 'Set the target block size for chunking (0 default, >1 block size).', - argstr='-chunk %d', + desc="Set the target block size for chunking (0 default, >1 block size).", + argstr="-chunk %d", ) class ConvertOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Convert(CommandLine): @@ -412,42 +452,43 @@ class Convert(CommandLine): input_spec = ConvertInputSpec output_spec = ConvertOutputSpec - _cmd = 'mincconvert' + _cmd = "mincconvert" class CopyInputSpec(CommandLineInputSpec): input_file = File( - desc='input file to copy', + desc="input file to copy", exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-2, ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_copy.mnc') + name_template="%s_copy.mnc", + ) - _xor_pixel = ('pixel_values', 'real_values') + _xor_pixel = ("pixel_values", "real_values") pixel_values = traits.Bool( - desc='Copy pixel values as is.', - argstr='-pixel_values', - xor=_xor_pixel) + desc="Copy pixel values as is.", argstr="-pixel_values", xor=_xor_pixel + ) real_values = traits.Bool( - desc='Copy real pixel intensities (default).', - argstr='-real_values', - xor=_xor_pixel) + desc="Copy real pixel intensities (default).", + argstr="-real_values", + xor=_xor_pixel, + ) class CopyOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Copy(CommandLine): @@ -464,72 +505,75 @@ class Copy(CommandLine): input_spec = CopyInputSpec output_spec = CopyOutputSpec - _cmd = 'minccopy' + _cmd = "minccopy" class ToEcatInputSpec(CommandLineInputSpec): input_file = File( - desc='input file to convert', + desc="input file to convert", exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-2, ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_to_ecat.v', - keep_extension=False) + name_template="%s_to_ecat.v", + keep_extension=False, + ) ignore_patient_variable = traits.Bool( - desc='Ignore informations from the minc patient variable.', - argstr='-ignore_patient_variable', + desc="Ignore information from the minc patient variable.", + argstr="-ignore_patient_variable", ) ignore_study_variable = traits.Bool( - desc='Ignore informations from the minc study variable.', - argstr='-ignore_study_variable', + desc="Ignore information from the minc study variable.", + argstr="-ignore_study_variable", ) ignore_acquisition_variable = traits.Bool( - desc='Ignore informations from the minc acquisition variable.', - argstr='-ignore_acquisition_variable', + desc="Ignore information from the minc acquisition variable.", + argstr="-ignore_acquisition_variable", ) ignore_ecat_acquisition_variable = traits.Bool( - desc='Ignore informations from the minc ecat_acquisition variable.', - 
argstr='-ignore_ecat_acquisition_variable', + desc="Ignore information from the minc ecat_acquisition variable.", + argstr="-ignore_ecat_acquisition_variable", ) ignore_ecat_main = traits.Bool( - desc='Ignore informations from the minc ecat-main variable.', - argstr='-ignore_ecat_main', + desc="Ignore information from the minc ecat-main variable.", + argstr="-ignore_ecat_main", ) ignore_ecat_subheader_variable = traits.Bool( - desc='Ignore informations from the minc ecat-subhdr variable.', - argstr='-ignore_ecat_subheader_variable', + desc="Ignore information from the minc ecat-subhdr variable.", + argstr="-ignore_ecat_subheader_variable", ) no_decay_corr_fctr = traits.Bool( - desc='Do not compute the decay correction factors', - argstr='-no_decay_corr_fctr', + desc="Do not compute the decay correction factors", + argstr="-no_decay_corr_fctr", ) voxels_as_integers = traits.Bool( - desc=('Voxel values are treated as integers, scale and' - 'calibration factors are set to unity'), - argstr='-label', + desc=( + "Voxel values are treated as integers, scale and" + "calibration factors are set to unity" + ), + argstr="-label", ) class ToEcatOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class ToEcat(CommandLine): @@ -552,85 +596,84 @@ class ToEcat(CommandLine): input_spec = ToEcatInputSpec output_spec = ToEcatOutputSpec - _cmd = 'minctoecat' + _cmd = "minctoecat" class DumpInputSpec(StdOutCommandLineInputSpec): input_file = File( - desc='input file', - exists=True, - mandatory=True, - argstr='%s', - position=-2, + desc="input file", exists=True, mandatory=True, argstr="%s", position=-2, ) output_file = File( - desc='output file', + desc="output file", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_dump.txt', - keep_extension=False) + name_template="%s_dump.txt", + keep_extension=False, + ) _xor_coords_or_header = ( - 'coordinate_data', - 'header_data', + "coordinate_data", + "header_data", ) coordinate_data = traits.Bool( - desc='Coordinate variable data and header information.', - argstr='-c', - xor=_xor_coords_or_header) + desc="Coordinate variable data and header information.", + argstr="-c", + xor=_xor_coords_or_header, + ) header_data = traits.Bool( - desc='Header information only, no data.', - argstr='-h', - xor=_xor_coords_or_header) + desc="Header information only, no data.", argstr="-h", xor=_xor_coords_or_header + ) _xor_annotations = ( - 'annotations_brief', - 'annotations_full', + "annotations_brief", + "annotations_full", ) annotations_brief = traits.Enum( - 'c', - 'f', - argstr='-b %s', - desc='Brief annotations for C or Fortran indices in data.', - xor=_xor_annotations) + "c", + "f", + argstr="-b %s", + desc="Brief annotations for C or Fortran indices in data.", + xor=_xor_annotations, + ) annotations_full = traits.Enum( - 'c', - 'f', - argstr='-f %s', - desc='Full annotations for C or Fortran indices in data.', - xor=_xor_annotations) + "c", + "f", + argstr="-f %s", + desc="Full annotations for C or Fortran indices in data.", + xor=_xor_annotations, + ) variables = InputMultiPath( traits.Str, - desc='Output data for specified variables only.', - sep=',', - argstr='-v %s') + desc="Output data for specified variables only.", + sep=",", + argstr="-v %s", + ) line_length = traits.Range( - low=0, - desc='Line length maximum in data section (default 80).', - argstr='-l %d') + low=0, desc="Line length maximum in data section (default 
80).", argstr="-l %d" + ) netcdf_name = traits.Str( - desc='Name for netCDF (default derived from file name).', - argstr='-n %s') + desc="Name for netCDF (default derived from file name).", argstr="-n %s" + ) precision = traits.Either( traits.Int(), traits.Tuple(traits.Int, traits.Int), - desc='Display floating-point values with less precision', - argstr='%s', + desc="Display floating-point values with less precision", + argstr="%s", ) # See _format_arg in Dump for actual formatting. class DumpOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Dump(StdOutCommandLine): @@ -652,208 +695,227 @@ class Dump(StdOutCommandLine): input_spec = DumpInputSpec output_spec = DumpOutputSpec - _cmd = 'mincdump' + _cmd = "mincdump" def _format_arg(self, name, spec, value): - if name == 'precision': + if name == "precision": if isinstance(value, int): - return '-p %d' % value - elif isinstance(value, tuple) and isinstance( - value[0], int) and isinstance(value[1], int): - return '-p %d,%d' % ( - value[0], - value[1], - ) + return "-p %d" % value + elif ( + isinstance(value, tuple) + and isinstance(value[0], int) + and isinstance(value[1], int) + ): + return "-p %d,%d" % (value[0], value[1],) else: - raise ValueError('Invalid precision argument: ' + str(value)) + raise ValueError("Invalid precision argument: " + str(value)) return super(Dump, self)._format_arg(name, spec, value) class AverageInputSpec(CommandLineInputSpec): - _xor_input_files = ('input_files', 'filelist') + _xor_input_files = ("input_files", "filelist") input_files = InputMultiPath( File(exists=True), - desc='input file(s)', + desc="input file(s)", mandatory=True, - sep=' ', - argstr='%s', + sep=" ", + argstr="%s", position=-2, - xor=_xor_input_files) + xor=_xor_input_files, + ) filelist = File( - desc='Specify the name of a file containing input file names.', - argstr='-filelist %s', + desc="Specify the name of a file containing input file names.", + argstr="-filelist %s", exists=True, mandatory=True, - xor=_xor_input_files) + xor=_xor_input_files, + ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_files'], + name_source=["input_files"], hash_files=False, - name_template='%s_averaged.mnc') + name_template="%s_averaged.mnc", + ) - two = traits.Bool(desc='Create a MINC 2 output file.', argstr='-2') + two = traits.Bool(desc="Create a MINC 2 output file.", argstr="-2") clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) _xor_verbose = ( - 'verbose', - 'quiet', + "verbose", + "quiet", ) verbose = traits.Bool( - desc='Print out log messages (default).', - argstr='-verbose', - xor=_xor_verbose) + desc="Print out log messages (default).", argstr="-verbose", xor=_xor_verbose + ) quiet = traits.Bool( - desc='Do not print out log messages.', - argstr='-quiet', - xor=_xor_verbose) + desc="Do not print out log messages.", argstr="-quiet", xor=_xor_verbose + ) - debug = traits.Bool(desc='Print out debugging messages.', argstr='-debug') + debug = traits.Bool(desc="Print out debugging messages.", argstr="-debug") _xor_check_dimensions = ( - 'check_dimensions', - 'no_check_dimensions', + "check_dimensions", + "no_check_dimensions", ) check_dimensions = traits.Bool( - desc='Check that dimension info matches across files 
(default).', - argstr='-check_dimensions', - xor=_xor_check_dimensions) + desc="Check that dimension info matches across files (default).", + argstr="-check_dimensions", + xor=_xor_check_dimensions, + ) no_check_dimensions = traits.Bool( - desc='Do not check dimension info.', - argstr='-nocheck_dimensions', - xor=_xor_check_dimensions) + desc="Do not check dimension info.", + argstr="-nocheck_dimensions", + xor=_xor_check_dimensions, + ) _xor_format = ( - 'format_filetype', - 'format_byte', - 'format_short', - 'format_int', - 'format_long', - 'format_float', - 'format_double', - 'format_signed', - 'format_unsigned', + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", ) format_filetype = traits.Bool( - desc='Use data type of first file (default).', - argstr='-filetype', - xor=_xor_format) + desc="Use data type of first file (default).", + argstr="-filetype", + xor=_xor_format, + ) format_byte = traits.Bool( - desc='Write out byte data.', argstr='-byte', xor=_xor_format) + desc="Write out byte data.", argstr="-byte", xor=_xor_format + ) format_short = traits.Bool( - desc='Write out short integer data.', argstr='-short', xor=_xor_format) + desc="Write out short integer data.", argstr="-short", xor=_xor_format + ) format_int = traits.Bool( - desc='Write out 32-bit integer data.', argstr='-int', xor=_xor_format) + desc="Write out 32-bit integer data.", argstr="-int", xor=_xor_format + ) format_long = traits.Bool( - desc='Superseded by -int.', argstr='-long', xor=_xor_format) + desc="Superseded by -int.", argstr="-long", xor=_xor_format + ) format_float = traits.Bool( - desc='Write out single-precision floating-point data.', - argstr='-float', - xor=_xor_format) + desc="Write out single-precision floating-point data.", + argstr="-float", + xor=_xor_format, + ) format_double = traits.Bool( - desc='Write out double-precision floating-point data.', - argstr='-double', - xor=_xor_format) + desc="Write out double-precision floating-point data.", + argstr="-double", + xor=_xor_format, + ) format_signed = traits.Bool( - desc='Write signed integer data.', argstr='-signed', xor=_xor_format) + desc="Write signed integer data.", argstr="-signed", xor=_xor_format + ) format_unsigned = traits.Bool( - desc='Write unsigned integer data (default).', - argstr='-unsigned', - xor=_xor_format) + desc="Write unsigned integer data (default).", + argstr="-unsigned", + xor=_xor_format, + ) max_buffer_size_in_kb = traits.Range( low=0, - desc='Specify the maximum size of the internal buffers (in kbytes).', + desc="Specify the maximum size of the internal buffers (in kbytes).", value=4096, usedefault=True, - argstr='-max_buffer_size_in_kb %d', + argstr="-max_buffer_size_in_kb %d", ) _xor_normalize = ( - 'normalize', - 'nonormalize', + "normalize", + "nonormalize", ) normalize = traits.Bool( - desc='Normalize data sets for mean intensity.', - argstr='-normalize', - xor=_xor_normalize) + desc="Normalize data sets for mean intensity.", + argstr="-normalize", + xor=_xor_normalize, + ) nonormalize = traits.Bool( - desc='Do not normalize data sets (default).', - argstr='-nonormalize', - xor=_xor_normalize) + desc="Do not normalize data sets (default).", + argstr="-nonormalize", + xor=_xor_normalize, + ) voxel_range = traits.Tuple( traits.Int, traits.Int, - argstr='-range %d %d', - desc='Valid range for output data.') + argstr="-range %d %d", + desc="Valid range for output data.", + ) - sdfile = File( - desc='Specify 
an output sd file (default=none).', argstr='-sdfile %s') + sdfile = File(desc="Specify an output sd file (default=none).", argstr="-sdfile %s") - _xor_copy_header = ('copy_header', 'no_copy_header') + _xor_copy_header = ("copy_header", "no_copy_header") copy_header = traits.Bool( - desc= - 'Copy all of the header from the first file (default for one file).', - argstr='-copy_header', - xor=_xor_copy_header) + desc="Copy all of the header from the first file (default for one file).", + argstr="-copy_header", + xor=_xor_copy_header, + ) no_copy_header = traits.Bool( - desc= - 'Do not copy all of the header from the first file (default for many files)).', - argstr='-nocopy_header', - xor=_xor_copy_header) + desc="Do not copy all of the header from the first file (default for many files)).", + argstr="-nocopy_header", + xor=_xor_copy_header, + ) avgdim = traits.Str( - desc='Specify a dimension along which we wish to average.', - argstr='-avgdim %s') + desc="Specify a dimension along which we wish to average.", argstr="-avgdim %s" + ) binarize = traits.Bool( - desc='Binarize the volume by looking for values in a given range.', - argstr='-binarize') + desc="Binarize the volume by looking for values in a given range.", + argstr="-binarize", + ) binrange = traits.Tuple( traits.Float, traits.Float, - argstr='-binrange %s %s', - desc= - 'Specify a range for binarization. Default value: 1.79769e+308 -1.79769e+308.' + argstr="-binrange %s %s", + desc="Specify a range for binarization. Default value: 1.79769e+308 -1.79769e+308.", ) binvalue = traits.Float( - desc=('Specify a target value (+/- 0.5) for' - 'binarization. Default value: -1.79769e+308'), - argstr='-binvalue %s') + desc=( + "Specify a target value (+/- 0.5) for" + "binarization. Default value: -1.79769e+308" + ), + argstr="-binvalue %s", + ) weights = InputMultiPath( traits.Str, desc='Specify weights for averaging (",,...").', - sep=',', - argstr='-weights %s', + sep=",", + argstr="-weights %s", ) width_weighted = traits.Bool( - desc='Weight by dimension widths when -avgdim is used.', - argstr='-width_weighted', - requires=('avgdim', )) + desc="Weight by dimension widths when -avgdim is used.", + argstr="-width_weighted", + requires=("avgdim",), + ) class AverageOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Average(CommandLine): @@ -873,43 +935,46 @@ class Average(CommandLine): input_spec = AverageInputSpec output_spec = AverageOutputSpec - _cmd = 'mincaverage' + _cmd = "mincaverage" class BlobInputSpec(CommandLineInputSpec): input_file = File( - desc='input file to blob', + desc="input file to blob", exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-2, ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_blob.mnc') + name_template="%s_blob.mnc", + ) trace = traits.Bool( - desc='compute the trace (approximate growth and shrinkage) -- FAST', - argstr='-trace') + desc="compute the trace (approximate growth and shrinkage) -- FAST", + argstr="-trace", + ) determinant = traits.Bool( - desc='compute the determinant (exact growth and shrinkage) -- SLOW', - argstr='-determinant') + desc="compute the determinant (exact growth and shrinkage) -- SLOW", + argstr="-determinant", + ) translation = traits.Bool( - desc='compute translation (structure displacement)', - 
argstr='-translation') + desc="compute translation (structure displacement)", argstr="-translation" + ) magnitude = traits.Bool( - desc='compute the magnitude of the displacement vector', - argstr='-magnitude') + desc="compute the magnitude of the displacement vector", argstr="-magnitude" + ) class BlobOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Blob(CommandLine): @@ -927,175 +992,194 @@ class Blob(CommandLine): input_spec = BlobInputSpec output_spec = BlobOutputSpec - _cmd = 'mincblob' + _cmd = "mincblob" class CalcInputSpec(CommandLineInputSpec): - _xor_input_files = ('input_files', 'filelist') + _xor_input_files = ("input_files", "filelist") input_files = InputMultiPath( File(exists=True), - desc='input file(s) for calculation', + desc="input file(s) for calculation", mandatory=True, - sep=' ', - argstr='%s', + sep=" ", + argstr="%s", position=-2, ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_files'], + name_source=["input_files"], hash_files=False, - name_template='%s_calc.mnc') + name_template="%s_calc.mnc", + ) - two = traits.Bool(desc='Create a MINC 2 output file.', argstr='-2') + two = traits.Bool(desc="Create a MINC 2 output file.", argstr="-2") clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) _xor_verbose = ( - 'verbose', - 'quiet', + "verbose", + "quiet", ) verbose = traits.Bool( - desc='Print out log messages (default).', - argstr='-verbose', - xor=_xor_verbose) + desc="Print out log messages (default).", argstr="-verbose", xor=_xor_verbose + ) quiet = traits.Bool( - desc='Do not print out log messages.', - argstr='-quiet', - xor=_xor_verbose) + desc="Do not print out log messages.", argstr="-quiet", xor=_xor_verbose + ) - debug = traits.Bool(desc='Print out debugging messages.', argstr='-debug') + debug = traits.Bool(desc="Print out debugging messages.", argstr="-debug") filelist = File( - desc='Specify the name of a file containing input file names.', - argstr='-filelist %s', + desc="Specify the name of a file containing input file names.", + argstr="-filelist %s", mandatory=True, - xor=_xor_input_files) + xor=_xor_input_files, + ) - _xor_copy_header = ('copy_header', 'no_copy_header') + _xor_copy_header = ("copy_header", "no_copy_header") copy_header = traits.Bool( - desc='Copy all of the header from the first file.', - argstr='-copy_header', - xor=_xor_copy_header) + desc="Copy all of the header from the first file.", + argstr="-copy_header", + xor=_xor_copy_header, + ) no_copy_header = traits.Bool( - desc='Do not copy all of the header from the first file.', - argstr='-nocopy_header', - xor=_xor_copy_header) + desc="Do not copy all of the header from the first file.", + argstr="-nocopy_header", + xor=_xor_copy_header, + ) _xor_format = ( - 'format_filetype', - 'format_byte', - 'format_short', - 'format_int', - 'format_long', - 'format_float', - 'format_double', - 'format_signed', - 'format_unsigned', + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", ) format_filetype = traits.Bool( - desc='Use data type of first file (default).', - argstr='-filetype', - xor=_xor_format) + desc="Use data type of first file (default).", + 
argstr="-filetype", + xor=_xor_format, + ) format_byte = traits.Bool( - desc='Write out byte data.', argstr='-byte', xor=_xor_format) + desc="Write out byte data.", argstr="-byte", xor=_xor_format + ) format_short = traits.Bool( - desc='Write out short integer data.', argstr='-short', xor=_xor_format) + desc="Write out short integer data.", argstr="-short", xor=_xor_format + ) format_int = traits.Bool( - desc='Write out 32-bit integer data.', argstr='-int', xor=_xor_format) + desc="Write out 32-bit integer data.", argstr="-int", xor=_xor_format + ) format_long = traits.Bool( - desc='Superseded by -int.', argstr='-long', xor=_xor_format) + desc="Superseded by -int.", argstr="-long", xor=_xor_format + ) format_float = traits.Bool( - desc='Write out single-precision floating-point data.', - argstr='-float', - xor=_xor_format) + desc="Write out single-precision floating-point data.", + argstr="-float", + xor=_xor_format, + ) format_double = traits.Bool( - desc='Write out double-precision floating-point data.', - argstr='-double', - xor=_xor_format) + desc="Write out double-precision floating-point data.", + argstr="-double", + xor=_xor_format, + ) format_signed = traits.Bool( - desc='Write signed integer data.', argstr='-signed', xor=_xor_format) + desc="Write signed integer data.", argstr="-signed", xor=_xor_format + ) format_unsigned = traits.Bool( - desc='Write unsigned integer data (default).', - argstr='-unsigned', - xor=_xor_format) + desc="Write unsigned integer data (default).", + argstr="-unsigned", + xor=_xor_format, + ) voxel_range = traits.Tuple( traits.Int, traits.Int, - argstr='-range %d %d', - desc='Valid range for output data.', + argstr="-range %d %d", + desc="Valid range for output data.", ) max_buffer_size_in_kb = traits.Range( low=0, - desc='Specify the maximum size of the internal buffers (in kbytes).', - argstr='-max_buffer_size_in_kb %d') + desc="Specify the maximum size of the internal buffers (in kbytes).", + argstr="-max_buffer_size_in_kb %d", + ) _xor_check_dimensions = ( - 'check_dimensions', - 'no_check_dimensions', + "check_dimensions", + "no_check_dimensions", ) check_dimensions = traits.Bool( - desc='Check that files have matching dimensions (default).', - argstr='-check_dimensions', - xor=_xor_check_dimensions) + desc="Check that files have matching dimensions (default).", + argstr="-check_dimensions", + xor=_xor_check_dimensions, + ) no_check_dimensions = traits.Bool( - desc='Do not check that files have matching dimensions.', - argstr='-nocheck_dimensions', - xor=_xor_check_dimensions) + desc="Do not check that files have matching dimensions.", + argstr="-nocheck_dimensions", + xor=_xor_check_dimensions, + ) # FIXME Is it sensible to use ignore_nan and propagate_nan at the same # time? Document this. ignore_nan = traits.Bool( - desc='Ignore invalid data (NaN) for accumulations.', - argstr='-ignore_nan') + desc="Ignore invalid data (NaN) for accumulations.", argstr="-ignore_nan" + ) propagate_nan = traits.Bool( - desc='Invalid data in any file at a voxel produces a NaN (default).', - argstr='-propagate_nan') + desc="Invalid data in any file at a voxel produces a NaN (default).", + argstr="-propagate_nan", + ) # FIXME Double-check that these are mutually exclusive? 
- _xor_nan_zero_illegal = ('output_nan', 'output_zero', - 'output_illegal_value') + _xor_nan_zero_illegal = ("output_nan", "output_zero", "output_illegal_value") output_nan = traits.Bool( - desc='Output NaN when an illegal operation is done (default).', - argstr='-nan', - xor=_xor_nan_zero_illegal) + desc="Output NaN when an illegal operation is done (default).", + argstr="-nan", + xor=_xor_nan_zero_illegal, + ) output_zero = traits.Bool( - desc='Output zero when an illegal operation is done.', - argstr='-zero', - xor=_xor_nan_zero_illegal) + desc="Output zero when an illegal operation is done.", + argstr="-zero", + xor=_xor_nan_zero_illegal, + ) output_illegal = traits.Bool( - desc= - 'Value to write out when an illegal operation is done. Default value: 1.79769e+308', - argstr='-illegal_value', - xor=_xor_nan_zero_illegal) + desc="Value to write out when an illegal operation is done. Default value: 1.79769e+308", + argstr="-illegal_value", + xor=_xor_nan_zero_illegal, + ) - _xor_expression = ('expression', 'expfile') + _xor_expression = ("expression", "expfile") expression = traits.Str( - desc='Expression to use in calculations.', - argstr='-expression \'%s\'', + desc="Expression to use in calculations.", + argstr="-expression '%s'", xor=_xor_expression, - mandatory=True) + mandatory=True, + ) expfile = File( - desc='Name of file containing expression.', - argstr='-expfile %s', + desc="Name of file containing expression.", + argstr="-expfile %s", xor=_xor_expression, - mandatory=True) + mandatory=True, + ) # FIXME test this one, the argstr will probably need tweaking, see # _format_arg. @@ -1103,22 +1187,24 @@ class CalcInputSpec(CommandLineInputSpec): traits.Tuple( traits.Str, File, - argstr='-outfile %s %s', - desc= - ('List of (symbol, file) tuples indicating that output should be written' - 'to the specified file, taking values from the symbol which should be' - 'created in the expression (see the EXAMPLES section). If this option' - 'is given, then all non-option arguments are taken as input files.' - 'This option can be used multiple times for multiple output files.' - ))) + argstr="-outfile %s %s", + desc=( + "List of (symbol, file) tuples indicating that output should be written" + "to the specified file, taking values from the symbol which should be" + "created in the expression (see the EXAMPLES section). If this option" + "is given, then all non-option arguments are taken as input files." + "This option can be used multiple times for multiple output files." 
+ ), + ) + ) eval_width = traits.Int( - desc='Number of voxels to evaluate simultaneously.', - argstr='-eval_width %s') + desc="Number of voxels to evaluate simultaneously.", argstr="-eval_width %s" + ) class CalcOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Calc(CommandLine): @@ -1138,7 +1224,7 @@ class Calc(CommandLine): input_spec = CalcInputSpec output_spec = CalcOutputSpec - _cmd = 'minccalc' + _cmd = "minccalc" # FIXME mincbbox produces output like @@ -1151,47 +1237,49 @@ class Calc(CommandLine): class BBoxInputSpec(StdOutCommandLineInputSpec): input_file = File( - desc='input file', - exists=True, - mandatory=True, - argstr='%s', - position=-2, + desc="input file", exists=True, mandatory=True, argstr="%s", position=-2, ) output_file = File( - desc='output file containing bounding box corners', + desc="output file containing bounding box corners", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_bbox.txt', - keep_extension=False) + name_template="%s_bbox.txt", + keep_extension=False, + ) threshold = traits.Int( 0, - desc='VIO_Real value threshold for bounding box. Default value: 0.', - argstr='-threshold') + desc="VIO_Real value threshold for bounding box. Default value: 0.", + argstr="-threshold", + ) - _xor_one_two = ('one_line', 'two_lines') + _xor_one_two = ("one_line", "two_lines") one_line = traits.Bool( - desc='Output on one line (default): start_x y z width_x y z', - argstr='-one_line', - xor=_xor_one_two) + desc="Output on one line (default): start_x y z width_x y z", + argstr="-one_line", + xor=_xor_one_two, + ) two_lines = traits.Bool( - desc='Output on two lines: start_x y z \n width_x y z', - argstr='-two_lines', - xor=_xor_one_two) + desc="Output on two lines: start_x y z \n width_x y z", + argstr="-two_lines", + xor=_xor_one_two, + ) format_mincresample = traits.Bool( - desc= - 'Output format for mincresample: (-step x y z -start x y z -nelements x y z', - argstr='-mincresample') + desc="Output format for mincresample: (-step x y z -start x y z -nelements x y z", + argstr="-mincresample", + ) format_mincreshape = traits.Bool( - desc='Output format for mincreshape: (-start x,y,z -count dx,dy,dz', - argstr='-mincreshape') + desc="Output format for mincreshape: (-start x,y,z -count dx,dy,dz", + argstr="-mincreshape", + ) format_minccrop = traits.Bool( - desc='Output format for minccrop: (-xlim x1 x2 -ylim y1 y2 -zlim z1 z2', - argstr='-minccrop') + desc="Output format for minccrop: (-xlim x1 x2 -ylim y1 y2 -zlim z1 z2", + argstr="-minccrop", + ) # FIXME Not implemented, will clash with our parsing of the output? 
# Command-specific options: @@ -1202,8 +1290,7 @@ class BBoxInputSpec(StdOutCommandLineInputSpec): class BBoxOutputSpec(TraitedSpec): - output_file = File( - desc='output file containing bounding box corners', exists=True) + output_file = File(desc="output file containing bounding box corners", exists=True) class BBox(StdOutCommandLine): @@ -1222,7 +1309,7 @@ class BBox(StdOutCommandLine): input_spec = BBoxInputSpec output_spec = BBoxOutputSpec - _cmd = 'mincbbox' + _cmd = "mincbbox" class BeastInputSpec(CommandLineInputSpec): @@ -1272,94 +1359,117 @@ class BeastInputSpec(CommandLineInputSpec): """ probability_map = traits.Bool( - desc='Output the probability map instead of crisp mask.', - argstr='-probability') + desc="Output the probability map instead of crisp mask.", argstr="-probability" + ) flip_images = traits.Bool( - desc= - 'Flip images around the mid-sagittal plane to increase patch count.', - argstr='-flip') + desc="Flip images around the mid-sagittal plane to increase patch count.", + argstr="-flip", + ) load_moments = traits.Bool( - desc=('Do not calculate moments instead use precalculated' - 'library moments. (for optimization purposes)'), - argstr='-load_moments') - fill_holes = traits.Bool( - desc='Fill holes in the binary output.', argstr='-fill') + desc=( + "Do not calculate moments instead use precalculated" + "library moments. (for optimization purposes)" + ), + argstr="-load_moments", + ) + fill_holes = traits.Bool(desc="Fill holes in the binary output.", argstr="-fill") median_filter = traits.Bool( - desc='Apply a median filter on the probability map.', argstr='-median') + desc="Apply a median filter on the probability map.", argstr="-median" + ) nlm_filter = traits.Bool( - desc='Apply an NLM filter on the probability map (experimental).', - argstr='-nlm_filter') + desc="Apply an NLM filter on the probability map (experimental).", + argstr="-nlm_filter", + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) configuration_file = File( - desc='Specify configuration file.', argstr='-configuration %s') + desc="Specify configuration file.", argstr="-configuration %s" + ) voxel_size = traits.Int( - 4, usedefault=True, - desc=('Specify voxel size for calculations (4, 2, or 1).' - 'Default value: 4. Assumes no multiscale. Use configuration' - 'file for multiscale.'), - argstr='-voxel_size %s') + 4, + usedefault=True, + desc=( + "Specify voxel size for calculations (4, 2, or 1)." + "Default value: 4. Assumes no multiscale. Use configuration" + "file for multiscale." + ), + argstr="-voxel_size %s", + ) abspath = traits.Bool( - desc= - 'File paths in the library are absolute (default is relative to library root).', - argstr='-abspath', + desc="File paths in the library are absolute (default is relative to library root).", + argstr="-abspath", usedefault=True, - default_value=True) + default_value=True, + ) patch_size = traits.Int( - 1, usedefault=True, - desc='Specify patch size for single scale approach. Default value: 1.', - argstr='-patch_size %s') + 1, + usedefault=True, + desc="Specify patch size for single scale approach. Default value: 1.", + argstr="-patch_size %s", + ) search_area = traits.Int( - 2, usedefault=True, - desc= - 'Specify size of search area for single scale approach. Default value: 2.', - argstr='-search_area %s') + 2, + usedefault=True, + desc="Specify size of search area for single scale approach. 
Default value: 2.", + argstr="-search_area %s", + ) confidence_level_alpha = traits.Float( - 0.5, usedefault=True, - desc='Specify confidence level Alpha. Default value: 0.5', - argstr='-alpha %s') + 0.5, + usedefault=True, + desc="Specify confidence level Alpha. Default value: 0.5", + argstr="-alpha %s", + ) smoothness_factor_beta = traits.Float( - 0.5, usedefault=True, - desc='Specify smoothness factor Beta. Default value: 0.25', - argstr='-beta %s') + 0.5, + usedefault=True, + desc="Specify smoothness factor Beta. Default value: 0.25", + argstr="-beta %s", + ) threshold_patch_selection = traits.Float( - 0.95, usedefault=True, - desc='Specify threshold for patch selection. Default value: 0.95', - argstr='-threshold %s') + 0.95, + usedefault=True, + desc="Specify threshold for patch selection. Default value: 0.95", + argstr="-threshold %s", + ) number_selected_images = traits.Int( - 20, usedefault=True, - desc='Specify number of selected images. Default value: 20', - argstr='-selection_num %s') + 20, + usedefault=True, + desc="Specify number of selected images. Default value: 20", + argstr="-selection_num %s", + ) same_resolution = traits.Bool( - desc='Output final mask with the same resolution as input file.', - argstr='-same_resolution') + desc="Output final mask with the same resolution as input file.", + argstr="-same_resolution", + ) library_dir = Directory( - desc='library directory', position=-3, argstr='%s', mandatory=True) - input_file = File( - desc='input file', position=-2, argstr='%s', mandatory=True) + desc="library directory", position=-3, argstr="%s", mandatory=True + ) + input_file = File(desc="input file", position=-2, argstr="%s", mandatory=True) output_file = File( - desc='output file', + desc="output file", position=-1, - argstr='%s', - name_source=['input_file'], + argstr="%s", + name_source=["input_file"], hash_files=False, - name_template='%s_beast_mask.mnc') + name_template="%s_beast_mask.mnc", + ) class BeastOutputSpec(TraitedSpec): - output_file = File(desc='output mask file', exists=True) + output_file = File(desc="output mask file", exists=True) class Beast(CommandLine): @@ -1379,150 +1489,158 @@ class Beast(CommandLine): input_spec = BeastInputSpec output_spec = BeastOutputSpec - _cmd = 'mincbeast' + _cmd = "mincbeast" class PikInputSpec(CommandLineInputSpec): input_file = File( - desc='input file', - exists=True, - mandatory=True, - argstr='%s', - position=-2, + desc="input file", exists=True, mandatory=True, argstr="%s", position=-2, ) - _xor_image_type = ('jpg', 'png') + _xor_image_type = ("jpg", "png") - jpg = traits.Bool(desc='Output a jpg file.', xor=_xor_image_type) - png = traits.Bool(desc='Output a png file (default).', xor=_xor_image_type) + jpg = traits.Bool(desc="Output a jpg file.", xor=_xor_image_type) + png = traits.Bool(desc="Output a png file (default).", xor=_xor_image_type) output_file = File( - desc='output file', - argstr='%s', + desc="output file", + argstr="%s", genfile=True, position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s.png', - keep_extension=False) + name_template="%s.png", + keep_extension=False, + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) # FIXME not implemented: --verbose # --fake # --lookup ==> arguments to pass to minclookup scale = traits.Int( - 2, usedefault=True, - desc=('Scaling factor for resulting 
image. By default images are' - 'output at twice their original resolution.'), - argstr='--scale %s') + 2, + usedefault=True, + desc=( + "Scaling factor for resulting image. By default images are" + "output at twice their original resolution." + ), + argstr="--scale %s", + ) width = traits.Int( - desc= - 'Autoscale the resulting image to have a fixed image width (in pixels).', - argstr='--width %s') + desc="Autoscale the resulting image to have a fixed image width (in pixels).", + argstr="--width %s", + ) depth = traits.Enum( 8, 16, - desc='Bitdepth for resulting image 8 or 16 (MSB machines only!)', - argstr='--depth %s') + desc="Bitdepth for resulting image 8 or 16 (MSB machines only!)", + argstr="--depth %s", + ) - _xor_title = ('title_string', 'title_with_filename') + _xor_title = ("title_string", "title_with_filename") title = traits.Either( - traits.Bool(desc='Use input filename as title in resulting image.'), - traits.Str(desc='Add a title to the resulting image.'), - argstr='%s') # see _format_arg for actual arg string + traits.Bool(desc="Use input filename as title in resulting image."), + traits.Str(desc="Add a title to the resulting image."), + argstr="%s", + ) # see _format_arg for actual arg string title_size = traits.Int( - desc='Font point size for the title.', - argstr='--title_size %s', - requires=['title']) + desc="Font point size for the title.", + argstr="--title_size %s", + requires=["title"], + ) annotated_bar = traits.Bool( - desc= - 'create an annotated bar to match the image (use height of the output image)', - argstr='--anot_bar') + desc="create an annotated bar to match the image (use height of the output image)", + argstr="--anot_bar", + ) # FIXME tuple of floats? Not voxel values? Man page doesn't specify. minc_range = traits.Tuple( traits.Float, traits.Float, - desc='Valid range of values for MINC file.', - argstr='--range %s %s') + desc="Valid range of values for MINC file.", + argstr="--range %s %s", + ) - _xor_image_range = ('image_range', 'auto_range') + _xor_image_range = ("image_range", "auto_range") image_range = traits.Tuple( traits.Float, traits.Float, - desc='Range of image values to use for pixel intensity.', - argstr='--image_range %s %s', - xor=_xor_image_range) + desc="Range of image values to use for pixel intensity.", + argstr="--image_range %s %s", + xor=_xor_image_range, + ) auto_range = traits.Bool( - desc= - 'Automatically determine image range using a 5 and 95% PcT. (histogram)', - argstr='--auto_range', - xor=_xor_image_range) + desc="Automatically determine image range using a 5 and 95% PcT. (histogram)", + argstr="--auto_range", + xor=_xor_image_range, + ) start = traits.Int( - desc='Slice number to get. (note this is in voxel co-ordinates).', - argstr='--slice %s') # FIXME Int is correct? + desc="Slice number to get. (note this is in voxel co-ordinates).", + argstr="--slice %s", + ) # FIXME Int is correct? - _xor_slice = ('slice_z', 'slice_y', 'slice_x') + _xor_slice = ("slice_z", "slice_y", "slice_x") slice_z = traits.Bool( - desc='Get an axial/transverse (z) slice.', argstr='-z', xor=_xor_slice) - slice_y = traits.Bool( - desc='Get a coronal (y) slice.', argstr='-y', xor=_xor_slice) + desc="Get an axial/transverse (z) slice.", argstr="-z", xor=_xor_slice + ) + slice_y = traits.Bool(desc="Get a coronal (y) slice.", argstr="-y", xor=_xor_slice) slice_x = traits.Bool( - desc='Get a sagittal (x) slice.', argstr='-x', - xor=_xor_slice) # FIXME typo in man page? sagital? 
+ desc="Get a sagittal (x) slice.", argstr="-x", xor=_xor_slice + ) # FIXME typo in man page? sagital? triplanar = traits.Bool( - desc='Create a triplanar view of the input file.', - argstr='--triplanar') + desc="Create a triplanar view of the input file.", argstr="--triplanar" + ) tile_size = traits.Int( - desc='Pixel size for each image in a triplanar.', - argstr='--tilesize %s') + desc="Pixel size for each image in a triplanar.", argstr="--tilesize %s" + ) - _xor_sagittal_offset = ('sagittal_offset', 'sagittal_offset_perc') + _xor_sagittal_offset = ("sagittal_offset", "sagittal_offset_perc") sagittal_offset = traits.Int( - desc='Offset the sagittal slice from the centre.', - argstr='--sagittal_offset %s') + desc="Offset the sagittal slice from the centre.", argstr="--sagittal_offset %s" + ) sagittal_offset_perc = traits.Range( low=0, high=100, - desc='Offset the sagittal slice by a percentage from the centre.', - argstr='--sagittal_offset_perc %d', + desc="Offset the sagittal slice by a percentage from the centre.", + argstr="--sagittal_offset_perc %d", ) - _xor_vertical_horizontal = ('vertical_triplanar_view', - 'horizontal_triplanar_view') + _xor_vertical_horizontal = ("vertical_triplanar_view", "horizontal_triplanar_view") vertical_triplanar_view = traits.Bool( - desc='Create a vertical triplanar view (Default).', - argstr='--vertical', - xor=_xor_vertical_horizontal) + desc="Create a vertical triplanar view (Default).", + argstr="--vertical", + xor=_xor_vertical_horizontal, + ) horizontal_triplanar_view = traits.Bool( - desc='Create a horizontal triplanar view.', - argstr='--horizontal', - xor=_xor_vertical_horizontal) + desc="Create a horizontal triplanar view.", + argstr="--horizontal", + xor=_xor_vertical_horizontal, + ) - lookup = traits.Str( - desc='Arguments to pass to minclookup', argstr='--lookup %s') + lookup = traits.Str(desc="Arguments to pass to minclookup", argstr="--lookup %s") class PikOutputSpec(TraitedSpec): - output_file = File(desc='output image', exists=True) + output_file = File(desc="output image", exists=True) class Pik(CommandLine): @@ -1545,102 +1663,104 @@ class Pik(CommandLine): input_spec = PikInputSpec output_spec = PikOutputSpec - _cmd = 'mincpik' + _cmd = "mincpik" def _format_arg(self, name, spec, value): - if name == 'title': + if name == "title": if isinstance(value, bool) and value: - return '--title' + return "--title" elif isinstance(value, str): - return '--title --title_text %s' % (value, ) + return "--title --title_text %s" % (value,) else: - raise ValueError( - 'Unknown value for "title" argument: ' + str(value)) + raise ValueError('Unknown value for "title" argument: ' + str(value)) return super(Pik, self)._format_arg(name, spec, value) class BlurInputSpec(CommandLineInputSpec): input_file = File( - desc='input file', - exists=True, - mandatory=True, - argstr='%s', - position=-2, + desc="input file", exists=True, mandatory=True, argstr="%s", position=-2, ) - output_file_base = File(desc='output file base', argstr='%s', position=-1) + output_file_base = File(desc="output file base", argstr="%s", position=-1) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) - _xor_kernel = ('gaussian', 'rect') + _xor_kernel = ("gaussian", "rect") gaussian = traits.Bool( - desc='Use a gaussian smoothing kernel (default).', - argstr='-gaussian', - xor=_xor_kernel) + desc="Use a gaussian smoothing kernel 
(default).", + argstr="-gaussian", + xor=_xor_kernel, + ) rect = traits.Bool( - desc='Use a rect (box) smoothing kernel.', - argstr='-rect', - xor=_xor_kernel) + desc="Use a rect (box) smoothing kernel.", argstr="-rect", xor=_xor_kernel + ) gradient = traits.Bool( - desc='Create the gradient magnitude volume as well.', - argstr='-gradient') + desc="Create the gradient magnitude volume as well.", argstr="-gradient" + ) partial = traits.Bool( - desc= - 'Create the partial derivative and gradient magnitude volumes as well.', - argstr='-partial') + desc="Create the partial derivative and gradient magnitude volumes as well.", + argstr="-partial", + ) no_apodize = traits.Bool( - desc='Do not apodize the data before blurring.', argstr='-no_apodize') + desc="Do not apodize the data before blurring.", argstr="-no_apodize" + ) - _xor_main_options = ('fwhm', 'fwhm3d', 'standard_dev') + _xor_main_options = ("fwhm", "fwhm3d", "standard_dev") fwhm = traits.Float( 0, - desc='Full-width-half-maximum of gaussian kernel. Default value: 0.', - argstr='-fwhm %s', + desc="Full-width-half-maximum of gaussian kernel. Default value: 0.", + argstr="-fwhm %s", xor=_xor_main_options, - mandatory=True) + mandatory=True, + ) standard_dev = traits.Float( 0, - desc='Standard deviation of gaussian kernel. Default value: 0.', - argstr='-standarddev %s', + desc="Standard deviation of gaussian kernel. Default value: 0.", + argstr="-standarddev %s", xor=_xor_main_options, - mandatory=True) + mandatory=True, + ) fwhm3d = traits.Tuple( traits.Float, traits.Float, traits.Float, - argstr='-3dfwhm %s %s %s', - desc=('Full-width-half-maximum of gaussian kernel.' - 'Default value: -1.79769e+308 -1.79769e+308 -1.79769e+308.'), + argstr="-3dfwhm %s %s %s", + desc=( + "Full-width-half-maximum of gaussian kernel." + "Default value: -1.79769e+308 -1.79769e+308 -1.79769e+308." + ), xor=_xor_main_options, - mandatory=True) + mandatory=True, + ) dimensions = traits.Enum( 3, 1, 2, - desc= - 'Number of dimensions to blur (either 1,2 or 3). Default value: 3.', - argstr='-dimensions %s') + desc="Number of dimensions to blur (either 1,2 or 3). Default value: 3.", + argstr="-dimensions %s", + ) class BlurOutputSpec(TraitedSpec): - output_file = File(desc='Blurred output file.', exists=True) + output_file = File(desc="Blurred output file.", exists=True) - gradient_dxyz = File(desc='Gradient dxyz.') - partial_dx = File(desc='Partial gradient dx.') - partial_dy = File(desc='Partial gradient dy.') - partial_dz = File(desc='Partial gradient dz.') - partial_dxyz = File(desc='Partial gradient dxyz.') + gradient_dxyz = File(desc="Gradient dxyz.") + partial_dx = File(desc="Partial gradient dx.") + partial_dy = File(desc="Partial gradient dy.") + partial_dz = File(desc="Partial gradient dz.") + partial_dxyz = File(desc="Partial gradient dxyz.") class Blur(StdOutCommandLine): @@ -1682,7 +1802,7 @@ class Blur(StdOutCommandLine): input_spec = BlurInputSpec output_spec = BlurOutputSpec - _cmd = 'mincblur' + _cmd = "mincblur" def _gen_output_base(self): output_file_base = self.inputs.output_file_base @@ -1690,13 +1810,11 @@ def _gen_output_base(self): if isdefined(output_file_base): return output_file_base else: - base_file_name = os.path.split( - self.inputs.input_file)[1] # e.g. 'foo.mnc' - base_file_name_no_ext = os.path.splitext(base_file_name)[ - 0] # e.g. 'foo' + base_file_name = os.path.split(self.inputs.input_file)[1] # e.g. 'foo.mnc' + base_file_name_no_ext = os.path.splitext(base_file_name)[0] # e.g. 
'foo' output_base = os.path.join( - os.getcwd(), base_file_name_no_ext + - '_bluroutput') # e.g. '/tmp/blah/foo_bluroutput' + os.getcwd(), base_file_name_no_ext + "_bluroutput" + ) # e.g. '/tmp/blah/foo_bluroutput' # return os.path.splitext(self.inputs.input_file)[0] + # '_bluroutput' return output_base @@ -1706,16 +1824,16 @@ def _list_outputs(self): output_file_base = self._gen_output_base() - outputs['output_file'] = output_file_base + '_blur.mnc' + outputs["output_file"] = output_file_base + "_blur.mnc" if isdefined(self.inputs.gradient): - outputs['gradient_dxyz'] = output_file_base + '_dxyz.mnc' + outputs["gradient_dxyz"] = output_file_base + "_dxyz.mnc" if isdefined(self.inputs.partial): - outputs['partial_dx'] = output_file_base + '_dx.mnc' - outputs['partial_dy'] = output_file_base + '_dy.mnc' - outputs['partial_dz'] = output_file_base + '_dz.mnc' - outputs['partial_dxyz'] = output_file_base + '_dxyz.mnc' + outputs["partial_dx"] = output_file_base + "_dx.mnc" + outputs["partial_dy"] = output_file_base + "_dy.mnc" + outputs["partial_dz"] = output_file_base + "_dz.mnc" + outputs["partial_dxyz"] = output_file_base + "_dxyz.mnc" return outputs @@ -1730,156 +1848,177 @@ def cmdline(self): # FIXME this seems like a bit of a hack. Can we force output_file # to show up in cmdline by default, even if it isn't specified in # the instantiation of Pik? - return '%s %s' % (orig_cmdline, self._gen_output_base()) + return "%s %s" % (orig_cmdline, self._gen_output_base()) class MathInputSpec(CommandLineInputSpec): - _xor_input_files = ('input_files', 'filelist') + _xor_input_files = ("input_files", "filelist") input_files = InputMultiPath( File(exists=True), - desc='input file(s) for calculation', + desc="input file(s) for calculation", mandatory=True, - sep=' ', - argstr='%s', + sep=" ", + argstr="%s", position=-2, - xor=_xor_input_files) + xor=_xor_input_files, + ) output_file = File( - desc='output file', - argstr='%s', + desc="output file", + argstr="%s", genfile=True, position=-1, - name_source=['input_files'], + name_source=["input_files"], hash_files=False, - name_template='%s_mincmath.mnc') + name_template="%s_mincmath.mnc", + ) filelist = File( - desc='Specify the name of a file containing input file names.', - argstr='-filelist %s', + desc="Specify the name of a file containing input file names.", + argstr="-filelist %s", exists=True, mandatory=True, - xor=_xor_input_files) + xor=_xor_input_files, + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) - two = traits.Bool(desc='Create a MINC 2 output file.', argstr='-2') + two = traits.Bool(desc="Create a MINC 2 output file.", argstr="-2") - _xor_copy_header = ('copy_header', 'no_copy_header') + _xor_copy_header = ("copy_header", "no_copy_header") copy_header = traits.Bool( - desc= - 'Copy all of the header from the first file (default for one file).', - argstr='-copy_header', - xor=_xor_copy_header) + desc="Copy all of the header from the first file (default for one file).", + argstr="-copy_header", + xor=_xor_copy_header, + ) no_copy_header = traits.Bool( - desc= - 'Do not copy all of the header from the first file (default for many files)).', - argstr='-nocopy_header', - xor=_xor_copy_header) + desc="Do not copy all of the header from the first file (default for many files)).", + argstr="-nocopy_header", + xor=_xor_copy_header, + ) _xor_format = ( - 'format_filetype', - 'format_byte', - 
'format_short', - 'format_int', - 'format_long', - 'format_float', - 'format_double', - 'format_signed', - 'format_unsigned', + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", ) format_filetype = traits.Bool( - desc='Use data type of first file (default).', - argstr='-filetype', - xor=_xor_format) + desc="Use data type of first file (default).", + argstr="-filetype", + xor=_xor_format, + ) format_byte = traits.Bool( - desc='Write out byte data.', argstr='-byte', xor=_xor_format) + desc="Write out byte data.", argstr="-byte", xor=_xor_format + ) format_short = traits.Bool( - desc='Write out short integer data.', argstr='-short', xor=_xor_format) + desc="Write out short integer data.", argstr="-short", xor=_xor_format + ) format_int = traits.Bool( - desc='Write out 32-bit integer data.', argstr='-int', xor=_xor_format) + desc="Write out 32-bit integer data.", argstr="-int", xor=_xor_format + ) format_long = traits.Bool( - desc='Superseded by -int.', argstr='-long', xor=_xor_format) + desc="Superseded by -int.", argstr="-long", xor=_xor_format + ) format_float = traits.Bool( - desc='Write out single-precision floating-point data.', - argstr='-float', - xor=_xor_format) + desc="Write out single-precision floating-point data.", + argstr="-float", + xor=_xor_format, + ) format_double = traits.Bool( - desc='Write out double-precision floating-point data.', - argstr='-double', - xor=_xor_format) + desc="Write out double-precision floating-point data.", + argstr="-double", + xor=_xor_format, + ) format_signed = traits.Bool( - desc='Write signed integer data.', argstr='-signed', xor=_xor_format) + desc="Write signed integer data.", argstr="-signed", xor=_xor_format + ) format_unsigned = traits.Bool( - desc='Write unsigned integer data (default).', - argstr='-unsigned', - xor=_xor_format) + desc="Write unsigned integer data (default).", + argstr="-unsigned", + xor=_xor_format, + ) voxel_range = traits.Tuple( traits.Int, traits.Int, - argstr='-range %d %d', - desc='Valid range for output data.') + argstr="-range %d %d", + desc="Valid range for output data.", + ) max_buffer_size_in_kb = traits.Range( low=0, - desc='Specify the maximum size of the internal buffers (in kbytes).', + desc="Specify the maximum size of the internal buffers (in kbytes).", value=4096, usedefault=True, - argstr='-max_buffer_size_in_kb %d', + argstr="-max_buffer_size_in_kb %d", ) _xor_check_dimensions = ( - 'check_dimensions', - 'no_check_dimensions', + "check_dimensions", + "no_check_dimensions", ) check_dimensions = traits.Bool( - desc='Check that dimension info matches across files (default).', - argstr='-check_dimensions', - xor=_xor_check_dimensions) + desc="Check that dimension info matches across files (default).", + argstr="-check_dimensions", + xor=_xor_check_dimensions, + ) no_check_dimensions = traits.Bool( - desc='Do not check dimension info.', - argstr='-nocheck_dimensions', - xor=_xor_check_dimensions) + desc="Do not check dimension info.", + argstr="-nocheck_dimensions", + xor=_xor_check_dimensions, + ) dimension = traits.Str( - desc= - 'Specify a dimension along which we wish to perform a calculation.', - argstr='-dimension %s') + desc="Specify a dimension along which we wish to perform a calculation.", + argstr="-dimension %s", + ) # FIXME Is it sensible to use ignore_nan and propagate_nan at the same # time? Document this. 
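On this FIXME: unlike the format flags above, `ignore_nan` and `propagate_nan` (declared next) share no `xor` tuple, so the spec itself never stops a caller from setting both; each Bool simply contributes its own flag. A short sketch (the path is hypothetical; `exists=True` requires a real file at assignment time):

    from nipype.interfaces.minc import Math

    # Both switches pass validation because neither carries xor metadata;
    # mincmath itself decides how the combination behaves, which is what
    # the FIXME is asking about.
    m = Math(input_files=["/tmp/a.mnc"], sqrt=True,
             ignore_nan=True, propagate_nan=True)
    # m.cmdline should contain both "-ignore_nan" and "-propagate_nan".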
ignore_nan = traits.Bool( - desc='Ignore invalid data (NaN) for accumulations.', - argstr='-ignore_nan') + desc="Ignore invalid data (NaN) for accumulations.", argstr="-ignore_nan" + ) propagate_nan = traits.Bool( - desc='Invalid data in any file at a voxel produces a NaN (default).', - argstr='-propagate_nan') + desc="Invalid data in any file at a voxel produces a NaN (default).", + argstr="-propagate_nan", + ) # FIXME Double-check that these are mutually exclusive? - _xor_nan_zero_illegal = ('output_nan', 'output_zero', - 'output_illegal_value') + _xor_nan_zero_illegal = ("output_nan", "output_zero", "output_illegal_value") output_nan = traits.Bool( - desc='Output NaN when an illegal operation is done (default).', - argstr='-nan', - xor=_xor_nan_zero_illegal) + desc="Output NaN when an illegal operation is done (default).", + argstr="-nan", + xor=_xor_nan_zero_illegal, + ) output_zero = traits.Bool( - desc='Output zero when an illegal operation is done.', - argstr='-zero', - xor=_xor_nan_zero_illegal) + desc="Output zero when an illegal operation is done.", + argstr="-zero", + xor=_xor_nan_zero_illegal, + ) output_illegal = traits.Bool( - desc=('Value to write out when an illegal operation' - 'is done. Default value: 1.79769e+308'), - argstr='-illegal_value', - xor=_xor_nan_zero_illegal) + desc=( + "Value to write out when an illegal operation" + "is done. Default value: 1.79769e+308" + ), + argstr="-illegal_value", + xor=_xor_nan_zero_illegal, + ) # FIXME A whole bunch of the parameters will be mutually exclusive, e.g. surely can't do sqrt and abs at the same time? # Or does mincmath do one and then the next? @@ -1889,156 +2028,186 @@ class MathInputSpec(CommandLineInputSpec): ########################################################################## bool_or_const_traits = [ - 'test_gt', 'test_lt', 'test_eq', 'test_ne', 'test_ge', 'test_le', - 'calc_add', 'calc_sub', 'calc_mul', 'calc_div' + "test_gt", + "test_lt", + "test_eq", + "test_ne", + "test_ge", + "test_le", + "calc_add", + "calc_sub", + "calc_mul", + "calc_div", ] test_gt = traits.Either( traits.Bool(), traits.Float(), - desc='Test for vol1 > vol2 or vol1 > constant.', - argstr='-gt') + desc="Test for vol1 > vol2 or vol1 > constant.", + argstr="-gt", + ) test_lt = traits.Either( traits.Bool(), traits.Float(), - desc='Test for vol1 < vol2 or vol1 < constant.', - argstr='-lt') + desc="Test for vol1 < vol2 or vol1 < constant.", + argstr="-lt", + ) test_eq = traits.Either( traits.Bool(), traits.Float(), - desc='Test for integer vol1 == vol2 or vol1 == constant.', - argstr='-eq') + desc="Test for integer vol1 == vol2 or vol1 == constant.", + argstr="-eq", + ) test_ne = traits.Either( traits.Bool(), traits.Float(), - desc='Test for integer vol1 != vol2 or vol1 != const.', - argstr='-ne') + desc="Test for integer vol1 != vol2 or vol1 != const.", + argstr="-ne", + ) test_ge = traits.Either( traits.Bool(), traits.Float(), - desc='Test for vol1 >= vol2 or vol1 >= const.', - argstr='-ge') + desc="Test for vol1 >= vol2 or vol1 >= const.", + argstr="-ge", + ) test_le = traits.Either( traits.Bool(), traits.Float(), - desc='Test for vol1 <= vol2 or vol1 <= const.', - argstr='-le') + desc="Test for vol1 <= vol2 or vol1 <= const.", + argstr="-le", + ) calc_add = traits.Either( traits.Bool(), traits.Float(), - desc='Add N volumes or volume + constant.', - argstr='-add') + desc="Add N volumes or volume + constant.", + argstr="-add", + ) calc_sub = traits.Either( traits.Bool(), traits.Float(), - desc='Subtract 2 volumes or volume - constant.', - 
argstr='-sub') + desc="Subtract 2 volumes or volume - constant.", + argstr="-sub", + ) calc_mul = traits.Either( traits.Bool(), traits.Float(), - desc='Multiply N volumes or volume * constant.', - argstr='-mult') + desc="Multiply N volumes or volume * constant.", + argstr="-mult", + ) calc_div = traits.Either( traits.Bool(), traits.Float(), - desc='Divide 2 volumes or volume / constant.', - argstr='-div') + desc="Divide 2 volumes or volume / constant.", + argstr="-div", + ) ###################################### # Traits that expect a single volume # ###################################### single_volume_traits = [ - 'invert', 'calc_not', 'sqrt', 'square', 'abs', 'exp', 'log', 'scale', - 'clamp', 'segment', 'nsegment', 'isnan', 'isnan' + "invert", + "calc_not", + "sqrt", + "square", + "abs", + "exp", + "log", + "scale", + "clamp", + "segment", + "nsegment", + "isnan", + "isnan", ] # FIXME enforce this in _parse_inputs and check for other members invert = traits.Either( - traits.Float(), desc='Calculate 1/c.', argstr='-invert -const %s') + traits.Float(), desc="Calculate 1/c.", argstr="-invert -const %s" + ) - calc_not = traits.Bool(desc='Calculate !vol1.', argstr='-not') + calc_not = traits.Bool(desc="Calculate !vol1.", argstr="-not") - sqrt = traits.Bool(desc='Take square root of a volume.', argstr='-sqrt') - square = traits.Bool(desc='Take square of a volume.', argstr='-square') - abs = traits.Bool(desc='Take absolute value of a volume.', argstr='-abs') + sqrt = traits.Bool(desc="Take square root of a volume.", argstr="-sqrt") + square = traits.Bool(desc="Take square of a volume.", argstr="-square") + abs = traits.Bool(desc="Take absolute value of a volume.", argstr="-abs") exp = traits.Tuple( traits.Float, traits.Float, - argstr='-exp -const2 %s %s', - desc='Calculate c2*exp(c1*x). Both constants must be specified.') + argstr="-exp -const2 %s %s", + desc="Calculate c2*exp(c1*x). Both constants must be specified.", + ) log = traits.Tuple( traits.Float, traits.Float, - argstr='-log -const2 %s %s', - desc='Calculate log(x/c2)/c1. The constants c1 and c2 default to 1.') + argstr="-log -const2 %s %s", + desc="Calculate log(x/c2)/c1. The constants c1 and c2 default to 1.", + ) scale = traits.Tuple( traits.Float, traits.Float, - argstr='-scale -const2 %s %s', - desc='Scale a volume: volume * c1 + c2.') + argstr="-scale -const2 %s %s", + desc="Scale a volume: volume * c1 + c2.", + ) clamp = traits.Tuple( traits.Float, traits.Float, - argstr='-clamp -const2 %s %s', - desc='Clamp a volume to lie between two values.') + argstr="-clamp -const2 %s %s", + desc="Clamp a volume to lie between two values.", + ) segment = traits.Tuple( traits.Float, traits.Float, - argstr='-segment -const2 %s %s', - desc= - 'Segment a volume using range of -const2: within range = 1, outside range = 0.' 
+ argstr="-segment -const2 %s %s", + desc="Segment a volume using range of -const2: within range = 1, outside range = 0.", ) nsegment = traits.Tuple( traits.Float, traits.Float, - argstr='-nsegment -const2 %s %s', - desc='Opposite of -segment: within range = 0, outside range = 1.') + argstr="-nsegment -const2 %s %s", + desc="Opposite of -segment: within range = 0, outside range = 1.", + ) - isnan = traits.Bool(desc='Test for NaN values in vol1.', argstr='-isnan') + isnan = traits.Bool(desc="Test for NaN values in vol1.", argstr="-isnan") - nisnan = traits.Bool(desc='Negation of -isnan.', argstr='-nisnan') + nisnan = traits.Bool(desc="Negation of -isnan.", argstr="-nisnan") ############################################ # Traits that expect precisely two volumes # ############################################ - two_volume_traits = ['percentdiff'] + two_volume_traits = ["percentdiff"] percentdiff = traits.Float( - desc= - 'Percent difference between 2 volumes, thresholded (const def=0.0).', - argstr='-percentdiff') + desc="Percent difference between 2 volumes, thresholded (const def=0.0).", + argstr="-percentdiff", + ) ##################################### # Traits that expect N >= 1 volumes # ##################################### - n_volume_traits = [ - 'count_valid', 'maximum', 'minimum', 'calc_add', 'calc_or' - ] + n_volume_traits = ["count_valid", "maximum", "minimum", "calc_add", "calc_or"] count_valid = traits.Bool( - desc='Count the number of valid values in N volumes.', - argstr='-count_valid') + desc="Count the number of valid values in N volumes.", argstr="-count_valid" + ) - maximum = traits.Bool(desc='Find maximum of N volumes.', argstr='-maximum') - minimum = traits.Bool(desc='Find minimum of N volumes.', argstr='-minimum') + maximum = traits.Bool(desc="Find maximum of N volumes.", argstr="-maximum") + minimum = traits.Bool(desc="Find minimum of N volumes.", argstr="-minimum") - calc_and = traits.Bool( - desc='Calculate vol1 && vol2 (&& ...).', argstr='-and') - calc_or = traits.Bool( - desc='Calculate vol1 || vol2 (|| ...).', argstr='-or') + calc_and = traits.Bool(desc="Calculate vol1 && vol2 (&& ...).", argstr="-and") + calc_or = traits.Bool(desc="Calculate vol1 || vol2 (|| ...).", argstr="-or") class MathOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Math(StdOutCommandLine): @@ -2064,7 +2233,7 @@ class Math(StdOutCommandLine): input_spec = MathInputSpec output_spec = MathOutputSpec - _cmd = 'mincmath' + _cmd = "mincmath" def _format_arg(self, name, spec, value): assert value is not None @@ -2076,18 +2245,11 @@ def _format_arg(self, name, spec, value): if isinstance(value, bool) and value: return spec.argstr elif isinstance(value, bool) and not value: - raise ValueError('Does not make sense to specify %s=False' % - (name, )) + raise ValueError("Does not make sense to specify %s=False" % (name,)) elif isinstance(value, float): - return '%s -const %s' % ( - spec.argstr, - value, - ) + return "%s -const %s" % (spec.argstr, value,) else: - raise ValueError('Invalid %s argument: %s' % ( - name, - value, - )) + raise ValueError("Invalid %s argument: %s" % (name, value,)) return super(Math, self)._format_arg(name, spec, value) @@ -2104,22 +2266,19 @@ def _parse_inputs(self): if isinstance(t, bool): if nr_input_files != 2: raise ValueError( - 'Due to the %s option we expected 2 files but input_files is of length %d' - % ( - n, - nr_input_files, - )) + "Due to the %s option we expected 2 files but 
input_files is of length %d" + % (n, nr_input_files,) + ) elif isinstance(t, float): if nr_input_files != 1: raise ValueError( - 'Due to the %s option we expected 1 file but input_files is of length %d' - % ( - n, - nr_input_files, - )) + "Due to the %s option we expected 1 file but input_files is of length %d" + % (n, nr_input_files,) + ) else: raise ValueError( - 'Argument should be a bool or const, but got: %s' % t) + "Argument should be a bool or const, but got: %s" % t + ) for n in self.input_spec.single_volume_traits: t = self.inputs.__getattribute__(n) @@ -2127,11 +2286,9 @@ def _parse_inputs(self): if isdefined(t): if nr_input_files != 1: raise ValueError( - 'Due to the %s option we expected 1 file but input_files is of length %d' - % ( - n, - nr_input_files, - )) + "Due to the %s option we expected 1 file but input_files is of length %d" + % (n, nr_input_files,) + ) for n in self.input_spec.two_volume_traits: t = self.inputs.__getattribute__(n) @@ -2139,11 +2296,9 @@ def _parse_inputs(self): if isdefined(t): if nr_input_files != 2: raise ValueError( - 'Due to the %s option we expected 2 files but input_files is of length %d' - % ( - n, - nr_input_files, - )) + "Due to the %s option we expected 2 files but input_files is of length %d" + % (n, nr_input_files,) + ) for n in self.input_spec.n_volume_traits: t = self.inputs.__getattribute__(n) @@ -2151,11 +2306,9 @@ def _parse_inputs(self): if isdefined(t): if not nr_input_files >= 1: raise ValueError( - 'Due to the %s option we expected at least one file but input_files is of length %d' - % ( - n, - nr_input_files, - )) + "Due to the %s option we expected at least one file but input_files is of length %d" + % (n, nr_input_files,) + ) return super(Math, self)._parse_inputs() @@ -2171,58 +2324,58 @@ class ResampleInputSpec(CommandLineInputSpec): """ input_file = File( - desc='input file for resampling', + desc="input file for resampling", exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-2, ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_resample.mnc') + name_template="%s_resample.mnc", + ) # This is a dummy input. 
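+    # (Editor's note; this rationale is inferred from how the MINC wrappers
+    # are used elsewhere in this file, and is not documented upstream.)
+    # The dummy input never maps to a command-line argument: it exists so a
+    # workflow can connect the _grid_*.mnc displacement volumes referenced by
+    # a nonlinear .xfm, ensuring they are staged into the working directory
+    # alongside the transform that mincresample actually receives.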
- input_grid_files = InputMultiPath( - File, - desc='input grid file(s)', - ) + input_grid_files = InputMultiPath(File, desc="input grid file(s)",) - two = traits.Bool(desc='Create a MINC 2 output file.', argstr='-2') + two = traits.Bool(desc="Create a MINC 2 output file.", argstr="-2") clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) - _xor_interpolation = ('trilinear_interpolation', 'tricubic_interpolation', - 'nearest_neighbour_interpolation', - 'sinc_interpolation') + _xor_interpolation = ( + "trilinear_interpolation", + "tricubic_interpolation", + "nearest_neighbour_interpolation", + "sinc_interpolation", + ) trilinear_interpolation = traits.Bool( - desc='Do trilinear interpolation.', - argstr='-trilinear', - xor=_xor_interpolation) + desc="Do trilinear interpolation.", argstr="-trilinear", xor=_xor_interpolation + ) tricubic_interpolation = traits.Bool( - desc='Do tricubic interpolation.', - argstr='-tricubic', - xor=_xor_interpolation) + desc="Do tricubic interpolation.", argstr="-tricubic", xor=_xor_interpolation + ) nearest_neighbour_interpolation = traits.Bool( - desc='Do nearest neighbour interpolation.', - argstr='-nearest_neighbour', - xor=_xor_interpolation) + desc="Do nearest neighbour interpolation.", + argstr="-nearest_neighbour", + xor=_xor_interpolation, + ) sinc_interpolation = traits.Bool( - desc='Do windowed sinc interpolation.', - argstr='-sinc', - xor=_xor_interpolation) + desc="Do windowed sinc interpolation.", argstr="-sinc", xor=_xor_interpolation + ) half_width_sinc_window = traits.Enum( 5, @@ -2235,293 +2388,343 @@ class ResampleInputSpec(CommandLineInputSpec): 8, 9, 10, - desc='Set half-width of sinc window (1-10). Default value: 5.', - argstr='-width %s', - requires=['sinc_interpolation']) + desc="Set half-width of sinc window (1-10). Default value: 5.", + argstr="-width %s", + requires=["sinc_interpolation"], + ) - _xor_sinc_window_type = ('sinc_window_hanning', 'sinc_window_hamming') + _xor_sinc_window_type = ("sinc_window_hanning", "sinc_window_hamming") sinc_window_hanning = traits.Bool( - desc='Set sinc window type to Hanning.', - argstr='-hanning', + desc="Set sinc window type to Hanning.", + argstr="-hanning", xor=_xor_sinc_window_type, - requires=['sinc_interpolation']) + requires=["sinc_interpolation"], + ) sinc_window_hamming = traits.Bool( - desc='Set sinc window type to Hamming.', - argstr='-hamming', + desc="Set sinc window type to Hamming.", + argstr="-hamming", xor=_xor_sinc_window_type, - requires=['sinc_interpolation']) + requires=["sinc_interpolation"], + ) transformation = File( - desc='File giving world transformation. (Default = identity).', + desc="File giving world transformation. 
(Default = identity).", exists=True, - argstr='-transformation %s') + argstr="-transformation %s", + ) invert_transformation = traits.Bool( - desc='Invert the transformation before using it.', - argstr='-invert_transformation') + desc="Invert the transformation before using it.", + argstr="-invert_transformation", + ) - _xor_input_sampling = ('vio_transform', 'no_input_sampling') + _xor_input_sampling = ("vio_transform", "no_input_sampling") vio_transform = traits.Bool( - desc='VIO_Transform the input sampling with the transform (default).', - argstr='-tfm_input_sampling', - xor=_xor_input_sampling) + desc="VIO_Transform the input sampling with the transform (default).", + argstr="-tfm_input_sampling", + xor=_xor_input_sampling, + ) no_input_sampling = traits.Bool( - desc='Use the input sampling without transforming (old behaviour).', - argstr='-use_input_sampling', - xor=_xor_input_sampling) + desc="Use the input sampling without transforming (old behaviour).", + argstr="-use_input_sampling", + xor=_xor_input_sampling, + ) like = File( - desc='Specifies a model file for the resampling.', - argstr='-like %s', - exists=True) + desc="Specifies a model file for the resampling.", + argstr="-like %s", + exists=True, + ) _xor_format = ( - 'format_byte', - 'format_short', - 'format_int', - 'format_long', - 'format_float', - 'format_double', - 'format_signed', - 'format_unsigned', + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", ) format_byte = traits.Bool( - desc='Write out byte data.', argstr='-byte', xor=_xor_format) + desc="Write out byte data.", argstr="-byte", xor=_xor_format + ) format_short = traits.Bool( - desc='Write out short integer data.', argstr='-short', xor=_xor_format) + desc="Write out short integer data.", argstr="-short", xor=_xor_format + ) format_int = traits.Bool( - desc='Write out 32-bit integer data.', argstr='-int', xor=_xor_format) + desc="Write out 32-bit integer data.", argstr="-int", xor=_xor_format + ) format_long = traits.Bool( - desc='Superseded by -int.', argstr='-long', xor=_xor_format) + desc="Superseded by -int.", argstr="-long", xor=_xor_format + ) format_float = traits.Bool( - desc='Write out single-precision floating-point data.', - argstr='-float', - xor=_xor_format) + desc="Write out single-precision floating-point data.", + argstr="-float", + xor=_xor_format, + ) format_double = traits.Bool( - desc='Write out double-precision floating-point data.', - argstr='-double', - xor=_xor_format) + desc="Write out double-precision floating-point data.", + argstr="-double", + xor=_xor_format, + ) format_signed = traits.Bool( - desc='Write signed integer data.', argstr='-signed', xor=_xor_format) + desc="Write signed integer data.", argstr="-signed", xor=_xor_format + ) format_unsigned = traits.Bool( - desc='Write unsigned integer data (default).', - argstr='-unsigned', - xor=_xor_format) + desc="Write unsigned integer data (default).", + argstr="-unsigned", + xor=_xor_format, + ) output_range = traits.Tuple( traits.Float, traits.Float, - argstr='-range %s %s', - desc= - 'Valid range for output data. Default value: -1.79769e+308 -1.79769e+308.' + argstr="-range %s %s", + desc="Valid range for output data. 
Default value: -1.79769e+308 -1.79769e+308.",
    )

-    _xor_slices = ('transverse', 'sagittal', 'coronal')
+    _xor_slices = ("transverse_slices", "sagittal_slices", "coronal_slices")

    transverse_slices = traits.Bool(
-        desc='Write out transverse slices.',
-        argstr='-transverse',
-        xor=_xor_slices)
+        desc="Write out transverse slices.", argstr="-transverse", xor=_xor_slices
+    )
    sagittal_slices = traits.Bool(
-        desc='Write out sagittal slices', argstr='-sagittal', xor=_xor_slices)
+        desc="Write out sagittal slices", argstr="-sagittal", xor=_xor_slices
+    )
    coronal_slices = traits.Bool(
-        desc='Write out coronal slices', argstr='-coronal', xor=_xor_slices)
+        desc="Write out coronal slices", argstr="-coronal", xor=_xor_slices
+    )

-    _xor_fill = ('nofill', 'fill')
+    _xor_fill = ("no_fill", "fill")

    no_fill = traits.Bool(
-        desc='Use value zero for points outside of input volume.',
-        argstr='-nofill',
-        xor=_xor_fill)
+        desc="Use value zero for points outside of input volume.",
+        argstr="-nofill",
+        xor=_xor_fill,
+    )
    fill = traits.Bool(
-        desc='Use a fill value for points outside of input volume.',
-        argstr='-fill',
-        xor=_xor_fill)
+        desc="Use a fill value for points outside of input volume.",
+        argstr="-fill",
+        xor=_xor_fill,
+    )
    fill_value = traits.Float(
-        desc=('Specify a fill value for points outside of input volume.'
-              'Default value: 1.79769e+308.'),
-        argstr='-fillvalue %s',
-        requires=['fill'])
+        desc=(
+            "Specify a fill value for points outside of input volume. "
+            "Default value: 1.79769e+308."
+        ),
+        argstr="-fillvalue %s",
+        requires=["fill"],
+    )

-    _xor_scale = ('keep_real_range', 'nokeep_real_range')
+    _xor_scale = ("keep_real_range", "nokeep_real_range")

    keep_real_range = traits.Bool(
-        desc='Keep the real scale of the input volume.',
-        argstr='-keep_real_range',
-        xor=_xor_scale)
+        desc="Keep the real scale of the input volume.",
+        argstr="-keep_real_range",
+        xor=_xor_scale,
+    )
    nokeep_real_range = traits.Bool(
-        desc='Do not keep the real scale of the data (default).',
-        argstr='-nokeep_real_range',
-        xor=_xor_scale)
+        desc="Do not keep the real scale of the data (default).",
+        argstr="-nokeep_real_range",
+        xor=_xor_scale,
+    )

-    _xor_spacetype = ('spacetype', 'talairach')
+    _xor_spacetype = ("spacetype", "talairach")

    spacetype = traits.Str(
-        desc='Set the spacetype attribute to a specified string.',
-        argstr='-spacetype %s')
-    talairach = traits.Bool(
-        desc='Output is in Talairach space.', argstr='-talairach')
+        desc="Set the spacetype attribute to a specified string.",
+        argstr="-spacetype %s",
+    )
+    talairach = traits.Bool(desc="Output is in Talairach space.", argstr="-talairach")

    origin = traits.Tuple(
        traits.Float,
        traits.Float,
        traits.Float,
-        desc=('Origin of first pixel in 3D space.'
-              'Default value: 1.79769e+308 1.79769e+308 1.79769e+308.'),
-        argstr='-origin %s %s %s')
+        desc=(
+            "Origin of first pixel in 3D space. "
+            "Default value: 1.79769e+308 1.79769e+308 1.79769e+308."
+        ),
+        argstr="-origin %s %s %s",
+    )

    standard_sampling = traits.Bool(
-        desc='Set the sampling to standard values (step, start and dircos).',
-        argstr='-standard_sampling')  # FIXME Bool?
+        desc="Set the sampling to standard values (step, start and dircos).",
+        argstr="-standard_sampling",
+    )  # FIXME Bool?
    units = traits.Str(
-        desc='Specify the units of the output sampling.',
-        argstr='-units %s')  # FIXME String?
+        desc="Specify the units of the output sampling.", argstr="-units %s"
+    )  # FIXME String?

    # Elements along each dimension.
    # FIXME Ints? Ranges?
    # FIXME Check that this xor behaves correctly.
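+    # (Editor's note) In nipype input specs, xor=(...) lists the *trait names*
+    # that are mutually exclusive with this input; setting two members of the
+    # same xor group is rejected when the command line is built. The tuples
+    # here therefore only have an effect when they name real traits.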
-    _xor_nelements = ('nelements', 'nelements_x_y_or_z')
+    _xor_nelements = ("nelements", "nelements_x_y_or_z")

    # nr elements along each dimension
    nelements = traits.Tuple(
        traits.Int,
        traits.Int,
        traits.Int,
-        desc='Number of elements along each dimension (X, Y, Z).',
-        argstr='-nelements %s %s %s',
-        xor=_xor_nelements)
+        desc="Number of elements along each dimension (X, Y, Z).",
+        argstr="-nelements %s %s %s",
+        xor=_xor_nelements,
+    )

    # FIXME Is mincresample happy if we only specify one of these, or do we
    # need the requires=...?
    xnelements = traits.Int(
-        desc='Number of elements along the X dimension.',
-        argstr='-xnelements %s',
-        requires=('ynelements', 'znelements'),
-        xor=_xor_nelements)
+        desc="Number of elements along the X dimension.",
+        argstr="-xnelements %s",
+        requires=("ynelements", "znelements"),
+        xor=_xor_nelements,
+    )
    ynelements = traits.Int(
-        desc='Number of elements along the Y dimension.',
-        argstr='-ynelements %s',
-        requires=('xnelements', 'znelements'),
-        xor=_xor_nelements)
+        desc="Number of elements along the Y dimension.",
+        argstr="-ynelements %s",
+        requires=("xnelements", "znelements"),
+        xor=_xor_nelements,
+    )
    znelements = traits.Int(
-        desc='Number of elements along the Z dimension.',
-        argstr='-znelements %s',
-        requires=('xnelements', 'ynelements'),
-        xor=_xor_nelements)
+        desc="Number of elements along the Z dimension.",
+        argstr="-znelements %s",
+        requires=("xnelements", "ynelements"),
+        xor=_xor_nelements,
+    )

    # step size along each dimension
-    _xor_step = ('step', 'step_x_y_or_z')
+    _xor_step = ("step", "step_x_y_or_z")

    step = traits.Tuple(
        traits.Int,
        traits.Int,
        traits.Int,
-        desc=
-        'Step size along each dimension (X, Y, Z). Default value: (0, 0, 0).',
-        argstr='-step %s %s %s',
-        xor=_xor_nelements)
+        desc="Step size along each dimension (X, Y, Z). Default value: (0, 0, 0).",
+        argstr="-step %s %s %s",
+        xor=_xor_step,
+    )

    # FIXME Use the requires=...?
    xstep = traits.Int(
-        desc='Step size along the X dimension. Default value: 0.',
-        argstr='-xstep %s',
-        requires=('ystep', 'zstep'),
-        xor=_xor_step)
+        desc="Step size along the X dimension. Default value: 0.",
+        argstr="-xstep %s",
+        requires=("ystep", "zstep"),
+        xor=_xor_step,
+    )
    ystep = traits.Int(
-        desc='Step size along the Y dimension. Default value: 0.',
-        argstr='-ystep %s',
-        requires=('xstep', 'zstep'),
-        xor=_xor_step)
+        desc="Step size along the Y dimension. Default value: 0.",
+        argstr="-ystep %s",
+        requires=("xstep", "zstep"),
+        xor=_xor_step,
+    )
    zstep = traits.Int(
-        desc='Step size along the Z dimension. Default value: 0.',
-        argstr='-zstep %s',
-        requires=('xstep', 'ystep'),
-        xor=_xor_step)
+        desc="Step size along the Z dimension. Default value: 0.",
+        argstr="-zstep %s",
+        requires=("xstep", "ystep"),
+        xor=_xor_step,
+    )

    # start point along each dimension
-    _xor_start = ('start', 'start_x_y_or_z')
+    _xor_start = ("start", "start_x_y_or_z")

    start = traits.Tuple(
        traits.Float,
        traits.Float,
        traits.Float,
-        desc=('Start point along each dimension (X, Y, Z).'
-              'Default value: 1.79769e+308 1.79769e+308 1.79769e+308.'),
-        argstr='-start %s %s %s',
-        xor=_xor_nelements)
+        desc=(
+            "Start point along each dimension (X, Y, Z). "
+            "Default value: 1.79769e+308 1.79769e+308 1.79769e+308."
+        ),
+        argstr="-start %s %s %s",
+        xor=_xor_start,
+    )

    # FIXME Use the requires=...?
    xstart = traits.Float(
-        desc='Start point along the X dimension. Default value: 1.79769e+308.',
-        argstr='-xstart %s',
-        requires=('ystart', 'zstart'),
-        xor=_xor_start)
+        desc="Start point along the X dimension. Default value: 1.79769e+308.",
+        argstr="-xstart %s",
+        requires=("ystart", "zstart"),
+        xor=_xor_start,
+    )
    ystart = traits.Float(
-        desc='Start point along the Y dimension. Default value: 1.79769e+308.',
-        argstr='-ystart %s',
-        requires=('xstart', 'zstart'),
-        xor=_xor_start)
+        desc="Start point along the Y dimension. Default value: 1.79769e+308.",
+        argstr="-ystart %s",
+        requires=("xstart", "zstart"),
+        xor=_xor_start,
+    )
    zstart = traits.Float(
-        desc='Start point along the Z dimension. Default value: 1.79769e+308.',
-        argstr='-zstart %s',
-        requires=('xstart', 'ystart'),
-        xor=_xor_start)
+        desc="Start point along the Z dimension. Default value: 1.79769e+308.",
+        argstr="-zstart %s",
+        requires=("xstart", "ystart"),
+        xor=_xor_start,
+    )

    # dircos along each dimension
-    _xor_dircos = ('dircos', 'dircos_x_y_or_z')
+    _xor_dircos = ("dircos", "dircos_x_y_or_z")

    dircos = traits.Tuple(
        traits.Float,
        traits.Float,
        traits.Float,
        desc=(
-            'Direction cosines along each dimension (X, Y, Z). Default value:'
-            '1.79769e+308 1.79769e+308 1.79769e+308 1.79769e+308 ...'
-            ' 1.79769e+308 1.79769e+308 1.79769e+308 1.79769e+308 1.79769e+308.'
+            "Direction cosines along each dimension (X, Y, Z). Default value: "
+            "1.79769e+308 1.79769e+308 1.79769e+308 1.79769e+308 ..."
+            " 1.79769e+308 1.79769e+308 1.79769e+308 1.79769e+308 1.79769e+308."
        ),
-        argstr='-dircos %s %s %s',
-        xor=_xor_nelements)
+        argstr="-dircos %s %s %s",
+        xor=_xor_dircos,
+    )

    # FIXME Use the requires=...?
    xdircos = traits.Float(
-        desc=('Direction cosines along the X dimension.'
-              'Default value: 1.79769e+308 1.79769e+308 1.79769e+308.'),
-        argstr='-xdircos %s',
-        requires=('ydircos', 'zdircos'),
-        xor=_xor_dircos)
+        desc=(
+            "Direction cosines along the X dimension. "
+            "Default value: 1.79769e+308 1.79769e+308 1.79769e+308."
+        ),
+        argstr="-xdircos %s",
+        requires=("ydircos", "zdircos"),
+        xor=_xor_dircos,
+    )
    ydircos = traits.Float(
-        desc=('Direction cosines along the Y dimension.'
-              'Default value: 1.79769e+308 1.79769e+308 1.79769e+308.'),
-        argstr='-ydircos %s',
-        requires=('xdircos', 'zdircos'),
-        xor=_xor_dircos)
+        desc=(
+            "Direction cosines along the Y dimension. "
+            "Default value: 1.79769e+308 1.79769e+308 1.79769e+308."
+        ),
+        argstr="-ydircos %s",
+        requires=("xdircos", "zdircos"),
+        xor=_xor_dircos,
+    )
    zdircos = traits.Float(
-        desc=('Direction cosines along the Z dimension.'
-              'Default value: 1.79769e+308 1.79769e+308 1.79769e+308.'),
-        argstr='-zdircos %s',
-        requires=('xdircos', 'ydircos'),
-        xor=_xor_dircos)
+        desc=(
+            "Direction cosines along the Z dimension. "
+            "Default value: 1.79769e+308 1.79769e+308 1.79769e+308."
+        ),
+        argstr="-zdircos %s",
+        requires=("xdircos", "ydircos"),
+        xor=_xor_dircos,
+    )


class ResampleOutputSpec(TraitedSpec):
-    output_file = File(desc='output file', exists=True)
+    output_file = File(desc="output file", exists=True)


class Resample(StdOutCommandLine):
@@ -2540,7 +2743,7 @@ class Resample(StdOutCommandLine):
    input_spec = ResampleInputSpec
    output_spec = ResampleOutputSpec

-    _cmd = 'mincresample'
+    _cmd = "mincresample"


class NormInputSpec(CommandLineInputSpec):
@@ -2558,89 +2761,95 @@ class NormInputSpec(CommandLineInputSpec):
    """

    input_file = File(
-        desc='input file to normalise',
+        desc="input file to normalise",
        exists=True,
        mandatory=True,
-        argstr='%s',
+        argstr="%s",
        position=-2,
    )
    output_file = File(
-        desc='output file',
+        desc="output file",
        genfile=True,
-        argstr='%s',
+        argstr="%s",
        position=-1,
-        name_source=['input_file'],
+        name_source=["input_file"],
        hash_files=False,
-        name_template='%s_norm.mnc')
+        name_template="%s_norm.mnc",
+    )

    output_threshold_mask = File(
-        desc='File in which to store the threshold mask.',
-        argstr='-threshold_mask %s',
-        name_source=['input_file'],
+        desc="File in which to store the threshold mask.",
+        argstr="-threshold_mask %s",
+        name_source=["input_file"],
        hash_files=False,
-        name_template='%s_norm_threshold_mask.mnc')
+        name_template="%s_norm_threshold_mask.mnc",
+    )

    clobber = traits.Bool(
-        desc='Overwrite existing file.',
-        argstr='-clobber',
+        desc="Overwrite existing file.",
+        argstr="-clobber",
        usedefault=True,
-        default_value=True)
+        default_value=True,
+    )

    # Normalisation Options
    mask = File(
-        desc='Calculate the image normalisation within a mask.',
-        argstr='-mask %s',
-        exists=True)
+        desc="Calculate the image normalisation within a mask.",
+        argstr="-mask %s",
+        exists=True,
+    )
    clamp = traits.Bool(
-        desc='Force the ouput range between limits [default].',
-        argstr='-clamp',
+        desc="Force the output range between limits [default].",
+        argstr="-clamp",
        usedefault=True,
-        default_value=True)
+        default_value=True,
+    )

    cutoff = traits.Range(
        low=0.0,
        high=100.0,
-        desc=
-        'Cutoff value to use to calculate thresholds by a histogram PcT in %. [default: 0.01]',
-        argstr='-cutoff %s',
+        desc="Cutoff value to use to calculate thresholds by a histogram PcT in %. [default: 0.01]",
+        argstr="-cutoff %s",
    )

-    lower = traits.Float(desc='Lower real value to use.', argstr='-lower %s')
-    upper = traits.Float(desc='Upper real value to use.', argstr='-upper %s')
+    lower = traits.Float(desc="Lower real value to use.", argstr="-lower %s")
+    upper = traits.Float(desc="Upper real value to use.", argstr="-upper %s")

    out_floor = traits.Float(
-        desc='Output files maximum [default: 0]',
-        argstr='-out_floor %s')  # FIXME is this a float?
+        desc="Output files minimum [default: 0]", argstr="-out_floor %s"
+    )  # FIXME is this a float?
    out_ceil = traits.Float(
-        desc='Output files minimum [default: 100]',
-        argstr='-out_ceil %s')  # FIXME is this a float?
+        desc="Output files maximum [default: 100]", argstr="-out_ceil %s"
+    )  # FIXME is this a float?
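+    # Usage sketch (editor's addition; the file name is a placeholder and the
+    # exact flag order in the generated command is not guaranteed):
+    #
+    #     norm = Norm(input_file="in.mnc", cutoff=0.01)
+    #     norm.cmdline  # roughly: mincnorm -clamp -clobber -cutoff 0.01 in.mnc in_norm.mnc
+    #     norm.run()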
# Threshold Options threshold = traits.Bool( - desc= - 'Threshold the image (set values below threshold_perc to -out_floor).', - argstr='-threshold') + desc="Threshold the image (set values below threshold_perc to -out_floor).", + argstr="-threshold", + ) threshold_perc = traits.Range( low=0.0, high=100.0, - desc= - 'Threshold percentage (0.1 == lower 10% of intensity range) [default: 0.1].', - argstr='-threshold_perc %s') + desc="Threshold percentage (0.1 == lower 10% of intensity range) [default: 0.1].", + argstr="-threshold_perc %s", + ) threshold_bmt = traits.Bool( - desc='Use the resulting image BiModalT as the threshold.', - argstr='-threshold_bmt') + desc="Use the resulting image BiModalT as the threshold.", + argstr="-threshold_bmt", + ) threshold_blur = traits.Float( - desc='Blur FWHM for intensity edges then thresholding [default: 2].', - argstr='-threshold_blur %s') + desc="Blur FWHM for intensity edges then thresholding [default: 2].", + argstr="-threshold_blur %s", + ) class NormOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) - output_threshold_mask = File(desc='threshold mask file') + output_file = File(desc="output file", exists=True) + output_threshold_mask = File(desc="threshold mask file") class Norm(CommandLine): @@ -2658,7 +2867,7 @@ class Norm(CommandLine): input_spec = NormInputSpec output_spec = NormOutputSpec - _cmd = 'mincnorm' + _cmd = "mincnorm" """ @@ -2699,50 +2908,53 @@ class VolcentreInputSpec(CommandLineInputSpec): """ input_file = File( - desc='input file to centre', + desc="input file to centre", exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-2, ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_volcentre.mnc') + name_template="%s_volcentre.mnc", + ) verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='-verbose') + desc="Print out log messages. Default: False.", argstr="-verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) com = traits.Bool( - desc= - 'Use the CoM of the volume for the new centre (via mincstats). Default: False', - argstr='-com') + desc="Use the CoM of the volume for the new centre (via mincstats). 
Default: False", + argstr="-com", + ) centre = traits.Tuple( traits.Float, traits.Float, traits.Float, - argstr='-centre %s %s %s', - desc='Centre to use (x,y,z) [default: 0 0 0].', + argstr="-centre %s %s %s", + desc="Centre to use (x,y,z) [default: 0 0 0].", ) zero_dircos = traits.Bool( - desc='Set the direction cosines to identity [default].', - argstr='-zero_dircos') + desc="Set the direction cosines to identity [default].", argstr="-zero_dircos" + ) class VolcentreOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Volcentre(CommandLine): @@ -2759,7 +2971,7 @@ class Volcentre(CommandLine): input_spec = VolcentreInputSpec output_spec = VolcentreOutputSpec - _cmd = 'volcentre' + _cmd = "volcentre" class VolpadInputSpec(CommandLineInputSpec): @@ -2787,55 +2999,59 @@ class VolpadInputSpec(CommandLineInputSpec): """ input_file = File( - desc='input file to centre', + desc="input file to centre", exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-2, ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_volpad.mnc') + name_template="%s_volpad.mnc", + ) verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='-verbose') + desc="Print out log messages. Default: False.", argstr="-verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) auto = traits.Bool( - desc= - 'Automatically determine padding distances (uses -distance as max). Default: False.', - argstr='-auto') + desc="Automatically determine padding distances (uses -distance as max). Default: False.", + argstr="-auto", + ) auto_freq = traits.Float( - desc= - 'Frequency of voxels over bimodalt threshold to stop at [default: 500].', - argstr='-auto_freq %s') + desc="Frequency of voxels over bimodalt threshold to stop at [default: 500].", + argstr="-auto_freq %s", + ) distance = traits.Int( - desc='Padding distance (in voxels) [default: 4].', - argstr='-distance %s') + desc="Padding distance (in voxels) [default: 4].", argstr="-distance %s" + ) smooth = traits.Bool( - desc='Smooth (blur) edges before padding. Default: False.', - argstr='-smooth') + desc="Smooth (blur) edges before padding. 
Default: False.", argstr="-smooth" + ) smooth_distance = traits.Int( - desc='Smoothing distance (in voxels) [default: 4].', - argstr='-smooth_distance %s') + desc="Smoothing distance (in voxels) [default: 4].", + argstr="-smooth_distance %s", + ) class VolpadOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Volpad(CommandLine): @@ -2852,52 +3068,57 @@ class Volpad(CommandLine): input_spec = VolpadInputSpec output_spec = VolpadOutputSpec - _cmd = 'volpad' + _cmd = "volpad" class VolisoInputSpec(CommandLineInputSpec): input_file = File( - desc='input file to convert to isotropic sampling', + desc="input file to convert to isotropic sampling", exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-2, ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_voliso.mnc') + name_template="%s_voliso.mnc", + ) verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='--verbose') + desc="Print out log messages. Default: False.", argstr="--verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='--clobber', + desc="Overwrite existing file.", + argstr="--clobber", usedefault=True, - default_value=True) + default_value=True, + ) maxstep = traits.Float( - desc='The target maximum step desired in the output volume.', - argstr='--maxstep %s') + desc="The target maximum step desired in the output volume.", + argstr="--maxstep %s", + ) minstep = traits.Float( - desc='The target minimum step desired in the output volume.', - argstr='--minstep %s') + desc="The target minimum step desired in the output volume.", + argstr="--minstep %s", + ) avgstep = traits.Bool( - desc= - 'Calculate the maximum step from the average steps of the input volume.', - argstr='--avgstep') + desc="Calculate the maximum step from the average steps of the input volume.", + argstr="--avgstep", + ) class VolisoOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Voliso(CommandLine): @@ -2915,42 +3136,43 @@ class Voliso(CommandLine): input_spec = VolisoInputSpec output_spec = VolisoOutputSpec - _cmd = 'voliso' + _cmd = "voliso" class GennlxfmInputSpec(CommandLineInputSpec): output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['like'], + name_source=["like"], hash_files=False, - name_template='%s_gennlxfm.xfm') + name_template="%s_gennlxfm.xfm", + ) verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='-verbose') + desc="Print out log messages. Default: False.", argstr="-verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) ident = traits.Bool( - desc='Generate an identity xfm. Default: False.', argstr='-ident') - step = traits.Int( - desc='Output ident xfm step [default: 1].', argstr='-step %s') + desc="Generate an identity xfm. 
Default: False.", argstr="-ident" + ) + step = traits.Int(desc="Output ident xfm step [default: 1].", argstr="-step %s") like = File( - desc='Generate a nlxfm like this file.', - exists=True, - argstr='-like %s', + desc="Generate a nlxfm like this file.", exists=True, argstr="-like %s", ) class GennlxfmOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) - output_grid = File(desc='output grid', exists=True) + output_file = File(desc="output file", exists=True) + output_grid = File(desc="output grid", exists=True) class Gennlxfm(CommandLine): @@ -2973,51 +3195,53 @@ class Gennlxfm(CommandLine): input_spec = GennlxfmInputSpec output_spec = GennlxfmOutputSpec - _cmd = 'gennlxfm' + _cmd = "gennlxfm" def _list_outputs(self): outputs = super(Gennlxfm, self)._list_outputs() - outputs['output_grid'] = re.sub('.(nlxfm|xfm)$', '_grid_0.mnc', - outputs['output_file']) + outputs["output_grid"] = re.sub( + ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["output_file"] + ) return outputs class XfmConcatInputSpec(CommandLineInputSpec): input_files = InputMultiPath( File(exists=True), - desc='input file(s)', + desc="input file(s)", mandatory=True, - sep=' ', - argstr='%s', - position=-2) + sep=" ", + argstr="%s", + position=-2, + ) # This is a dummy input. - input_grid_files = InputMultiPath( - File, - desc='input grid file(s)', - ) + input_grid_files = InputMultiPath(File, desc="input grid file(s)",) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_files'], + name_source=["input_files"], hash_files=False, - name_template='%s_xfmconcat.xfm') + name_template="%s_xfmconcat.xfm", + ) verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='-verbose') + desc="Print out log messages. 
Default: False.", argstr="-verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) class XfmConcatOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) - output_grids = OutputMultiPath(File(exists=True), desc='output grids') + output_file = File(desc="output file", exists=True) + output_grids = OutputMultiPath(File(exists=True), desc="output grids") class XfmConcat(CommandLine): @@ -3036,64 +3260,60 @@ class XfmConcat(CommandLine): input_spec = XfmConcatInputSpec output_spec = XfmConcatOutputSpec - _cmd = 'xfmconcat' + _cmd = "xfmconcat" def _list_outputs(self): outputs = super(XfmConcat, self)._list_outputs() - if os.path.exists(outputs['output_file']): - if 'grid' in open(outputs['output_file'], 'r').read(): - outputs['output_grids'] = glob.glob( - re.sub('.(nlxfm|xfm)$', '_grid_*.mnc', - outputs['output_file'])) + if os.path.exists(outputs["output_file"]): + if "grid" in open(outputs["output_file"], "r").read(): + outputs["output_grids"] = glob.glob( + re.sub(".(nlxfm|xfm)$", "_grid_*.mnc", outputs["output_file"]) + ) return outputs class BestLinRegInputSpec(CommandLineInputSpec): source = File( - desc='source Minc file', - exists=True, - mandatory=True, - argstr='%s', - position=-4, + desc="source Minc file", exists=True, mandatory=True, argstr="%s", position=-4, ) target = File( - desc='target Minc file', - exists=True, - mandatory=True, - argstr='%s', - position=-3, + desc="target Minc file", exists=True, mandatory=True, argstr="%s", position=-3, ) output_xfm = File( - desc='output xfm file', + desc="output xfm file", genfile=True, - argstr='%s', + argstr="%s", position=-2, - name_source=['source'], + name_source=["source"], hash_files=False, - name_template='%s_bestlinreg.xfm', - keep_extension=False) + name_template="%s_bestlinreg.xfm", + keep_extension=False, + ) output_mnc = File( - desc='output mnc file', + desc="output mnc file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['source'], + name_source=["source"], hash_files=False, - name_template='%s_bestlinreg.mnc', - keep_extension=False) + name_template="%s_bestlinreg.mnc", + keep_extension=False, + ) verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='-verbose') + desc="Print out log messages. 
Default: False.", argstr="-verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) # FIXME Very bare implementation, none of these are done yet: """ @@ -3107,8 +3327,8 @@ class BestLinRegInputSpec(CommandLineInputSpec): class BestLinRegOutputSpec(TraitedSpec): - output_xfm = File(desc='output xfm file', exists=True) - output_mnc = File(desc='output mnc file', exists=True) + output_xfm = File(desc="output xfm file", exists=True) + output_mnc = File(desc="output mnc file", exists=True) class BestLinReg(CommandLine): @@ -3132,69 +3352,58 @@ class BestLinReg(CommandLine): input_spec = BestLinRegInputSpec output_spec = BestLinRegOutputSpec - _cmd = 'bestlinreg' + _cmd = "bestlinreg" class NlpFitInputSpec(CommandLineInputSpec): source = File( - desc='source Minc file', - exists=True, - mandatory=True, - argstr='%s', - position=-3, + desc="source Minc file", exists=True, mandatory=True, argstr="%s", position=-3, ) target = File( - desc='target Minc file', - exists=True, - mandatory=True, - argstr='%s', - position=-2, + desc="target Minc file", exists=True, mandatory=True, argstr="%s", position=-2, ) - output_xfm = File( - desc='output xfm file', - genfile=True, - argstr='%s', - position=-1, - ) + output_xfm = File(desc="output xfm file", genfile=True, argstr="%s", position=-1,) # This is a dummy input. - input_grid_files = InputMultiPath( - File, - desc='input grid file(s)', - ) + input_grid_files = InputMultiPath(File, desc="input grid file(s)",) config_file = File( - desc='File containing the fitting configuration use.', - argstr='-config_file %s', + desc="File containing the fitting configuration use.", + argstr="-config_file %s", mandatory=True, - exists=True) + exists=True, + ) init_xfm = File( - desc='Initial transformation (default identity).', - argstr='-init_xfm %s', + desc="Initial transformation (default identity).", + argstr="-init_xfm %s", mandatory=True, - exists=True) + exists=True, + ) source_mask = File( - desc='Source mask to use during fitting.', - argstr='-source_mask %s', + desc="Source mask to use during fitting.", + argstr="-source_mask %s", mandatory=True, - exists=True) + exists=True, + ) verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='-verbose') + desc="Print out log messages. 
Default: False.", argstr="-verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) class NlpFitOutputSpec(TraitedSpec): - output_xfm = File(desc='output xfm file', exists=True) - output_grid = File(desc='output grid file', exists=True) + output_xfm = File(desc="output xfm file", exists=True) + output_grid = File(desc="output grid file", exists=True) class NlpFit(CommandLine): @@ -3222,30 +3431,33 @@ class NlpFit(CommandLine): input_spec = NlpFitInputSpec output_spec = NlpFitOutputSpec - _cmd = 'nlpfit' + _cmd = "nlpfit" def _gen_filename(self, name): - if name == 'output_xfm': + if name == "output_xfm": output_xfm = self.inputs.output_xfm if isdefined(output_xfm): return os.path.abspath(output_xfm) else: - return aggregate_filename( - [self.inputs.source, self.inputs.target], - 'nlpfit_xfm_output') + '.xfm' + return ( + aggregate_filename( + [self.inputs.source, self.inputs.target], "nlpfit_xfm_output" + ) + + ".xfm" + ) else: raise NotImplemented def _list_outputs(self): outputs = self.output_spec().get() - outputs['output_xfm'] = os.path.abspath( - self._gen_filename('output_xfm')) + outputs["output_xfm"] = os.path.abspath(self._gen_filename("output_xfm")) - assert os.path.exists(outputs['output_xfm']) - if 'grid' in open(outputs['output_xfm'], 'r').read(): - outputs['output_grid'] = re.sub('.(nlxfm|xfm)$', '_grid_0.mnc', - outputs['output_xfm']) + assert os.path.exists(outputs["output_xfm"]) + if "grid" in open(outputs["output_xfm"], "r").read(): + outputs["output_grid"] = re.sub( + ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["output_xfm"] + ) return outputs @@ -3253,49 +3465,48 @@ def _list_outputs(self): class XfmAvgInputSpec(CommandLineInputSpec): input_files = InputMultiPath( File(exists=True), - desc='input file(s)', + desc="input file(s)", mandatory=True, - sep=' ', - argstr='%s', - position=-2) + sep=" ", + argstr="%s", + position=-2, + ) # This is a dummy input. - input_grid_files = InputMultiPath( - File, - desc='input grid file(s)', - ) + input_grid_files = InputMultiPath(File, desc="input grid file(s)",) - output_file = File( - desc='output file', - genfile=True, - argstr='%s', - position=-1, - ) + output_file = File(desc="output file", genfile=True, argstr="%s", position=-1,) verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='-verbose') + desc="Print out log messages. 
Default: False.", argstr="-verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) # FIXME xor these: avg_linear = traits.Bool( - desc='average the linear part [default].', argstr='-avg_linear') + desc="average the linear part [default].", argstr="-avg_linear" + ) avg_nonlinear = traits.Bool( - desc='average the non-linear part [default].', argstr='-avg_nonlinear') + desc="average the non-linear part [default].", argstr="-avg_nonlinear" + ) ignore_linear = traits.Bool( - desc='opposite of -avg_linear.', argstr='-ignore_linear') + desc="opposite of -avg_linear.", argstr="-ignore_linear" + ) ignore_nonlinear = traits.Bool( - desc='opposite of -avg_nonlinear.', argstr='-ignore_nonline') + desc="opposite of -avg_nonlinear.", argstr="-ignore_nonline" + ) class XfmAvgOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) - output_grid = File(desc='output grid file', exists=True) + output_file = File(desc="output file", exists=True) + output_grid = File(desc="output grid file", exists=True) class XfmAvg(CommandLine): @@ -3321,62 +3532,59 @@ class XfmAvg(CommandLine): input_spec = XfmAvgInputSpec output_spec = XfmAvgOutputSpec - _cmd = 'xfmavg' + _cmd = "xfmavg" def _gen_filename(self, name): - if name == 'output_file': + if name == "output_file": output_file = self.inputs.output_file if isdefined(output_file): return os.path.abspath(output_file) else: - return aggregate_filename(self.inputs.input_files, - 'xfmavg_output') + '.xfm' + return ( + aggregate_filename(self.inputs.input_files, "xfmavg_output") + + ".xfm" + ) else: raise NotImplemented def _gen_outfilename(self): - return self._gen_filename('output_file') + return self._gen_filename("output_file") def _list_outputs(self): outputs = self.output_spec().get() - outputs['output_file'] = os.path.abspath(self._gen_outfilename()) + outputs["output_file"] = os.path.abspath(self._gen_outfilename()) - assert os.path.exists(outputs['output_file']) - if 'grid' in open(outputs['output_file'], 'r').read(): - outputs['output_grid'] = re.sub('.(nlxfm|xfm)$', '_grid_0.mnc', - outputs['output_file']) + assert os.path.exists(outputs["output_file"]) + if "grid" in open(outputs["output_file"], "r").read(): + outputs["output_grid"] = re.sub( + ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["output_file"] + ) return outputs class XfmInvertInputSpec(CommandLineInputSpec): input_file = File( - desc='input file', - exists=True, - mandatory=True, - argstr='%s', - position=-2) - - output_file = File( - desc='output file', - genfile=True, - argstr='%s', - position=-1, + desc="input file", exists=True, mandatory=True, argstr="%s", position=-2 ) + output_file = File(desc="output file", genfile=True, argstr="%s", position=-1,) + verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='-verbose') + desc="Print out log messages. 
Default: False.", argstr="-verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) class XfmInvertOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) - output_grid = File(desc='output grid file', exists=True) + output_file = File(desc="output file", exists=True) + output_grid = File(desc="output grid file", exists=True) class XfmInvert(CommandLine): @@ -3395,31 +3603,34 @@ class XfmInvert(CommandLine): input_spec = XfmInvertInputSpec output_spec = XfmInvertOutputSpec - _cmd = 'xfminvert' + _cmd = "xfminvert" def _gen_filename(self, name): - if name == 'output_file': + if name == "output_file": output_file = self.inputs.output_file if isdefined(output_file): return os.path.abspath(output_file) else: - return aggregate_filename([self.inputs.input_file], - 'xfminvert_output') + '.xfm' + return ( + aggregate_filename([self.inputs.input_file], "xfminvert_output") + + ".xfm" + ) else: raise NotImplemented def _gen_outfilename(self): - return self._gen_filename('output_file') + return self._gen_filename("output_file") def _list_outputs(self): outputs = self.output_spec().get() - outputs['output_file'] = os.path.abspath(self._gen_outfilename()) + outputs["output_file"] = os.path.abspath(self._gen_outfilename()) - assert os.path.exists(outputs['output_file']) - if 'grid' in open(outputs['output_file'], 'r').read(): - outputs['output_grid'] = re.sub('.(nlxfm|xfm)$', '_grid_0.mnc', - outputs['output_file']) + assert os.path.exists(outputs["output_file"]) + if "grid" in open(outputs["output_file"], "r").read(): + outputs["output_grid"] = re.sub( + ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["output_file"] + ) return outputs @@ -3427,54 +3638,63 @@ def _list_outputs(self): class BigAverageInputSpec(CommandLineInputSpec): input_files = InputMultiPath( File(exists=True), - desc='input file(s)', + desc="input file(s)", mandatory=True, - sep=' ', - argstr='%s', - position=-2) + sep=" ", + argstr="%s", + position=-2, + ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_files'], + name_source=["input_files"], hash_files=False, - name_template='%s_bigaverage.mnc') + name_template="%s_bigaverage.mnc", + ) verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='--verbose') + desc="Print out log messages. Default: False.", argstr="--verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='--clobber', + desc="Overwrite existing file.", + argstr="--clobber", usedefault=True, - default_value=True) + default_value=True, + ) # FIXME Redumentary implementation, various parameters not implemented. # TODO! output_float = traits.Bool( - desc='Output files with float precision.', argstr='--float') + desc="Output files with float precision.", argstr="--float" + ) robust = traits.Bool( - desc=('Perform robust averaging, features that are outside 1 standard' - 'deviation from the mean are downweighted. Works well for noisy' - 'data with artifacts. see the --tmpdir option if you have a' - 'large number of input files.'), - argstr='-robust') + desc=( + "Perform robust averaging, features that are outside 1 standard" + "deviation from the mean are downweighted. Works well for noisy" + "data with artifacts. see the --tmpdir option if you have a" + "large number of input files." 
+ ), + argstr="-robust", + ) # Should Nipype deal with where the temp directory is? - tmpdir = Directory(desc='temporary files directory', argstr='-tmpdir %s') + tmpdir = Directory(desc="temporary files directory", argstr="-tmpdir %s") sd_file = File( - desc='Place standard deviation image in specified file.', - argstr='--sdfile %s', - name_source=['input_files'], + desc="Place standard deviation image in specified file.", + argstr="--sdfile %s", + name_source=["input_files"], hash_files=False, - name_template='%s_bigaverage_stdev.mnc') + name_template="%s_bigaverage_stdev.mnc", + ) class BigAverageOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) - sd_file = File(desc='standard deviation image', exists=True) + output_file = File(desc="output file", exists=True) + sd_file = File(desc="standard deviation image", exists=True) class BigAverage(CommandLine): @@ -3512,42 +3732,41 @@ class BigAverage(CommandLine): input_spec = BigAverageInputSpec output_spec = BigAverageOutputSpec - _cmd = 'mincbigaverage' + _cmd = "mincbigaverage" class ReshapeInputSpec(CommandLineInputSpec): input_file = File( - desc='input file', - exists=True, - mandatory=True, - argstr='%s', - position=-2) + desc="input file", exists=True, mandatory=True, argstr="%s", position=-2 + ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_reshape.mnc') + name_template="%s_reshape.mnc", + ) verbose = traits.Bool( - desc='Print out log messages. Default: False.', argstr='-verbose') + desc="Print out log messages. Default: False.", argstr="-verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) # FIXME MANY options not implemented! - write_short = traits.Bool( - desc='Convert to short integer data.', argstr='-short') + write_short = traits.Bool(desc="Convert to short integer data.", argstr="-short") class ReshapeOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) + output_file = File(desc="output file", exists=True) class Reshape(CommandLine): @@ -3570,78 +3789,76 @@ class Reshape(CommandLine): input_spec = ReshapeInputSpec output_spec = ReshapeOutputSpec - _cmd = 'mincreshape' + _cmd = "mincreshape" class VolSymmInputSpec(CommandLineInputSpec): input_file = File( - desc='input file', - exists=True, - mandatory=True, - argstr='%s', - position=-3) + desc="input file", exists=True, mandatory=True, argstr="%s", position=-3 + ) trans_file = File( - desc='output xfm trans file', + desc="output xfm trans file", genfile=True, - argstr='%s', + argstr="%s", position=-2, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_vol_symm.xfm', - keep_extension=False) + name_template="%s_vol_symm.xfm", + keep_extension=False, + ) output_file = File( - desc='output file', + desc="output file", genfile=True, - argstr='%s', + argstr="%s", position=-1, - name_source=['input_file'], + name_source=["input_file"], hash_files=False, - name_template='%s_vol_symm.mnc') + name_template="%s_vol_symm.mnc", + ) # This is a dummy input. - input_grid_files = InputMultiPath( - File, - desc='input grid file(s)', - ) + input_grid_files = InputMultiPath(File, desc="input grid file(s)",) verbose = traits.Bool( - desc='Print out log messages. 
Default: False.', argstr='-verbose') + desc="Print out log messages. Default: False.", argstr="-verbose" + ) clobber = traits.Bool( - desc='Overwrite existing file.', - argstr='-clobber', + desc="Overwrite existing file.", + argstr="-clobber", usedefault=True, - default_value=True) + default_value=True, + ) # FIXME MANY options not implemented! - fit_linear = traits.Bool(desc='Fit using a linear xfm.', argstr='-linear') - fit_nonlinear = traits.Bool( - desc='Fit using a non-linear xfm.', argstr='-nonlinear') + fit_linear = traits.Bool(desc="Fit using a linear xfm.", argstr="-linear") + fit_nonlinear = traits.Bool(desc="Fit using a non-linear xfm.", argstr="-nonlinear") # FIXME This changes the input/output behaviour of trans_file! Split into # two separate interfaces? nofit = traits.Bool( - desc='Use the input transformation instead of generating one.', - argstr='-nofit') + desc="Use the input transformation instead of generating one.", argstr="-nofit" + ) config_file = File( - desc= - 'File containing the fitting configuration (nlpfit -help for info).', - argstr='-config_file %s', - exists=True) + desc="File containing the fitting configuration (nlpfit -help for info).", + argstr="-config_file %s", + exists=True, + ) - x = traits.Bool(desc='Flip volume in x-plane (default).', argstr='-x') - y = traits.Bool(desc='Flip volume in y-plane.', argstr='-y') - z = traits.Bool(desc='Flip volume in z-plane.', argstr='-z') + x = traits.Bool(desc="Flip volume in x-plane (default).", argstr="-x") + y = traits.Bool(desc="Flip volume in y-plane.", argstr="-y") + z = traits.Bool(desc="Flip volume in z-plane.", argstr="-z") class VolSymmOutputSpec(TraitedSpec): - output_file = File(desc='output file', exists=True) - trans_file = File(desc='xfm trans file', exists=True) + output_file = File(desc="output file", exists=True) + trans_file = File(desc="xfm trans file", exists=True) output_grid = File( - desc='output grid file', exists=True) # FIXME Is exists=True correct? + desc="output grid file", exists=True + ) # FIXME Is exists=True correct? class VolSymm(CommandLine): @@ -3667,15 +3884,16 @@ class VolSymm(CommandLine): input_spec = VolSymmInputSpec output_spec = VolSymmOutputSpec - _cmd = 'volsymm' + _cmd = "volsymm" def _list_outputs(self): outputs = super(VolSymm, self)._list_outputs() # Have to manually check for the grid files. 
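+        # (Editor's note) Nonlinear MINC transforms keep their displacement
+        # field in separate _grid_*.mnc volumes, and the .xfm text refers to
+        # those files by name; the substring test below keys on "grid"
+        # appearing in that reference.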
- if os.path.exists(outputs['trans_file']): - if 'grid' in open(outputs['trans_file'], 'r').read(): - outputs['output_grid'] = re.sub('.(nlxfm|xfm)$', '_grid_0.mnc', - outputs['trans_file']) + if os.path.exists(outputs["trans_file"]): + if "grid" in open(outputs["trans_file"], "r").read(): + outputs["output_grid"] = re.sub( + ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["trans_file"] + ) return outputs diff --git a/nipype/interfaces/minc/testdata.py b/nipype/interfaces/minc/testdata.py index f027efa35e..f4e2836d65 100644 --- a/nipype/interfaces/minc/testdata.py +++ b/nipype/interfaces/minc/testdata.py @@ -3,14 +3,11 @@ import os from ...testing import example_data -minc2Dfile = example_data('minc_test_2D_00.mnc') -minc3Dfile = example_data('minc_test_3D_00.mnc') +minc2Dfile = example_data("minc_test_2D_00.mnc") +minc3Dfile = example_data("minc_test_3D_00.mnc") -nlp_config = example_data('minc_nlp.conf') +nlp_config = example_data("minc_nlp.conf") -def nonempty_minc_data(i, shape='2D'): - return example_data('minc_test_%s_%.2d.mnc' % ( - shape, - i, - )) +def nonempty_minc_data(i, shape="2D"): + return example_data("minc_test_%s_%.2d.mnc" % (shape, i,)) diff --git a/nipype/interfaces/minc/tests/test_auto_Average.py b/nipype/interfaces/minc/tests/test_auto_Average.py index dd8b49efe3..c9066611dd 100644 --- a/nipype/interfaces/minc/tests/test_auto_Average.py +++ b/nipype/interfaces/minc/tests/test_auto_Average.py @@ -4,154 +4,195 @@ def test_Average_inputs(): input_map = dict( - args=dict(argstr='%s', ), - avgdim=dict(argstr='-avgdim %s', ), - binarize=dict(argstr='-binarize', ), - binrange=dict(argstr='-binrange %s %s', ), - binvalue=dict(argstr='-binvalue %s', ), + args=dict(argstr="%s",), + avgdim=dict(argstr="-avgdim %s",), + binarize=dict(argstr="-binarize",), + binrange=dict(argstr="-binrange %s %s",), + binvalue=dict(argstr="-binvalue %s",), check_dimensions=dict( - argstr='-check_dimensions', - xor=('check_dimensions', 'no_check_dimensions'), - ), - clobber=dict( - argstr='-clobber', - usedefault=True, - ), - copy_header=dict( - argstr='-copy_header', - xor=('copy_header', 'no_copy_header'), - ), - debug=dict(argstr='-debug', ), - environ=dict( - nohash=True, - usedefault=True, + argstr="-check_dimensions", xor=("check_dimensions", "no_check_dimensions"), ), + clobber=dict(argstr="-clobber", usedefault=True,), + copy_header=dict(argstr="-copy_header", xor=("copy_header", "no_copy_header"),), + debug=dict(argstr="-debug",), + environ=dict(nohash=True, usedefault=True,), filelist=dict( - argstr='-filelist %s', + argstr="-filelist %s", extensions=None, mandatory=True, - xor=('input_files', 'filelist'), + xor=("input_files", "filelist"), ), format_byte=dict( - argstr='-byte', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-byte", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_double=dict( - argstr='-double', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-double", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_filetype=dict( - argstr='-filetype', - xor=('format_filetype', 
'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-filetype", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_float=dict( - argstr='-float', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-float", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_int=dict( - argstr='-int', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-int", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_long=dict( - argstr='-long', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-long", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_short=dict( - argstr='-short', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-short", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_signed=dict( - argstr='-signed', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-signed", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_unsigned=dict( - argstr='-unsigned', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-unsigned", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), input_files=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, - sep=' ', - xor=('input_files', 'filelist'), + sep=" ", + xor=("input_files", "filelist"), ), max_buffer_size_in_kb=dict( - argstr='-max_buffer_size_in_kb %d', - usedefault=True, + argstr="-max_buffer_size_in_kb %d", usedefault=True, ), no_check_dimensions=dict( - argstr='-nocheck_dimensions', - xor=('check_dimensions', 'no_check_dimensions'), + argstr="-nocheck_dimensions", + xor=("check_dimensions", "no_check_dimensions"), ), no_copy_header=dict( - argstr='-nocopy_header', - xor=('copy_header', 'no_copy_header'), - ), - nonormalize=dict( - argstr='-nonormalize', - xor=('normalize', 'nonormalize'), - ), - normalize=dict( - argstr='-normalize', - xor=('normalize', 'nonormalize'), + argstr="-nocopy_header", xor=("copy_header", 
"no_copy_header"), ), + nonormalize=dict(argstr="-nonormalize", xor=("normalize", "nonormalize"),), + normalize=dict(argstr="-normalize", xor=("normalize", "nonormalize"),), output_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, - name_source=['input_files'], - name_template='%s_averaged.mnc', + name_source=["input_files"], + name_template="%s_averaged.mnc", position=-1, ), - quiet=dict( - argstr='-quiet', - xor=('verbose', 'quiet'), - ), - sdfile=dict( - argstr='-sdfile %s', - extensions=None, - ), - two=dict(argstr='-2', ), - verbose=dict( - argstr='-verbose', - xor=('verbose', 'quiet'), - ), - voxel_range=dict(argstr='-range %d %d', ), - weights=dict( - argstr='-weights %s', - sep=',', - ), - width_weighted=dict( - argstr='-width_weighted', - requires=('avgdim', ), - ), + quiet=dict(argstr="-quiet", xor=("verbose", "quiet"),), + sdfile=dict(argstr="-sdfile %s", extensions=None,), + two=dict(argstr="-2",), + verbose=dict(argstr="-verbose", xor=("verbose", "quiet"),), + voxel_range=dict(argstr="-range %d %d",), + weights=dict(argstr="-weights %s", sep=",",), + width_weighted=dict(argstr="-width_weighted", requires=("avgdim",),), ) inputs = Average.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Average_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) + output_map = dict(output_file=dict(extensions=None,),) outputs = Average.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_BBox.py b/nipype/interfaces/minc/tests/test_auto_BBox.py index 9b92660b9a..c1dfbb370a 100644 --- a/nipype/interfaces/minc/tests/test_auto_BBox.py +++ b/nipype/interfaces/minc/tests/test_auto_BBox.py @@ -4,51 +4,34 @@ def test_BBox_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - format_minccrop=dict(argstr='-minccrop', ), - format_mincresample=dict(argstr='-mincresample', ), - format_mincreshape=dict(argstr='-mincreshape', ), - input_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - one_line=dict( - argstr='-one_line', - xor=('one_line', 'two_lines'), - ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + format_minccrop=dict(argstr="-minccrop",), + format_mincresample=dict(argstr="-mincresample",), + format_mincreshape=dict(argstr="-mincreshape",), + input_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + one_line=dict(argstr="-one_line", xor=("one_line", "two_lines"),), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), output_file=dict( extensions=None, hash_files=False, keep_extension=False, - name_source=['input_file'], - name_template='%s_bbox.txt', + name_source=["input_file"], + name_template="%s_bbox.txt", position=-1, ), - threshold=dict(argstr='-threshold', ), - two_lines=dict( - argstr='-two_lines', - xor=('one_line', 'two_lines'), - ), + threshold=dict(argstr="-threshold",), + two_lines=dict(argstr="-two_lines", xor=("one_line", "two_lines"),), ) inputs = BBox.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BBox_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) 
+ output_map = dict(output_file=dict(extensions=None,),) outputs = BBox.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Beast.py b/nipype/interfaces/minc/tests/test_auto_Beast.py index 62944284a6..5aed48440f 100644 --- a/nipype/interfaces/minc/tests/test_auto_Beast.py +++ b/nipype/interfaces/minc/tests/test_auto_Beast.py @@ -4,85 +4,45 @@ def test_Beast_inputs(): input_map = dict( - abspath=dict( - argstr='-abspath', - usedefault=True, - ), - args=dict(argstr='%s', ), - clobber=dict( - argstr='-clobber', - usedefault=True, - ), - confidence_level_alpha=dict( - argstr='-alpha %s', - usedefault=True, - ), - configuration_file=dict( - argstr='-configuration %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fill_holes=dict(argstr='-fill', ), - flip_images=dict(argstr='-flip', ), - input_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - library_dir=dict( - argstr='%s', - mandatory=True, - position=-3, - ), - load_moments=dict(argstr='-load_moments', ), - median_filter=dict(argstr='-median', ), - nlm_filter=dict(argstr='-nlm_filter', ), - number_selected_images=dict( - argstr='-selection_num %s', - usedefault=True, - ), + abspath=dict(argstr="-abspath", usedefault=True,), + args=dict(argstr="%s",), + clobber=dict(argstr="-clobber", usedefault=True,), + confidence_level_alpha=dict(argstr="-alpha %s", usedefault=True,), + configuration_file=dict(argstr="-configuration %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), + fill_holes=dict(argstr="-fill",), + flip_images=dict(argstr="-flip",), + input_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + library_dir=dict(argstr="%s", mandatory=True, position=-3,), + load_moments=dict(argstr="-load_moments",), + median_filter=dict(argstr="-median",), + nlm_filter=dict(argstr="-nlm_filter",), + number_selected_images=dict(argstr="-selection_num %s", usedefault=True,), output_file=dict( - argstr='%s', + argstr="%s", extensions=None, hash_files=False, - name_source=['input_file'], - name_template='%s_beast_mask.mnc', + name_source=["input_file"], + name_template="%s_beast_mask.mnc", position=-1, ), - patch_size=dict( - argstr='-patch_size %s', - usedefault=True, - ), - probability_map=dict(argstr='-probability', ), - same_resolution=dict(argstr='-same_resolution', ), - search_area=dict( - argstr='-search_area %s', - usedefault=True, - ), - smoothness_factor_beta=dict( - argstr='-beta %s', - usedefault=True, - ), - threshold_patch_selection=dict( - argstr='-threshold %s', - usedefault=True, - ), - voxel_size=dict( - argstr='-voxel_size %s', - usedefault=True, - ), + patch_size=dict(argstr="-patch_size %s", usedefault=True,), + probability_map=dict(argstr="-probability",), + same_resolution=dict(argstr="-same_resolution",), + search_area=dict(argstr="-search_area %s", usedefault=True,), + smoothness_factor_beta=dict(argstr="-beta %s", usedefault=True,), + threshold_patch_selection=dict(argstr="-threshold %s", usedefault=True,), + voxel_size=dict(argstr="-voxel_size %s", usedefault=True,), ) inputs = Beast.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Beast_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) + output_map = dict(output_file=dict(extensions=None,),) outputs = Beast.output_spec() for key, metadata in list(output_map.items()): diff 
--git a/nipype/interfaces/minc/tests/test_auto_BestLinReg.py b/nipype/interfaces/minc/tests/test_auto_BestLinReg.py index 64aceb6182..3e765b0e52 100644 --- a/nipype/interfaces/minc/tests/test_auto_BestLinReg.py +++ b/nipype/interfaces/minc/tests/test_auto_BestLinReg.py @@ -4,58 +4,43 @@ def test_BestLinReg_inputs(): input_map = dict( - args=dict(argstr='%s', ), - clobber=dict( - argstr='-clobber', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + clobber=dict(argstr="-clobber", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), output_mnc=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, keep_extension=False, - name_source=['source'], - name_template='%s_bestlinreg.mnc', + name_source=["source"], + name_template="%s_bestlinreg.mnc", position=-1, ), output_xfm=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, keep_extension=False, - name_source=['source'], - name_template='%s_bestlinreg.xfm', + name_source=["source"], + name_template="%s_bestlinreg.xfm", position=-2, ), - source=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-4, - ), - target=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), - verbose=dict(argstr='-verbose', ), + source=dict(argstr="%s", extensions=None, mandatory=True, position=-4,), + target=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), + verbose=dict(argstr="-verbose",), ) inputs = BestLinReg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BestLinReg_outputs(): output_map = dict( - output_mnc=dict(extensions=None, ), - output_xfm=dict(extensions=None, ), + output_mnc=dict(extensions=None,), output_xfm=dict(extensions=None,), ) outputs = BestLinReg.output_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_BigAverage.py b/nipype/interfaces/minc/tests/test_auto_BigAverage.py index 30c8540598..539ae73488 100644 --- a/nipype/interfaces/minc/tests/test_auto_BigAverage.py +++ b/nipype/interfaces/minc/tests/test_auto_BigAverage.py @@ -4,51 +4,41 @@ def test_BigAverage_inputs(): input_map = dict( - args=dict(argstr='%s', ), - clobber=dict( - argstr='--clobber', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - input_files=dict( - argstr='%s', - mandatory=True, - position=-2, - sep=' ', - ), + args=dict(argstr="%s",), + clobber=dict(argstr="--clobber", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + input_files=dict(argstr="%s", mandatory=True, position=-2, sep=" ",), output_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, - name_source=['input_files'], - name_template='%s_bigaverage.mnc', + name_source=["input_files"], + name_template="%s_bigaverage.mnc", position=-1, ), - output_float=dict(argstr='--float', ), - robust=dict(argstr='-robust', ), + output_float=dict(argstr="--float",), + robust=dict(argstr="-robust",), sd_file=dict( - argstr='--sdfile %s', + argstr="--sdfile %s", extensions=None, hash_files=False, - name_source=['input_files'], - name_template='%s_bigaverage_stdev.mnc', + name_source=["input_files"], + name_template="%s_bigaverage_stdev.mnc", ), - tmpdir=dict(argstr='-tmpdir %s', ), - verbose=dict(argstr='--verbose', ), + tmpdir=dict(argstr="-tmpdir %s",), + verbose=dict(argstr="--verbose",), ) inputs = BigAverage.input_spec() for 
key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BigAverage_outputs(): output_map = dict( - output_file=dict(extensions=None, ), - sd_file=dict(extensions=None, ), + output_file=dict(extensions=None,), sd_file=dict(extensions=None,), ) outputs = BigAverage.output_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_Blob.py b/nipype/interfaces/minc/tests/test_auto_Blob.py index 2cce7f294d..f51c3693f6 100644 --- a/nipype/interfaces/minc/tests/test_auto_Blob.py +++ b/nipype/interfaces/minc/tests/test_auto_Blob.py @@ -4,38 +4,32 @@ def test_Blob_inputs(): input_map = dict( - args=dict(argstr='%s', ), - determinant=dict(argstr='-determinant', ), - environ=dict( - nohash=True, - usedefault=True, - ), - input_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - magnitude=dict(argstr='-magnitude', ), + args=dict(argstr="%s",), + determinant=dict(argstr="-determinant",), + environ=dict(nohash=True, usedefault=True,), + input_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + magnitude=dict(argstr="-magnitude",), output_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, - name_source=['input_file'], - name_template='%s_blob.mnc', + name_source=["input_file"], + name_template="%s_blob.mnc", position=-1, ), - trace=dict(argstr='-trace', ), - translation=dict(argstr='-translation', ), + trace=dict(argstr="-trace",), + translation=dict(argstr="-translation",), ) inputs = Blob.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Blob_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) + output_map = dict(output_file=dict(extensions=None,),) outputs = Blob.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Blur.py b/nipype/interfaces/minc/tests/test_auto_Blur.py index 267863d80c..b37942f768 100644 --- a/nipype/interfaces/minc/tests/test_auto_Blur.py +++ b/nipype/interfaces/minc/tests/test_auto_Blur.py @@ -4,52 +4,29 @@ def test_Blur_inputs(): input_map = dict( - args=dict(argstr='%s', ), - clobber=dict( - argstr='-clobber', - usedefault=True, - ), - dimensions=dict(argstr='-dimensions %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + clobber=dict(argstr="-clobber", usedefault=True,), + dimensions=dict(argstr="-dimensions %s",), + environ=dict(nohash=True, usedefault=True,), fwhm=dict( - argstr='-fwhm %s', - mandatory=True, - xor=('fwhm', 'fwhm3d', 'standard_dev'), + argstr="-fwhm %s", mandatory=True, xor=("fwhm", "fwhm3d", "standard_dev"), ), fwhm3d=dict( - argstr='-3dfwhm %s %s %s', - mandatory=True, - xor=('fwhm', 'fwhm3d', 'standard_dev'), - ), - gaussian=dict( - argstr='-gaussian', - xor=('gaussian', 'rect'), - ), - gradient=dict(argstr='-gradient', ), - input_file=dict( - argstr='%s', - extensions=None, + argstr="-3dfwhm %s %s %s", mandatory=True, - position=-2, - ), - no_apodize=dict(argstr='-no_apodize', ), - output_file_base=dict( - argstr='%s', - extensions=None, - position=-1, - ), - partial=dict(argstr='-partial', ), - rect=dict( - argstr='-rect', - xor=('gaussian', 'rect'), - ), + xor=("fwhm", "fwhm3d", "standard_dev"), + ), + gaussian=dict(argstr="-gaussian", xor=("gaussian", "rect"),), + gradient=dict(argstr="-gradient",), + 
input_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + no_apodize=dict(argstr="-no_apodize",), + output_file_base=dict(argstr="%s", extensions=None, position=-1,), + partial=dict(argstr="-partial",), + rect=dict(argstr="-rect", xor=("gaussian", "rect"),), standard_dev=dict( - argstr='-standarddev %s', + argstr="-standarddev %s", mandatory=True, - xor=('fwhm', 'fwhm3d', 'standard_dev'), + xor=("fwhm", "fwhm3d", "standard_dev"), ), ) inputs = Blur.input_spec() @@ -57,14 +34,16 @@ def test_Blur_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Blur_outputs(): output_map = dict( - gradient_dxyz=dict(extensions=None, ), - output_file=dict(extensions=None, ), - partial_dx=dict(extensions=None, ), - partial_dxyz=dict(extensions=None, ), - partial_dy=dict(extensions=None, ), - partial_dz=dict(extensions=None, ), + gradient_dxyz=dict(extensions=None,), + output_file=dict(extensions=None,), + partial_dx=dict(extensions=None,), + partial_dxyz=dict(extensions=None,), + partial_dy=dict(extensions=None,), + partial_dz=dict(extensions=None,), ) outputs = Blur.output_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_Calc.py b/nipype/interfaces/minc/tests/test_auto_Calc.py index 6d077c5a52..670278dfa9 100644 --- a/nipype/interfaces/minc/tests/test_auto_Calc.py +++ b/nipype/interfaces/minc/tests/test_auto_Calc.py @@ -4,153 +4,201 @@ def test_Calc_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), check_dimensions=dict( - argstr='-check_dimensions', - xor=('check_dimensions', 'no_check_dimensions'), + argstr="-check_dimensions", xor=("check_dimensions", "no_check_dimensions"), ), - clobber=dict( - argstr='-clobber', - usedefault=True, - ), - copy_header=dict( - argstr='-copy_header', - xor=('copy_header', 'no_copy_header'), - ), - debug=dict(argstr='-debug', ), - environ=dict( - nohash=True, - usedefault=True, - ), - eval_width=dict(argstr='-eval_width %s', ), + clobber=dict(argstr="-clobber", usedefault=True,), + copy_header=dict(argstr="-copy_header", xor=("copy_header", "no_copy_header"),), + debug=dict(argstr="-debug",), + environ=dict(nohash=True, usedefault=True,), + eval_width=dict(argstr="-eval_width %s",), expfile=dict( - argstr='-expfile %s', + argstr="-expfile %s", extensions=None, mandatory=True, - xor=('expression', 'expfile'), + xor=("expression", "expfile"), ), expression=dict( - argstr="-expression '%s'", - mandatory=True, - xor=('expression', 'expfile'), + argstr="-expression '%s'", mandatory=True, xor=("expression", "expfile"), ), filelist=dict( - argstr='-filelist %s', + argstr="-filelist %s", extensions=None, mandatory=True, - xor=('input_files', 'filelist'), + xor=("input_files", "filelist"), ), format_byte=dict( - argstr='-byte', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-byte", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_double=dict( - argstr='-double', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-double", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + 
"format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_filetype=dict( - argstr='-filetype', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-filetype", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_float=dict( - argstr='-float', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-float", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_int=dict( - argstr='-int', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-int", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_long=dict( - argstr='-long', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-long", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_short=dict( - argstr='-short', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-short", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_signed=dict( - argstr='-signed', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-signed", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_unsigned=dict( - argstr='-unsigned', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), - ), - ignore_nan=dict(argstr='-ignore_nan', ), - input_files=dict( - argstr='%s', - mandatory=True, - position=-2, - sep=' ', - ), - max_buffer_size_in_kb=dict(argstr='-max_buffer_size_in_kb %d', ), + argstr="-unsigned", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), + ), + ignore_nan=dict(argstr="-ignore_nan",), + input_files=dict(argstr="%s", mandatory=True, position=-2, sep=" ",), + max_buffer_size_in_kb=dict(argstr="-max_buffer_size_in_kb %d",), no_check_dimensions=dict( - argstr='-nocheck_dimensions', - xor=('check_dimensions', 'no_check_dimensions'), + argstr="-nocheck_dimensions", + xor=("check_dimensions", "no_check_dimensions"), ), no_copy_header=dict( - argstr='-nocopy_header', - xor=('copy_header', 'no_copy_header'), 
+ argstr="-nocopy_header", xor=("copy_header", "no_copy_header"), ), outfiles=dict(), output_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, - name_source=['input_files'], - name_template='%s_calc.mnc', + name_source=["input_files"], + name_template="%s_calc.mnc", position=-1, ), output_illegal=dict( - argstr='-illegal_value', - xor=('output_nan', 'output_zero', 'output_illegal_value'), + argstr="-illegal_value", + xor=("output_nan", "output_zero", "output_illegal_value"), ), output_nan=dict( - argstr='-nan', - xor=('output_nan', 'output_zero', 'output_illegal_value'), + argstr="-nan", xor=("output_nan", "output_zero", "output_illegal_value"), ), output_zero=dict( - argstr='-zero', - xor=('output_nan', 'output_zero', 'output_illegal_value'), - ), - propagate_nan=dict(argstr='-propagate_nan', ), - quiet=dict( - argstr='-quiet', - xor=('verbose', 'quiet'), - ), - two=dict(argstr='-2', ), - verbose=dict( - argstr='-verbose', - xor=('verbose', 'quiet'), + argstr="-zero", xor=("output_nan", "output_zero", "output_illegal_value"), ), - voxel_range=dict(argstr='-range %d %d', ), + propagate_nan=dict(argstr="-propagate_nan",), + quiet=dict(argstr="-quiet", xor=("verbose", "quiet"),), + two=dict(argstr="-2",), + verbose=dict(argstr="-verbose", xor=("verbose", "quiet"),), + voxel_range=dict(argstr="-range %d %d",), ) inputs = Calc.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Calc_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) + output_map = dict(output_file=dict(extensions=None,),) outputs = Calc.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Convert.py b/nipype/interfaces/minc/tests/test_auto_Convert.py index ba312ea2d3..695d371b47 100644 --- a/nipype/interfaces/minc/tests/test_auto_Convert.py +++ b/nipype/interfaces/minc/tests/test_auto_Convert.py @@ -4,42 +4,33 @@ def test_Convert_inputs(): input_map = dict( - args=dict(argstr='%s', ), - chunk=dict(argstr='-chunk %d', ), - clobber=dict( - argstr='-clobber', - usedefault=True, - ), - compression=dict(argstr='-compress %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - input_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), + args=dict(argstr="%s",), + chunk=dict(argstr="-chunk %d",), + clobber=dict(argstr="-clobber", usedefault=True,), + compression=dict(argstr="-compress %s",), + environ=dict(nohash=True, usedefault=True,), + input_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), output_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, - name_source=['input_file'], - name_template='%s_convert_output.mnc', + name_source=["input_file"], + name_template="%s_convert_output.mnc", position=-1, ), - template=dict(argstr='-template', ), - two=dict(argstr='-2', ), + template=dict(argstr="-template",), + two=dict(argstr="-2",), ) inputs = Convert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Convert_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) + output_map = dict(output_file=dict(extensions=None,),) outputs = Convert.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Copy.py 
b/nipype/interfaces/minc/tests/test_auto_Copy.py index 7bb0605c39..91736a67b3 100644 --- a/nipype/interfaces/minc/tests/test_auto_Copy.py +++ b/nipype/interfaces/minc/tests/test_auto_Copy.py @@ -4,42 +4,30 @@ def test_Copy_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - input_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + input_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), output_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, - name_source=['input_file'], - name_template='%s_copy.mnc', + name_source=["input_file"], + name_template="%s_copy.mnc", position=-1, ), - pixel_values=dict( - argstr='-pixel_values', - xor=('pixel_values', 'real_values'), - ), - real_values=dict( - argstr='-real_values', - xor=('pixel_values', 'real_values'), - ), + pixel_values=dict(argstr="-pixel_values", xor=("pixel_values", "real_values"),), + real_values=dict(argstr="-real_values", xor=("pixel_values", "real_values"),), ) inputs = Copy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Copy_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) + output_map = dict(output_file=dict(extensions=None,),) outputs = Copy.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Dump.py b/nipype/interfaces/minc/tests/test_auto_Dump.py index 4713f0aec3..eb1fe2c6a7 100644 --- a/nipype/interfaces/minc/tests/test_auto_Dump.py +++ b/nipype/interfaces/minc/tests/test_auto_Dump.py @@ -5,61 +5,39 @@ def test_Dump_inputs(): input_map = dict( annotations_brief=dict( - argstr='-b %s', - xor=('annotations_brief', 'annotations_full'), + argstr="-b %s", xor=("annotations_brief", "annotations_full"), ), annotations_full=dict( - argstr='-f %s', - xor=('annotations_brief', 'annotations_full'), - ), - args=dict(argstr='%s', ), - coordinate_data=dict( - argstr='-c', - xor=('coordinate_data', 'header_data'), - ), - environ=dict( - nohash=True, - usedefault=True, - ), - header_data=dict( - argstr='-h', - xor=('coordinate_data', 'header_data'), - ), - input_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - line_length=dict(argstr='-l %d', ), - netcdf_name=dict(argstr='-n %s', ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), + argstr="-f %s", xor=("annotations_brief", "annotations_full"), + ), + args=dict(argstr="%s",), + coordinate_data=dict(argstr="-c", xor=("coordinate_data", "header_data"),), + environ=dict(nohash=True, usedefault=True,), + header_data=dict(argstr="-h", xor=("coordinate_data", "header_data"),), + input_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + line_length=dict(argstr="-l %d",), + netcdf_name=dict(argstr="-n %s",), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), output_file=dict( extensions=None, hash_files=False, keep_extension=False, - name_source=['input_file'], - name_template='%s_dump.txt', + name_source=["input_file"], + name_template="%s_dump.txt", position=-1, ), - precision=dict(argstr='%s', ), - variables=dict( - argstr='-v %s', - sep=',', - ), + precision=dict(argstr="%s",), + variables=dict(argstr="-v %s", sep=",",), ) inputs = Dump.input_spec() 
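     # These test_auto_* modules are generated by nipype's checkspecs tool
     # rather than written by hand: input_map freezes the trait metadata the
     # interface declared when the test was generated, and the loop below
     # asserts that every (trait, metakey) pair still matches, so accidental
     # spec changes surface as test failures.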
for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Dump_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) + output_map = dict(output_file=dict(extensions=None,),) outputs = Dump.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Extract.py b/nipype/interfaces/minc/tests/test_auto_Extract.py index a3dfb069e5..0c05d4ab5f 100644 --- a/nipype/interfaces/minc/tests/test_auto_Extract.py +++ b/nipype/interfaces/minc/tests/test_auto_Extract.py @@ -4,151 +4,194 @@ def test_Extract_inputs(): input_map = dict( - args=dict(argstr='%s', ), - count=dict( - argstr='-count %s', - sep=',', - ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + count=dict(argstr="-count %s", sep=",",), + environ=dict(nohash=True, usedefault=True,), flip_any_direction=dict( - argstr='-any_direction', - xor=('flip_positive_direction', 'flip_negative_direction', - 'flip_any_direction'), + argstr="-any_direction", + xor=( + "flip_positive_direction", + "flip_negative_direction", + "flip_any_direction", + ), ), flip_negative_direction=dict( - argstr='-negative_direction', - xor=('flip_positive_direction', 'flip_negative_direction', - 'flip_any_direction'), + argstr="-negative_direction", + xor=( + "flip_positive_direction", + "flip_negative_direction", + "flip_any_direction", + ), ), flip_positive_direction=dict( - argstr='-positive_direction', - xor=('flip_positive_direction', 'flip_negative_direction', - 'flip_any_direction'), + argstr="-positive_direction", + xor=( + "flip_positive_direction", + "flip_negative_direction", + "flip_any_direction", + ), ), flip_x_any=dict( - argstr='-xanydirection', - xor=('flip_x_positive', 'flip_x_negative', 'flip_x_any'), + argstr="-xanydirection", + xor=("flip_x_positive", "flip_x_negative", "flip_x_any"), ), flip_x_negative=dict( - argstr='-xdirection', - xor=('flip_x_positive', 'flip_x_negative', 'flip_x_any'), + argstr="-xdirection", + xor=("flip_x_positive", "flip_x_negative", "flip_x_any"), ), flip_x_positive=dict( - argstr='+xdirection', - xor=('flip_x_positive', 'flip_x_negative', 'flip_x_any'), + argstr="+xdirection", + xor=("flip_x_positive", "flip_x_negative", "flip_x_any"), ), flip_y_any=dict( - argstr='-yanydirection', - xor=('flip_y_positive', 'flip_y_negative', 'flip_y_any'), + argstr="-yanydirection", + xor=("flip_y_positive", "flip_y_negative", "flip_y_any"), ), flip_y_negative=dict( - argstr='-ydirection', - xor=('flip_y_positive', 'flip_y_negative', 'flip_y_any'), + argstr="-ydirection", + xor=("flip_y_positive", "flip_y_negative", "flip_y_any"), ), flip_y_positive=dict( - argstr='+ydirection', - xor=('flip_y_positive', 'flip_y_negative', 'flip_y_any'), + argstr="+ydirection", + xor=("flip_y_positive", "flip_y_negative", "flip_y_any"), ), flip_z_any=dict( - argstr='-zanydirection', - xor=('flip_z_positive', 'flip_z_negative', 'flip_z_any'), + argstr="-zanydirection", + xor=("flip_z_positive", "flip_z_negative", "flip_z_any"), ), flip_z_negative=dict( - argstr='-zdirection', - xor=('flip_z_positive', 'flip_z_negative', 'flip_z_any'), + argstr="-zdirection", + xor=("flip_z_positive", "flip_z_negative", "flip_z_any"), ), flip_z_positive=dict( - argstr='+zdirection', - xor=('flip_z_positive', 'flip_z_negative', 'flip_z_any'), - ), - image_maximum=dict(argstr='-image_maximum %s', ), - image_minimum=dict(argstr='-image_minimum %s', ), - 
image_range=dict(argstr='-image_range %s %s', ), - input_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - nonormalize=dict( - argstr='-nonormalize', - xor=('normalize', 'nonormalize'), - ), - normalize=dict( - argstr='-normalize', - xor=('normalize', 'nonormalize'), - ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), + argstr="+zdirection", + xor=("flip_z_positive", "flip_z_negative", "flip_z_any"), + ), + image_maximum=dict(argstr="-image_maximum %s",), + image_minimum=dict(argstr="-image_minimum %s",), + image_range=dict(argstr="-image_range %s %s",), + input_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + nonormalize=dict(argstr="-nonormalize", xor=("normalize", "nonormalize"),), + normalize=dict(argstr="-normalize", xor=("normalize", "nonormalize"),), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), output_file=dict( extensions=None, hash_files=False, keep_extension=False, - name_source=['input_file'], - name_template='%s.raw', + name_source=["input_file"], + name_template="%s.raw", position=-1, ), - start=dict( - argstr='-start %s', - sep=',', - ), + start=dict(argstr="-start %s", sep=",",), write_ascii=dict( - argstr='-ascii', - xor=('write_ascii', 'write_ascii', 'write_byte', 'write_short', - 'write_int', 'write_long', 'write_float', 'write_double', - 'write_signed', 'write_unsigned'), + argstr="-ascii", + xor=( + "write_ascii", + "write_ascii", + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + "write_signed", + "write_unsigned", + ), ), write_byte=dict( - argstr='-byte', - xor=('write_ascii', 'write_ascii', 'write_byte', 'write_short', - 'write_int', 'write_long', 'write_float', 'write_double', - 'write_signed', 'write_unsigned'), + argstr="-byte", + xor=( + "write_ascii", + "write_ascii", + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + "write_signed", + "write_unsigned", + ), ), write_double=dict( - argstr='-double', - xor=('write_ascii', 'write_ascii', 'write_byte', 'write_short', - 'write_int', 'write_long', 'write_float', 'write_double', - 'write_signed', 'write_unsigned'), + argstr="-double", + xor=( + "write_ascii", + "write_ascii", + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + "write_signed", + "write_unsigned", + ), ), write_float=dict( - argstr='-float', - xor=('write_ascii', 'write_ascii', 'write_byte', 'write_short', - 'write_int', 'write_long', 'write_float', 'write_double', - 'write_signed', 'write_unsigned'), + argstr="-float", + xor=( + "write_ascii", + "write_ascii", + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + "write_signed", + "write_unsigned", + ), ), write_int=dict( - argstr='-int', - xor=('write_ascii', 'write_ascii', 'write_byte', 'write_short', - 'write_int', 'write_long', 'write_float', 'write_double', - 'write_signed', 'write_unsigned'), + argstr="-int", + xor=( + "write_ascii", + "write_ascii", + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + "write_signed", + "write_unsigned", + ), ), write_long=dict( - argstr='-long', - xor=('write_ascii', 'write_ascii', 'write_byte', 'write_short', - 'write_int', 'write_long', 'write_float', 'write_double', - 'write_signed', 'write_unsigned'), - ), - write_range=dict(argstr='-range %s %s', ), + argstr="-long", + xor=( + 
"write_ascii", + "write_ascii", + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + "write_signed", + "write_unsigned", + ), + ), + write_range=dict(argstr="-range %s %s",), write_short=dict( - argstr='-short', - xor=('write_ascii', 'write_ascii', 'write_byte', 'write_short', - 'write_int', 'write_long', 'write_float', 'write_double', - 'write_signed', 'write_unsigned'), - ), - write_signed=dict( - argstr='-signed', - xor=('write_signed', 'write_unsigned'), - ), + argstr="-short", + xor=( + "write_ascii", + "write_ascii", + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + "write_signed", + "write_unsigned", + ), + ), + write_signed=dict(argstr="-signed", xor=("write_signed", "write_unsigned"),), write_unsigned=dict( - argstr='-unsigned', - xor=('write_signed', 'write_unsigned'), + argstr="-unsigned", xor=("write_signed", "write_unsigned"), ), ) inputs = Extract.input_spec() @@ -156,8 +199,10 @@ def test_Extract_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Extract_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) + output_map = dict(output_file=dict(extensions=None,),) outputs = Extract.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Gennlxfm.py b/nipype/interfaces/minc/tests/test_auto_Gennlxfm.py index 58383c799e..deb6449d3d 100644 --- a/nipype/interfaces/minc/tests/test_auto_Gennlxfm.py +++ b/nipype/interfaces/minc/tests/test_auto_Gennlxfm.py @@ -4,41 +4,33 @@ def test_Gennlxfm_inputs(): input_map = dict( - args=dict(argstr='%s', ), - clobber=dict( - argstr='-clobber', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - ident=dict(argstr='-ident', ), - like=dict( - argstr='-like %s', - extensions=None, - ), + args=dict(argstr="%s",), + clobber=dict(argstr="-clobber", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + ident=dict(argstr="-ident",), + like=dict(argstr="-like %s", extensions=None,), output_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, - name_source=['like'], - name_template='%s_gennlxfm.xfm', + name_source=["like"], + name_template="%s_gennlxfm.xfm", position=-1, ), - step=dict(argstr='-step %s', ), - verbose=dict(argstr='-verbose', ), + step=dict(argstr="-step %s",), + verbose=dict(argstr="-verbose",), ) inputs = Gennlxfm.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Gennlxfm_outputs(): output_map = dict( - output_file=dict(extensions=None, ), - output_grid=dict(extensions=None, ), + output_file=dict(extensions=None,), output_grid=dict(extensions=None,), ) outputs = Gennlxfm.output_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_Math.py b/nipype/interfaces/minc/tests/test_auto_Math.py index 4758c9d897..32a5f68d66 100644 --- a/nipype/interfaces/minc/tests/test_auto_Math.py +++ b/nipype/interfaces/minc/tests/test_auto_Math.py @@ -4,165 +4,225 @@ def test_Math_inputs(): input_map = dict( - abs=dict(argstr='-abs', ), - args=dict(argstr='%s', ), - calc_add=dict(argstr='-add', ), - calc_and=dict(argstr='-and', ), - calc_div=dict(argstr='-div', ), - calc_mul=dict(argstr='-mult', ), - calc_not=dict(argstr='-not', ), - calc_or=dict(argstr='-or', ), - 
calc_sub=dict(argstr='-sub', ), + abs=dict(argstr="-abs",), + args=dict(argstr="%s",), + calc_add=dict(argstr="-add",), + calc_and=dict(argstr="-and",), + calc_div=dict(argstr="-div",), + calc_mul=dict(argstr="-mult",), + calc_not=dict(argstr="-not",), + calc_or=dict(argstr="-or",), + calc_sub=dict(argstr="-sub",), check_dimensions=dict( - argstr='-check_dimensions', - xor=('check_dimensions', 'no_check_dimensions'), - ), - clamp=dict(argstr='-clamp -const2 %s %s', ), - clobber=dict( - argstr='-clobber', - usedefault=True, - ), - copy_header=dict( - argstr='-copy_header', - xor=('copy_header', 'no_copy_header'), - ), - count_valid=dict(argstr='-count_valid', ), - dimension=dict(argstr='-dimension %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - exp=dict(argstr='-exp -const2 %s %s', ), + argstr="-check_dimensions", xor=("check_dimensions", "no_check_dimensions"), + ), + clamp=dict(argstr="-clamp -const2 %s %s",), + clobber=dict(argstr="-clobber", usedefault=True,), + copy_header=dict(argstr="-copy_header", xor=("copy_header", "no_copy_header"),), + count_valid=dict(argstr="-count_valid",), + dimension=dict(argstr="-dimension %s",), + environ=dict(nohash=True, usedefault=True,), + exp=dict(argstr="-exp -const2 %s %s",), filelist=dict( - argstr='-filelist %s', + argstr="-filelist %s", extensions=None, mandatory=True, - xor=('input_files', 'filelist'), + xor=("input_files", "filelist"), ), format_byte=dict( - argstr='-byte', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-byte", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_double=dict( - argstr='-double', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-double", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_filetype=dict( - argstr='-filetype', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-filetype", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_float=dict( - argstr='-float', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-float", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_int=dict( - argstr='-int', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-int", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_long=dict( - argstr='-long', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 
'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-long", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_short=dict( - argstr='-short', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-short", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_signed=dict( - argstr='-signed', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), + argstr="-signed", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_unsigned=dict( - argstr='-unsigned', - xor=('format_filetype', 'format_byte', 'format_short', - 'format_int', 'format_long', 'format_float', 'format_double', - 'format_signed', 'format_unsigned'), - ), - ignore_nan=dict(argstr='-ignore_nan', ), + argstr="-unsigned", + xor=( + "format_filetype", + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), + ), + ignore_nan=dict(argstr="-ignore_nan",), input_files=dict( - argstr='%s', + argstr="%s", mandatory=True, position=-2, - sep=' ', - xor=('input_files', 'filelist'), + sep=" ", + xor=("input_files", "filelist"), ), - invert=dict(argstr='-invert -const %s', ), - isnan=dict(argstr='-isnan', ), - log=dict(argstr='-log -const2 %s %s', ), + invert=dict(argstr="-invert -const %s",), + isnan=dict(argstr="-isnan",), + log=dict(argstr="-log -const2 %s %s",), max_buffer_size_in_kb=dict( - argstr='-max_buffer_size_in_kb %d', - usedefault=True, + argstr="-max_buffer_size_in_kb %d", usedefault=True, ), - maximum=dict(argstr='-maximum', ), - minimum=dict(argstr='-minimum', ), - nisnan=dict(argstr='-nisnan', ), + maximum=dict(argstr="-maximum",), + minimum=dict(argstr="-minimum",), + nisnan=dict(argstr="-nisnan",), no_check_dimensions=dict( - argstr='-nocheck_dimensions', - xor=('check_dimensions', 'no_check_dimensions'), + argstr="-nocheck_dimensions", + xor=("check_dimensions", "no_check_dimensions"), ), no_copy_header=dict( - argstr='-nocopy_header', - xor=('copy_header', 'no_copy_header'), + argstr="-nocopy_header", xor=("copy_header", "no_copy_header"), ), - nsegment=dict(argstr='-nsegment -const2 %s %s', ), + nsegment=dict(argstr="-nsegment -const2 %s %s",), output_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, - name_source=['input_files'], - name_template='%s_mincmath.mnc', + name_source=["input_files"], + name_template="%s_mincmath.mnc", position=-1, ), output_illegal=dict( - argstr='-illegal_value', - xor=('output_nan', 'output_zero', 'output_illegal_value'), + argstr="-illegal_value", + xor=("output_nan", "output_zero", "output_illegal_value"), ), output_nan=dict( - argstr='-nan', - xor=('output_nan', 'output_zero', 'output_illegal_value'), + argstr="-nan", xor=("output_nan", "output_zero", "output_illegal_value"), ), output_zero=dict( - argstr='-zero', - xor=('output_nan', 'output_zero', 'output_illegal_value'), - ), - 
percentdiff=dict(argstr='-percentdiff', ), - propagate_nan=dict(argstr='-propagate_nan', ), - scale=dict(argstr='-scale -const2 %s %s', ), - segment=dict(argstr='-segment -const2 %s %s', ), - sqrt=dict(argstr='-sqrt', ), - square=dict(argstr='-square', ), - test_eq=dict(argstr='-eq', ), - test_ge=dict(argstr='-ge', ), - test_gt=dict(argstr='-gt', ), - test_le=dict(argstr='-le', ), - test_lt=dict(argstr='-lt', ), - test_ne=dict(argstr='-ne', ), - two=dict(argstr='-2', ), - voxel_range=dict(argstr='-range %d %d', ), + argstr="-zero", xor=("output_nan", "output_zero", "output_illegal_value"), + ), + percentdiff=dict(argstr="-percentdiff",), + propagate_nan=dict(argstr="-propagate_nan",), + scale=dict(argstr="-scale -const2 %s %s",), + segment=dict(argstr="-segment -const2 %s %s",), + sqrt=dict(argstr="-sqrt",), + square=dict(argstr="-square",), + test_eq=dict(argstr="-eq",), + test_ge=dict(argstr="-ge",), + test_gt=dict(argstr="-gt",), + test_le=dict(argstr="-le",), + test_lt=dict(argstr="-lt",), + test_ne=dict(argstr="-ne",), + two=dict(argstr="-2",), + voxel_range=dict(argstr="-range %d %d",), ) inputs = Math.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Math_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) + output_map = dict(output_file=dict(extensions=None,),) outputs = Math.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_NlpFit.py b/nipype/interfaces/minc/tests/test_auto_NlpFit.py index c0092ab704..58e9e985db 100644 --- a/nipype/interfaces/minc/tests/test_auto_NlpFit.py +++ b/nipype/interfaces/minc/tests/test_auto_NlpFit.py @@ -4,60 +4,28 @@ def test_NlpFit_inputs(): input_map = dict( - args=dict(argstr='%s', ), - clobber=dict( - argstr='-clobber', - usedefault=True, - ), - config_file=dict( - argstr='-config_file %s', - extensions=None, - mandatory=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - init_xfm=dict( - argstr='-init_xfm %s', - extensions=None, - mandatory=True, - ), + args=dict(argstr="%s",), + clobber=dict(argstr="-clobber", usedefault=True,), + config_file=dict(argstr="-config_file %s", extensions=None, mandatory=True,), + environ=dict(nohash=True, usedefault=True,), + init_xfm=dict(argstr="-init_xfm %s", extensions=None, mandatory=True,), input_grid_files=dict(), - output_xfm=dict( - argstr='%s', - extensions=None, - genfile=True, - position=-1, - ), - source=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), - source_mask=dict( - argstr='-source_mask %s', - extensions=None, - mandatory=True, - ), - target=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - verbose=dict(argstr='-verbose', ), + output_xfm=dict(argstr="%s", extensions=None, genfile=True, position=-1,), + source=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), + source_mask=dict(argstr="-source_mask %s", extensions=None, mandatory=True,), + target=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + verbose=dict(argstr="-verbose",), ) inputs = NlpFit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_NlpFit_outputs(): output_map = dict( - output_grid=dict(extensions=None, ), - output_xfm=dict(extensions=None, ), + output_grid=dict(extensions=None,), 
output_xfm=dict(extensions=None,), ) outputs = NlpFit.output_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_Norm.py b/nipype/interfaces/minc/tests/test_auto_Norm.py index 4bbae0454f..462b61459f 100644 --- a/nipype/interfaces/minc/tests/test_auto_Norm.py +++ b/nipype/interfaces/minc/tests/test_auto_Norm.py @@ -4,64 +4,49 @@ def test_Norm_inputs(): input_map = dict( - args=dict(argstr='%s', ), - clamp=dict( - argstr='-clamp', - usedefault=True, - ), - clobber=dict( - argstr='-clobber', - usedefault=True, - ), - cutoff=dict(argstr='-cutoff %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - input_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - lower=dict(argstr='-lower %s', ), - mask=dict( - argstr='-mask %s', - extensions=None, - ), - out_ceil=dict(argstr='-out_ceil %s', ), - out_floor=dict(argstr='-out_floor %s', ), + args=dict(argstr="%s",), + clamp=dict(argstr="-clamp", usedefault=True,), + clobber=dict(argstr="-clobber", usedefault=True,), + cutoff=dict(argstr="-cutoff %s",), + environ=dict(nohash=True, usedefault=True,), + input_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + lower=dict(argstr="-lower %s",), + mask=dict(argstr="-mask %s", extensions=None,), + out_ceil=dict(argstr="-out_ceil %s",), + out_floor=dict(argstr="-out_floor %s",), output_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, - name_source=['input_file'], - name_template='%s_norm.mnc', + name_source=["input_file"], + name_template="%s_norm.mnc", position=-1, ), output_threshold_mask=dict( - argstr='-threshold_mask %s', + argstr="-threshold_mask %s", extensions=None, hash_files=False, - name_source=['input_file'], - name_template='%s_norm_threshold_mask.mnc', + name_source=["input_file"], + name_template="%s_norm_threshold_mask.mnc", ), - threshold=dict(argstr='-threshold', ), - threshold_blur=dict(argstr='-threshold_blur %s', ), - threshold_bmt=dict(argstr='-threshold_bmt', ), - threshold_perc=dict(argstr='-threshold_perc %s', ), - upper=dict(argstr='-upper %s', ), + threshold=dict(argstr="-threshold",), + threshold_blur=dict(argstr="-threshold_blur %s",), + threshold_bmt=dict(argstr="-threshold_bmt",), + threshold_perc=dict(argstr="-threshold_perc %s",), + upper=dict(argstr="-upper %s",), ) inputs = Norm.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Norm_outputs(): output_map = dict( - output_file=dict(extensions=None, ), - output_threshold_mask=dict(extensions=None, ), + output_file=dict(extensions=None,), + output_threshold_mask=dict(extensions=None,), ) outputs = Norm.output_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_Pik.py b/nipype/interfaces/minc/tests/test_auto_Pik.py index 2a2ff5f851..530ead0317 100644 --- a/nipype/interfaces/minc/tests/test_auto_Pik.py +++ b/nipype/interfaces/minc/tests/test_auto_Pik.py @@ -4,88 +4,60 @@ def test_Pik_inputs(): input_map = dict( - annotated_bar=dict(argstr='--anot_bar', ), - args=dict(argstr='%s', ), - auto_range=dict( - argstr='--auto_range', - xor=('image_range', 'auto_range'), - ), - clobber=dict( - argstr='-clobber', - usedefault=True, - ), - depth=dict(argstr='--depth %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + annotated_bar=dict(argstr="--anot_bar",), + args=dict(argstr="%s",), + auto_range=dict(argstr="--auto_range", xor=("image_range", "auto_range"),), + 
clobber=dict(argstr="-clobber", usedefault=True,), + depth=dict(argstr="--depth %s",), + environ=dict(nohash=True, usedefault=True,), horizontal_triplanar_view=dict( - argstr='--horizontal', - xor=('vertical_triplanar_view', 'horizontal_triplanar_view'), + argstr="--horizontal", + xor=("vertical_triplanar_view", "horizontal_triplanar_view"), ), image_range=dict( - argstr='--image_range %s %s', - xor=('image_range', 'auto_range'), + argstr="--image_range %s %s", xor=("image_range", "auto_range"), ), - input_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - jpg=dict(xor=('jpg', 'png'), ), - lookup=dict(argstr='--lookup %s', ), - minc_range=dict(argstr='--range %s %s', ), + input_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + jpg=dict(xor=("jpg", "png"),), + lookup=dict(argstr="--lookup %s",), + minc_range=dict(argstr="--range %s %s",), output_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, keep_extension=False, - name_source=['input_file'], - name_template='%s.png', + name_source=["input_file"], + name_template="%s.png", position=-1, ), - png=dict(xor=('jpg', 'png'), ), - sagittal_offset=dict(argstr='--sagittal_offset %s', ), - sagittal_offset_perc=dict(argstr='--sagittal_offset_perc %d', ), - scale=dict( - argstr='--scale %s', - usedefault=True, - ), - slice_x=dict( - argstr='-x', - xor=('slice_z', 'slice_y', 'slice_x'), - ), - slice_y=dict( - argstr='-y', - xor=('slice_z', 'slice_y', 'slice_x'), - ), - slice_z=dict( - argstr='-z', - xor=('slice_z', 'slice_y', 'slice_x'), - ), - start=dict(argstr='--slice %s', ), - tile_size=dict(argstr='--tilesize %s', ), - title=dict(argstr='%s', ), - title_size=dict( - argstr='--title_size %s', - requires=['title'], - ), - triplanar=dict(argstr='--triplanar', ), + png=dict(xor=("jpg", "png"),), + sagittal_offset=dict(argstr="--sagittal_offset %s",), + sagittal_offset_perc=dict(argstr="--sagittal_offset_perc %d",), + scale=dict(argstr="--scale %s", usedefault=True,), + slice_x=dict(argstr="-x", xor=("slice_z", "slice_y", "slice_x"),), + slice_y=dict(argstr="-y", xor=("slice_z", "slice_y", "slice_x"),), + slice_z=dict(argstr="-z", xor=("slice_z", "slice_y", "slice_x"),), + start=dict(argstr="--slice %s",), + tile_size=dict(argstr="--tilesize %s",), + title=dict(argstr="%s",), + title_size=dict(argstr="--title_size %s", requires=["title"],), + triplanar=dict(argstr="--triplanar",), vertical_triplanar_view=dict( - argstr='--vertical', - xor=('vertical_triplanar_view', 'horizontal_triplanar_view'), + argstr="--vertical", + xor=("vertical_triplanar_view", "horizontal_triplanar_view"), ), - width=dict(argstr='--width %s', ), + width=dict(argstr="--width %s",), ) inputs = Pik.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Pik_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) + output_map = dict(output_file=dict(extensions=None,),) outputs = Pik.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Resample.py b/nipype/interfaces/minc/tests/test_auto_Resample.py index 3fdf821b88..32385be6c9 100644 --- a/nipype/interfaces/minc/tests/test_auto_Resample.py +++ b/nipype/interfaces/minc/tests/test_auto_Resample.py @@ -4,244 +4,274 @@ def test_Resample_inputs(): input_map = dict( - args=dict(argstr='%s', ), - clobber=dict( - argstr='-clobber', - usedefault=True, - ), 
+ args=dict(argstr="%s",), + clobber=dict(argstr="-clobber", usedefault=True,), coronal_slices=dict( - argstr='-coronal', - xor=('transverse', 'sagittal', 'coronal'), + argstr="-coronal", xor=("transverse", "sagittal", "coronal"), ), dircos=dict( - argstr='-dircos %s %s %s', - xor=('nelements', 'nelements_x_y_or_z'), - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fill=dict( - argstr='-fill', - xor=('nofill', 'fill'), - ), - fill_value=dict( - argstr='-fillvalue %s', - requires=['fill'], + argstr="-dircos %s %s %s", xor=("nelements", "nelements_x_y_or_z"), ), + environ=dict(nohash=True, usedefault=True,), + fill=dict(argstr="-fill", xor=("nofill", "fill"),), + fill_value=dict(argstr="-fillvalue %s", requires=["fill"],), format_byte=dict( - argstr='-byte', - xor=('format_byte', 'format_short', 'format_int', 'format_long', - 'format_float', 'format_double', 'format_signed', - 'format_unsigned'), + argstr="-byte", + xor=( + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_double=dict( - argstr='-double', - xor=('format_byte', 'format_short', 'format_int', 'format_long', - 'format_float', 'format_double', 'format_signed', - 'format_unsigned'), + argstr="-double", + xor=( + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_float=dict( - argstr='-float', - xor=('format_byte', 'format_short', 'format_int', 'format_long', - 'format_float', 'format_double', 'format_signed', - 'format_unsigned'), + argstr="-float", + xor=( + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_int=dict( - argstr='-int', - xor=('format_byte', 'format_short', 'format_int', 'format_long', - 'format_float', 'format_double', 'format_signed', - 'format_unsigned'), + argstr="-int", + xor=( + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_long=dict( - argstr='-long', - xor=('format_byte', 'format_short', 'format_int', 'format_long', - 'format_float', 'format_double', 'format_signed', - 'format_unsigned'), + argstr="-long", + xor=( + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_short=dict( - argstr='-short', - xor=('format_byte', 'format_short', 'format_int', 'format_long', - 'format_float', 'format_double', 'format_signed', - 'format_unsigned'), + argstr="-short", + xor=( + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_signed=dict( - argstr='-signed', - xor=('format_byte', 'format_short', 'format_int', 'format_long', - 'format_float', 'format_double', 'format_signed', - 'format_unsigned'), + argstr="-signed", + xor=( + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + "format_double", + "format_signed", + "format_unsigned", + ), ), format_unsigned=dict( - argstr='-unsigned', - xor=('format_byte', 'format_short', 'format_int', 'format_long', - 'format_float', 'format_double', 'format_signed', - 'format_unsigned'), + argstr="-unsigned", + xor=( + "format_byte", + "format_short", + "format_int", + "format_long", + "format_float", + 
"format_double", + "format_signed", + "format_unsigned", + ), ), half_width_sinc_window=dict( - argstr='-width %s', - requires=['sinc_interpolation'], - ), - input_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, + argstr="-width %s", requires=["sinc_interpolation"], ), + input_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), input_grid_files=dict(), - invert_transformation=dict(argstr='-invert_transformation', ), + invert_transformation=dict(argstr="-invert_transformation",), keep_real_range=dict( - argstr='-keep_real_range', - xor=('keep_real_range', 'nokeep_real_range'), - ), - like=dict( - argstr='-like %s', - extensions=None, + argstr="-keep_real_range", xor=("keep_real_range", "nokeep_real_range"), ), + like=dict(argstr="-like %s", extensions=None,), nearest_neighbour_interpolation=dict( - argstr='-nearest_neighbour', - xor=('trilinear_interpolation', 'tricubic_interpolation', - 'nearest_neighbour_interpolation', 'sinc_interpolation'), + argstr="-nearest_neighbour", + xor=( + "trilinear_interpolation", + "tricubic_interpolation", + "nearest_neighbour_interpolation", + "sinc_interpolation", + ), ), nelements=dict( - argstr='-nelements %s %s %s', - xor=('nelements', 'nelements_x_y_or_z'), - ), - no_fill=dict( - argstr='-nofill', - xor=('nofill', 'fill'), + argstr="-nelements %s %s %s", xor=("nelements", "nelements_x_y_or_z"), ), + no_fill=dict(argstr="-nofill", xor=("nofill", "fill"),), no_input_sampling=dict( - argstr='-use_input_sampling', - xor=('vio_transform', 'no_input_sampling'), + argstr="-use_input_sampling", xor=("vio_transform", "no_input_sampling"), ), nokeep_real_range=dict( - argstr='-nokeep_real_range', - xor=('keep_real_range', 'nokeep_real_range'), + argstr="-nokeep_real_range", xor=("keep_real_range", "nokeep_real_range"), ), - origin=dict(argstr='-origin %s %s %s', ), + origin=dict(argstr="-origin %s %s %s",), output_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, - name_source=['input_file'], - name_template='%s_resample.mnc', + name_source=["input_file"], + name_template="%s_resample.mnc", position=-1, ), - output_range=dict(argstr='-range %s %s', ), + output_range=dict(argstr="-range %s %s",), sagittal_slices=dict( - argstr='-sagittal', - xor=('transverse', 'sagittal', 'coronal'), + argstr="-sagittal", xor=("transverse", "sagittal", "coronal"), ), sinc_interpolation=dict( - argstr='-sinc', - xor=('trilinear_interpolation', 'tricubic_interpolation', - 'nearest_neighbour_interpolation', 'sinc_interpolation'), + argstr="-sinc", + xor=( + "trilinear_interpolation", + "tricubic_interpolation", + "nearest_neighbour_interpolation", + "sinc_interpolation", + ), ), sinc_window_hamming=dict( - argstr='-hamming', - requires=['sinc_interpolation'], - xor=('sinc_window_hanning', 'sinc_window_hamming'), + argstr="-hamming", + requires=["sinc_interpolation"], + xor=("sinc_window_hanning", "sinc_window_hamming"), ), sinc_window_hanning=dict( - argstr='-hanning', - requires=['sinc_interpolation'], - xor=('sinc_window_hanning', 'sinc_window_hamming'), - ), - spacetype=dict(argstr='-spacetype %s', ), - standard_sampling=dict(argstr='-standard_sampling', ), - start=dict( - argstr='-start %s %s %s', - xor=('nelements', 'nelements_x_y_or_z'), - ), - step=dict( - argstr='-step %s %s %s', - xor=('nelements', 'nelements_x_y_or_z'), - ), - talairach=dict(argstr='-talairach', ), - transformation=dict( - argstr='-transformation %s', - extensions=None, - ), + argstr="-hanning", + 
requires=["sinc_interpolation"], + xor=("sinc_window_hanning", "sinc_window_hamming"), + ), + spacetype=dict(argstr="-spacetype %s",), + standard_sampling=dict(argstr="-standard_sampling",), + start=dict(argstr="-start %s %s %s", xor=("nelements", "nelements_x_y_or_z"),), + step=dict(argstr="-step %s %s %s", xor=("nelements", "nelements_x_y_or_z"),), + talairach=dict(argstr="-talairach",), + transformation=dict(argstr="-transformation %s", extensions=None,), transverse_slices=dict( - argstr='-transverse', - xor=('transverse', 'sagittal', 'coronal'), + argstr="-transverse", xor=("transverse", "sagittal", "coronal"), ), tricubic_interpolation=dict( - argstr='-tricubic', - xor=('trilinear_interpolation', 'tricubic_interpolation', - 'nearest_neighbour_interpolation', 'sinc_interpolation'), + argstr="-tricubic", + xor=( + "trilinear_interpolation", + "tricubic_interpolation", + "nearest_neighbour_interpolation", + "sinc_interpolation", + ), ), trilinear_interpolation=dict( - argstr='-trilinear', - xor=('trilinear_interpolation', 'tricubic_interpolation', - 'nearest_neighbour_interpolation', 'sinc_interpolation'), - ), - two=dict(argstr='-2', ), - units=dict(argstr='-units %s', ), + argstr="-trilinear", + xor=( + "trilinear_interpolation", + "tricubic_interpolation", + "nearest_neighbour_interpolation", + "sinc_interpolation", + ), + ), + two=dict(argstr="-2",), + units=dict(argstr="-units %s",), vio_transform=dict( - argstr='-tfm_input_sampling', - xor=('vio_transform', 'no_input_sampling'), + argstr="-tfm_input_sampling", xor=("vio_transform", "no_input_sampling"), ), xdircos=dict( - argstr='-xdircos %s', - requires=('ydircos', 'zdircos'), - xor=('dircos', 'dircos_x_y_or_z'), + argstr="-xdircos %s", + requires=("ydircos", "zdircos"), + xor=("dircos", "dircos_x_y_or_z"), ), xnelements=dict( - argstr='-xnelements %s', - requires=('ynelements', 'znelements'), - xor=('nelements', 'nelements_x_y_or_z'), + argstr="-xnelements %s", + requires=("ynelements", "znelements"), + xor=("nelements", "nelements_x_y_or_z"), ), xstart=dict( - argstr='-xstart %s', - requires=('ystart', 'zstart'), - xor=('start', 'start_x_y_or_z'), + argstr="-xstart %s", + requires=("ystart", "zstart"), + xor=("start", "start_x_y_or_z"), ), xstep=dict( - argstr='-xstep %s', - requires=('ystep', 'zstep'), - xor=('step', 'step_x_y_or_z'), + argstr="-xstep %s", + requires=("ystep", "zstep"), + xor=("step", "step_x_y_or_z"), ), ydircos=dict( - argstr='-ydircos %s', - requires=('xdircos', 'zdircos'), - xor=('dircos', 'dircos_x_y_or_z'), + argstr="-ydircos %s", + requires=("xdircos", "zdircos"), + xor=("dircos", "dircos_x_y_or_z"), ), ynelements=dict( - argstr='-ynelements %s', - requires=('xnelements', 'znelements'), - xor=('nelements', 'nelements_x_y_or_z'), + argstr="-ynelements %s", + requires=("xnelements", "znelements"), + xor=("nelements", "nelements_x_y_or_z"), ), ystart=dict( - argstr='-ystart %s', - requires=('xstart', 'zstart'), - xor=('start', 'start_x_y_or_z'), + argstr="-ystart %s", + requires=("xstart", "zstart"), + xor=("start", "start_x_y_or_z"), ), ystep=dict( - argstr='-ystep %s', - requires=('xstep', 'zstep'), - xor=('step', 'step_x_y_or_z'), + argstr="-ystep %s", + requires=("xstep", "zstep"), + xor=("step", "step_x_y_or_z"), ), zdircos=dict( - argstr='-zdircos %s', - requires=('xdircos', 'ydircos'), - xor=('dircos', 'dircos_x_y_or_z'), + argstr="-zdircos %s", + requires=("xdircos", "ydircos"), + xor=("dircos", "dircos_x_y_or_z"), ), znelements=dict( - argstr='-znelements %s', - requires=('xnelements', 
'ynelements'), - xor=('nelements', 'nelements_x_y_or_z'), + argstr="-znelements %s", + requires=("xnelements", "ynelements"), + xor=("nelements", "nelements_x_y_or_z"), ), zstart=dict( - argstr='-zstart %s', - requires=('xstart', 'ystart'), - xor=('start', 'start_x_y_or_z'), + argstr="-zstart %s", + requires=("xstart", "ystart"), + xor=("start", "start_x_y_or_z"), ), zstep=dict( - argstr='-zstep %s', - requires=('xstep', 'ystep'), - xor=('step', 'step_x_y_or_z'), + argstr="-zstep %s", + requires=("xstep", "ystep"), + xor=("step", "step_x_y_or_z"), ), ) inputs = Resample.input_spec() @@ -249,8 +279,10 @@ def test_Resample_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Resample_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) + output_map = dict(output_file=dict(extensions=None,),) outputs = Resample.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Reshape.py b/nipype/interfaces/minc/tests/test_auto_Reshape.py index 4d51d6800b..92b0e5862e 100644 --- a/nipype/interfaces/minc/tests/test_auto_Reshape.py +++ b/nipype/interfaces/minc/tests/test_auto_Reshape.py @@ -4,40 +4,31 @@ def test_Reshape_inputs(): input_map = dict( - args=dict(argstr='%s', ), - clobber=dict( - argstr='-clobber', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - input_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), + args=dict(argstr="%s",), + clobber=dict(argstr="-clobber", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + input_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), output_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, - name_source=['input_file'], - name_template='%s_reshape.mnc', + name_source=["input_file"], + name_template="%s_reshape.mnc", position=-1, ), - verbose=dict(argstr='-verbose', ), - write_short=dict(argstr='-short', ), + verbose=dict(argstr="-verbose",), + write_short=dict(argstr="-short",), ) inputs = Reshape.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Reshape_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) + output_map = dict(output_file=dict(extensions=None,),) outputs = Reshape.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_ToEcat.py b/nipype/interfaces/minc/tests/test_auto_ToEcat.py index eb64e6fa0b..02936ae4f4 100644 --- a/nipype/interfaces/minc/tests/test_auto_ToEcat.py +++ b/nipype/interfaces/minc/tests/test_auto_ToEcat.py @@ -4,46 +4,39 @@ def test_ToEcat_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - ignore_acquisition_variable=dict( - argstr='-ignore_acquisition_variable', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + ignore_acquisition_variable=dict(argstr="-ignore_acquisition_variable",), ignore_ecat_acquisition_variable=dict( - argstr='-ignore_ecat_acquisition_variable', ), - ignore_ecat_main=dict(argstr='-ignore_ecat_main', ), - ignore_ecat_subheader_variable=dict( - argstr='-ignore_ecat_subheader_variable', ), - ignore_patient_variable=dict(argstr='-ignore_patient_variable', ), - 
ignore_study_variable=dict(argstr='-ignore_study_variable', ), - input_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, + argstr="-ignore_ecat_acquisition_variable", ), - no_decay_corr_fctr=dict(argstr='-no_decay_corr_fctr', ), + ignore_ecat_main=dict(argstr="-ignore_ecat_main",), + ignore_ecat_subheader_variable=dict(argstr="-ignore_ecat_subheader_variable",), + ignore_patient_variable=dict(argstr="-ignore_patient_variable",), + ignore_study_variable=dict(argstr="-ignore_study_variable",), + input_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + no_decay_corr_fctr=dict(argstr="-no_decay_corr_fctr",), output_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, keep_extension=False, - name_source=['input_file'], - name_template='%s_to_ecat.v', + name_source=["input_file"], + name_template="%s_to_ecat.v", position=-1, ), - voxels_as_integers=dict(argstr='-label', ), + voxels_as_integers=dict(argstr="-label",), ) inputs = ToEcat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ToEcat_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) + output_map = dict(output_file=dict(extensions=None,),) outputs = ToEcat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_ToRaw.py b/nipype/interfaces/minc/tests/test_auto_ToRaw.py index f0aa06d3ad..7a15e49f65 100644 --- a/nipype/interfaces/minc/tests/test_auto_ToRaw.py +++ b/nipype/interfaces/minc/tests/test_auto_ToRaw.py @@ -4,77 +4,90 @@ def test_ToRaw_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - input_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - nonormalize=dict( - argstr='-nonormalize', - xor=('normalize', 'nonormalize'), - ), - normalize=dict( - argstr='-normalize', - xor=('normalize', 'nonormalize'), - ), - out_file=dict( - argstr='> %s', - extensions=None, - genfile=True, - position=-1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + input_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + nonormalize=dict(argstr="-nonormalize", xor=("normalize", "nonormalize"),), + normalize=dict(argstr="-normalize", xor=("normalize", "nonormalize"),), + out_file=dict(argstr="> %s", extensions=None, genfile=True, position=-1,), output_file=dict( extensions=None, hash_files=False, keep_extension=False, - name_source=['input_file'], - name_template='%s.raw', + name_source=["input_file"], + name_template="%s.raw", position=-1, ), write_byte=dict( - argstr='-byte', - xor=('write_byte', 'write_short', 'write_int', 'write_long', - 'write_float', 'write_double'), + argstr="-byte", + xor=( + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + ), ), write_double=dict( - argstr='-double', - xor=('write_byte', 'write_short', 'write_int', 'write_long', - 'write_float', 'write_double'), + argstr="-double", + xor=( + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + ), ), write_float=dict( - argstr='-float', - xor=('write_byte', 'write_short', 'write_int', 'write_long', - 'write_float', 'write_double'), + argstr="-float", + xor=( + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + ), ), 
write_int=dict( - argstr='-int', - xor=('write_byte', 'write_short', 'write_int', 'write_long', - 'write_float', 'write_double'), + argstr="-int", + xor=( + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + ), ), write_long=dict( - argstr='-long', - xor=('write_byte', 'write_short', 'write_int', 'write_long', - 'write_float', 'write_double'), + argstr="-long", + xor=( + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + ), ), - write_range=dict(argstr='-range %s %s', ), + write_range=dict(argstr="-range %s %s",), write_short=dict( - argstr='-short', - xor=('write_byte', 'write_short', 'write_int', 'write_long', - 'write_float', 'write_double'), - ), - write_signed=dict( - argstr='-signed', - xor=('write_signed', 'write_unsigned'), + argstr="-short", + xor=( + "write_byte", + "write_short", + "write_int", + "write_long", + "write_float", + "write_double", + ), ), + write_signed=dict(argstr="-signed", xor=("write_signed", "write_unsigned"),), write_unsigned=dict( - argstr='-unsigned', - xor=('write_signed', 'write_unsigned'), + argstr="-unsigned", xor=("write_signed", "write_unsigned"), ), ) inputs = ToRaw.input_spec() @@ -82,8 +95,10 @@ def test_ToRaw_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ToRaw_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) + output_map = dict(output_file=dict(extensions=None,),) outputs = ToRaw.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_VolSymm.py b/nipype/interfaces/minc/tests/test_auto_VolSymm.py index b71a1105a3..aeb8e6d23a 100644 --- a/nipype/interfaces/minc/tests/test_auto_VolSymm.py +++ b/nipype/interfaces/minc/tests/test_auto_VolSymm.py @@ -4,63 +4,51 @@ def test_VolSymm_inputs(): input_map = dict( - args=dict(argstr='%s', ), - clobber=dict( - argstr='-clobber', - usedefault=True, - ), - config_file=dict( - argstr='-config_file %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fit_linear=dict(argstr='-linear', ), - fit_nonlinear=dict(argstr='-nonlinear', ), - input_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), + args=dict(argstr="%s",), + clobber=dict(argstr="-clobber", usedefault=True,), + config_file=dict(argstr="-config_file %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), + fit_linear=dict(argstr="-linear",), + fit_nonlinear=dict(argstr="-nonlinear",), + input_file=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), input_grid_files=dict(), - nofit=dict(argstr='-nofit', ), + nofit=dict(argstr="-nofit",), output_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, - name_source=['input_file'], - name_template='%s_vol_symm.mnc', + name_source=["input_file"], + name_template="%s_vol_symm.mnc", position=-1, ), trans_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, keep_extension=False, - name_source=['input_file'], - name_template='%s_vol_symm.xfm', + name_source=["input_file"], + name_template="%s_vol_symm.xfm", position=-2, ), - verbose=dict(argstr='-verbose', ), - x=dict(argstr='-x', ), - y=dict(argstr='-y', ), - z=dict(argstr='-z', ), + verbose=dict(argstr="-verbose",), + x=dict(argstr="-x",), + y=dict(argstr="-y",), + z=dict(argstr="-z",), ) inputs = 
VolSymm.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_VolSymm_outputs(): output_map = dict( - output_file=dict(extensions=None, ), - output_grid=dict(extensions=None, ), - trans_file=dict(extensions=None, ), + output_file=dict(extensions=None,), + output_grid=dict(extensions=None,), + trans_file=dict(extensions=None,), ) outputs = VolSymm.output_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_Volcentre.py b/nipype/interfaces/minc/tests/test_auto_Volcentre.py index 7b43524fe4..492714adf4 100644 --- a/nipype/interfaces/minc/tests/test_auto_Volcentre.py +++ b/nipype/interfaces/minc/tests/test_auto_Volcentre.py @@ -4,42 +4,33 @@ def test_Volcentre_inputs(): input_map = dict( - args=dict(argstr='%s', ), - centre=dict(argstr='-centre %s %s %s', ), - clobber=dict( - argstr='-clobber', - usedefault=True, - ), - com=dict(argstr='-com', ), - environ=dict( - nohash=True, - usedefault=True, - ), - input_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), + args=dict(argstr="%s",), + centre=dict(argstr="-centre %s %s %s",), + clobber=dict(argstr="-clobber", usedefault=True,), + com=dict(argstr="-com",), + environ=dict(nohash=True, usedefault=True,), + input_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), output_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, - name_source=['input_file'], - name_template='%s_volcentre.mnc', + name_source=["input_file"], + name_template="%s_volcentre.mnc", position=-1, ), - verbose=dict(argstr='-verbose', ), - zero_dircos=dict(argstr='-zero_dircos', ), + verbose=dict(argstr="-verbose",), + zero_dircos=dict(argstr="-zero_dircos",), ) inputs = Volcentre.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Volcentre_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) + output_map = dict(output_file=dict(extensions=None,),) outputs = Volcentre.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Voliso.py b/nipype/interfaces/minc/tests/test_auto_Voliso.py index 873a763468..534315d0cf 100644 --- a/nipype/interfaces/minc/tests/test_auto_Voliso.py +++ b/nipype/interfaces/minc/tests/test_auto_Voliso.py @@ -4,42 +4,33 @@ def test_Voliso_inputs(): input_map = dict( - args=dict(argstr='%s', ), - avgstep=dict(argstr='--avgstep', ), - clobber=dict( - argstr='--clobber', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - input_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - maxstep=dict(argstr='--maxstep %s', ), - minstep=dict(argstr='--minstep %s', ), + args=dict(argstr="%s",), + avgstep=dict(argstr="--avgstep",), + clobber=dict(argstr="--clobber", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + input_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + maxstep=dict(argstr="--maxstep %s",), + minstep=dict(argstr="--minstep %s",), output_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, - name_source=['input_file'], - name_template='%s_voliso.mnc', + name_source=["input_file"], + name_template="%s_voliso.mnc", position=-1, ), - verbose=dict(argstr='--verbose', ), + verbose=dict(argstr="--verbose",), ) inputs = 
Voliso.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Voliso_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) + output_map = dict(output_file=dict(extensions=None,),) outputs = Voliso.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_Volpad.py b/nipype/interfaces/minc/tests/test_auto_Volpad.py index 910ca5d827..ce67c4ef73 100644 --- a/nipype/interfaces/minc/tests/test_auto_Volpad.py +++ b/nipype/interfaces/minc/tests/test_auto_Volpad.py @@ -4,44 +4,35 @@ def test_Volpad_inputs(): input_map = dict( - args=dict(argstr='%s', ), - auto=dict(argstr='-auto', ), - auto_freq=dict(argstr='-auto_freq %s', ), - clobber=dict( - argstr='-clobber', - usedefault=True, - ), - distance=dict(argstr='-distance %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - input_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), + args=dict(argstr="%s",), + auto=dict(argstr="-auto",), + auto_freq=dict(argstr="-auto_freq %s",), + clobber=dict(argstr="-clobber", usedefault=True,), + distance=dict(argstr="-distance %s",), + environ=dict(nohash=True, usedefault=True,), + input_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), output_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, - name_source=['input_file'], - name_template='%s_volpad.mnc', + name_source=["input_file"], + name_template="%s_volpad.mnc", position=-1, ), - smooth=dict(argstr='-smooth', ), - smooth_distance=dict(argstr='-smooth_distance %s', ), - verbose=dict(argstr='-verbose', ), + smooth=dict(argstr="-smooth",), + smooth_distance=dict(argstr="-smooth_distance %s",), + verbose=dict(argstr="-verbose",), ) inputs = Volpad.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Volpad_outputs(): - output_map = dict(output_file=dict(extensions=None, ), ) + output_map = dict(output_file=dict(extensions=None,),) outputs = Volpad.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_XfmAvg.py b/nipype/interfaces/minc/tests/test_auto_XfmAvg.py index dc8a7028e8..f688494751 100644 --- a/nipype/interfaces/minc/tests/test_auto_XfmAvg.py +++ b/nipype/interfaces/minc/tests/test_auto_XfmAvg.py @@ -4,43 +4,28 @@ def test_XfmAvg_inputs(): input_map = dict( - args=dict(argstr='%s', ), - avg_linear=dict(argstr='-avg_linear', ), - avg_nonlinear=dict(argstr='-avg_nonlinear', ), - clobber=dict( - argstr='-clobber', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - ignore_linear=dict(argstr='-ignore_linear', ), - ignore_nonlinear=dict(argstr='-ignore_nonline', ), - input_files=dict( - argstr='%s', - mandatory=True, - position=-2, - sep=' ', - ), + args=dict(argstr="%s",), + avg_linear=dict(argstr="-avg_linear",), + avg_nonlinear=dict(argstr="-avg_nonlinear",), + clobber=dict(argstr="-clobber", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + ignore_linear=dict(argstr="-ignore_linear",), + ignore_nonlinear=dict(argstr="-ignore_nonline",), + input_files=dict(argstr="%s", mandatory=True, position=-2, sep=" ",), input_grid_files=dict(), - output_file=dict( - argstr='%s', - extensions=None, - genfile=True, - position=-1, - ), - 
verbose=dict(argstr='-verbose', ), + output_file=dict(argstr="%s", extensions=None, genfile=True, position=-1,), + verbose=dict(argstr="-verbose",), ) inputs = XfmAvg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_XfmAvg_outputs(): output_map = dict( - output_file=dict(extensions=None, ), - output_grid=dict(extensions=None, ), + output_file=dict(extensions=None,), output_grid=dict(extensions=None,), ) outputs = XfmAvg.output_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_XfmConcat.py b/nipype/interfaces/minc/tests/test_auto_XfmConcat.py index a9dfa63cad..23642895da 100644 --- a/nipype/interfaces/minc/tests/test_auto_XfmConcat.py +++ b/nipype/interfaces/minc/tests/test_auto_XfmConcat.py @@ -4,43 +4,31 @@ def test_XfmConcat_inputs(): input_map = dict( - args=dict(argstr='%s', ), - clobber=dict( - argstr='-clobber', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - input_files=dict( - argstr='%s', - mandatory=True, - position=-2, - sep=' ', - ), + args=dict(argstr="%s",), + clobber=dict(argstr="-clobber", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + input_files=dict(argstr="%s", mandatory=True, position=-2, sep=" ",), input_grid_files=dict(), output_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, hash_files=False, - name_source=['input_files'], - name_template='%s_xfmconcat.xfm', + name_source=["input_files"], + name_template="%s_xfmconcat.xfm", position=-1, ), - verbose=dict(argstr='-verbose', ), + verbose=dict(argstr="-verbose",), ) inputs = XfmConcat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_XfmConcat_outputs(): - output_map = dict( - output_file=dict(extensions=None, ), - output_grids=dict(), - ) + output_map = dict(output_file=dict(extensions=None,), output_grids=dict(),) outputs = XfmConcat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/minc/tests/test_auto_XfmInvert.py b/nipype/interfaces/minc/tests/test_auto_XfmInvert.py index 4dacaa4876..7f0c42c433 100644 --- a/nipype/interfaces/minc/tests/test_auto_XfmInvert.py +++ b/nipype/interfaces/minc/tests/test_auto_XfmInvert.py @@ -4,38 +4,23 @@ def test_XfmInvert_inputs(): input_map = dict( - args=dict(argstr='%s', ), - clobber=dict( - argstr='-clobber', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - input_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - output_file=dict( - argstr='%s', - extensions=None, - genfile=True, - position=-1, - ), - verbose=dict(argstr='-verbose', ), + args=dict(argstr="%s",), + clobber=dict(argstr="-clobber", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + input_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + output_file=dict(argstr="%s", extensions=None, genfile=True, position=-1,), + verbose=dict(argstr="-verbose",), ) inputs = XfmInvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_XfmInvert_outputs(): output_map = dict( - output_file=dict(extensions=None, ), - output_grid=dict(extensions=None, ), + output_file=dict(extensions=None,), output_grid=dict(extensions=None,), ) outputs = 
XfmInvert.output_spec() diff --git a/nipype/interfaces/mipav/__init__.py b/nipype/interfaces/mipav/__init__.py index 2d5fce8652..2bdbfef78b 100644 --- a/nipype/interfaces/mipav/__init__.py +++ b/nipype/interfaces/mipav/__init__.py @@ -1,10 +1,21 @@ # -*- coding: utf-8 -*- from .developer import ( - JistLaminarVolumetricLayering, JistBrainMgdmSegmentation, - JistLaminarProfileGeometry, JistLaminarProfileCalculator, MedicAlgorithmN3, - JistLaminarROIAveraging, MedicAlgorithmLesionToads, - JistBrainMp2rageSkullStripping, JistCortexSurfaceMeshInflation, RandomVol, - MedicAlgorithmImageCalculator, JistBrainMp2rageDuraEstimation, - JistLaminarProfileSampling, MedicAlgorithmMipavReorient, - MedicAlgorithmSPECTRE2010, JistBrainPartialVolumeFilter, - JistIntensityMp2rageMasking, MedicAlgorithmThresholdToBinaryMask) + JistLaminarVolumetricLayering, + JistBrainMgdmSegmentation, + JistLaminarProfileGeometry, + JistLaminarProfileCalculator, + MedicAlgorithmN3, + JistLaminarROIAveraging, + MedicAlgorithmLesionToads, + JistBrainMp2rageSkullStripping, + JistCortexSurfaceMeshInflation, + RandomVol, + MedicAlgorithmImageCalculator, + JistBrainMp2rageDuraEstimation, + JistLaminarProfileSampling, + MedicAlgorithmMipavReorient, + MedicAlgorithmSPECTRE2010, + JistBrainPartialVolumeFilter, + JistIntensityMp2rageMasking, + MedicAlgorithmThresholdToBinaryMask, +) diff --git a/nipype/interfaces/mipav/developer.py b/nipype/interfaces/mipav/developer.py index ffb9e10cc3..9bc24b1a80 100644 --- a/nipype/interfaces/mipav/developer.py +++ b/nipype/interfaces/mipav/developer.py @@ -5,45 +5,54 @@ import os -from ..base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ..base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class JistLaminarVolumetricLayeringInputSpec(CommandLineInputSpec): inInner = File( - desc="Inner Distance Image (GM/WM boundary)", - exists=True, - argstr="--inInner %s") + desc="Inner Distance Image (GM/WM boundary)", exists=True, argstr="--inInner %s" + ) inOuter = File( desc="Outer Distance Image (CSF/GM boundary)", exists=True, - argstr="--inOuter %s") + argstr="--inOuter %s", + ) inNumber = traits.Int(desc="Number of layers", argstr="--inNumber %d") inMax = traits.Int( - desc="Max iterations for narrow band evolution", argstr="--inMax %d") + desc="Max iterations for narrow band evolution", argstr="--inMax %d" + ) inMin = traits.Float( - desc="Min change ratio for narrow band evolution", argstr="--inMin %f") + desc="Min change ratio for narrow band evolution", argstr="--inMin %f" + ) inLayering = traits.Enum( "distance-preserving", "volume-preserving", desc="Layering method", - argstr="--inLayering %s") + argstr="--inLayering %s", + ) inLayering2 = traits.Enum( - "outward", - "inward", - desc="Layering direction", - argstr="--inLayering2 %s") + "outward", "inward", desc="Layering direction", argstr="--inLayering2 %s" + ) incurvature = traits.Int( - desc="curvature approximation scale (voxels)", - argstr="--incurvature %d") + desc="curvature approximation scale (voxels)", argstr="--incurvature %d" + ) inratio = traits.Float( - desc="ratio smoothing kernel size (voxels)", argstr="--inratio %f") + desc="ratio smoothing kernel size (voxels)", argstr="--inratio %f" + ) inpresmooth = traits.Enum( - "true", - "false", - desc="pre-smooth cortical surfaces", - argstr="--inpresmooth %s") 
+ "true", "false", desc="pre-smooth cortical surfaces", argstr="--inpresmooth %s" + ) inTopology = traits.Enum( "26/6", "6/26", @@ -54,35 +63,40 @@ class JistLaminarVolumetricLayeringInputSpec(CommandLineInputSpec): "wco", "no", desc="Topology", - argstr="--inTopology %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + argstr="--inTopology %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outContinuous = traits.Either( traits.Bool, File(), hash_files=False, desc="Continuous depth measurement", - argstr="--outContinuous %s") + argstr="--outContinuous %s", + ) outDiscrete = traits.Either( traits.Bool, File(), hash_files=False, desc="Discrete sampled layers", - argstr="--outDiscrete %s") + argstr="--outDiscrete %s", + ) outLayer = traits.Either( traits.Bool, File(), hash_files=False, desc="Layer boundary surfaces", - argstr="--outLayer %s") + argstr="--outLayer %s", + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class JistLaminarVolumetricLayeringOutputSpec(TraitedSpec): @@ -109,30 +123,27 @@ class JistLaminarVolumetricLayering(SEMLikeCommandLine): output_spec = JistLaminarVolumetricLayeringOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.laminar.JistLaminarVolumetricLayering " _outputs_filenames = { - 'outContinuous': 'outContinuous.nii', - 'outLayer': 'outLayer.nii', - 'outDiscrete': 'outDiscrete.nii' + "outContinuous": "outContinuous.nii", + "outLayer": "outLayer.nii", + "outDiscrete": "outDiscrete.nii", } _redirect_x = True class JistBrainMgdmSegmentationInputSpec(CommandLineInputSpec): - inMP2RAGE = File( - desc="MP2RAGE T1 Map Image", exists=True, argstr="--inMP2RAGE %s") + inMP2RAGE = File(desc="MP2RAGE T1 Map Image", exists=True, argstr="--inMP2RAGE %s") inMP2RAGE2 = File( - desc="MP2RAGE T1-weighted Image", - exists=True, - argstr="--inMP2RAGE2 %s") + desc="MP2RAGE T1-weighted Image", exists=True, argstr="--inMP2RAGE2 %s" + ) inPV = File(desc="PV / Dura Image", exists=True, argstr="--inPV %s") inMPRAGE = File( - desc="MPRAGE T1-weighted Image", exists=True, argstr="--inMPRAGE %s") + desc="MPRAGE T1-weighted Image", exists=True, argstr="--inMPRAGE %s" + ) inFLAIR = File(desc="FLAIR Image", exists=True, argstr="--inFLAIR %s") inAtlas = File(desc="Atlas file", exists=True, argstr="--inAtlas %s") inData = traits.Float(desc="Data weight", argstr="--inData %f") - inCurvature = traits.Float( - desc="Curvature weight", argstr="--inCurvature %f") - inPosterior = traits.Float( - desc="Posterior scale (mm)", argstr="--inPosterior %f") + inCurvature = traits.Float(desc="Curvature weight", argstr="--inCurvature %f") + inPosterior = traits.Float(desc="Posterior scale (mm)", argstr="--inPosterior %f") inMax = traits.Int(desc="Max iterations", argstr="--inMax %d") inMin = traits.Float(desc="Min change", argstr="--inMin %f") inSteps = traits.Int(desc="Steps", argstr="--inSteps %d") @@ -146,60 +157,62 @@ class JistBrainMgdmSegmentationInputSpec(CommandLineInputSpec): "wco", "no", desc="Topology", - argstr="--inTopology %s") + argstr="--inTopology %s", + ) inCompute = traits.Enum( - "true", "false", desc="Compute posteriors", argstr="--inCompute %s") + "true", "false", desc="Compute 
posteriors", argstr="--inCompute %s" + ) inAdjust = traits.Enum( - "true", - "false", - desc="Adjust intensity priors", - argstr="--inAdjust %s") + "true", "false", desc="Adjust intensity priors", argstr="--inAdjust %s" + ) inOutput = traits.Enum( - "segmentation", - "memberships", - desc="Output images", - argstr="--inOutput %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + "segmentation", "memberships", desc="Output images", argstr="--inOutput %s" + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outSegmented = traits.Either( traits.Bool, File(), hash_files=False, desc="Segmented Brain Image", - argstr="--outSegmented %s") + argstr="--outSegmented %s", + ) outLevelset = traits.Either( traits.Bool, File(), hash_files=False, desc="Levelset Boundary Image", - argstr="--outLevelset %s") + argstr="--outLevelset %s", + ) outPosterior2 = traits.Either( traits.Bool, File(), hash_files=False, desc="Posterior Maximum Memberships (4D)", - argstr="--outPosterior2 %s") + argstr="--outPosterior2 %s", + ) outPosterior3 = traits.Either( traits.Bool, File(), hash_files=False, desc="Posterior Maximum Labels (4D)", - argstr="--outPosterior3 %s") + argstr="--outPosterior3 %s", + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class JistBrainMgdmSegmentationOutputSpec(TraitedSpec): outSegmented = File(desc="Segmented Brain Image", exists=True) outLevelset = File(desc="Levelset Boundary Image", exists=True) - outPosterior2 = File( - desc="Posterior Maximum Memberships (4D)", exists=True) + outPosterior2 = File(desc="Posterior Maximum Memberships (4D)", exists=True) outPosterior3 = File(desc="Posterior Maximum Labels (4D)", exists=True) @@ -218,17 +231,16 @@ class JistBrainMgdmSegmentation(SEMLikeCommandLine): output_spec = JistBrainMgdmSegmentationOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.brain.JistBrainMgdmSegmentation " _outputs_filenames = { - 'outSegmented': 'outSegmented.nii', - 'outPosterior2': 'outPosterior2.nii', - 'outPosterior3': 'outPosterior3.nii', - 'outLevelset': 'outLevelset.nii' + "outSegmented": "outSegmented.nii", + "outPosterior2": "outPosterior2.nii", + "outPosterior3": "outPosterior3.nii", + "outLevelset": "outLevelset.nii", } _redirect_x = True class JistLaminarProfileGeometryInputSpec(CommandLineInputSpec): - inProfile = File( - desc="Profile Surface Image", exists=True, argstr="--inProfile %s") + inProfile = File(desc="Profile Surface Image", exists=True, argstr="--inProfile %s") incomputed = traits.Enum( "thickness", "curvedness", @@ -239,32 +251,27 @@ class JistLaminarProfileGeometryInputSpec(CommandLineInputSpec): "profile_curvature", "profile_torsion", desc="computed measure", - argstr="--incomputed %s") + argstr="--incomputed %s", + ) inregularization = traits.Enum( - "none", - "Gaussian", - desc="regularization", - argstr="--inregularization %s") - insmoothing = traits.Float( - desc="smoothing parameter", argstr="--insmoothing %f") - inoutside = traits.Float( - desc="outside extension (mm)", argstr="--inoutside %f") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + "none", "Gaussian", desc="regularization", 
argstr="--inregularization %s" + ) + insmoothing = traits.Float(desc="smoothing parameter", argstr="--insmoothing %f") + inoutside = traits.Float(desc="outside extension (mm)", argstr="--inoutside %f") + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outResult = traits.Either( - traits.Bool, - File(), - hash_files=False, - desc="Result", - argstr="--outResult %s") + traits.Bool, File(), hash_files=False, desc="Result", argstr="--outResult %s" + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class JistLaminarProfileGeometryOutputSpec(TraitedSpec): @@ -285,38 +292,37 @@ class JistLaminarProfileGeometry(SEMLikeCommandLine): input_spec = JistLaminarProfileGeometryInputSpec output_spec = JistLaminarProfileGeometryOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.laminar.JistLaminarProfileGeometry " - _outputs_filenames = {'outResult': 'outResult.nii'} + _outputs_filenames = {"outResult": "outResult.nii"} _redirect_x = True class JistLaminarProfileCalculatorInputSpec(CommandLineInputSpec): inIntensity = File( - desc="Intensity Profile Image", exists=True, argstr="--inIntensity %s") - inMask = File( - desc="Mask Image (opt, 3D or 4D)", exists=True, argstr="--inMask %s") + desc="Intensity Profile Image", exists=True, argstr="--inIntensity %s" + ) + inMask = File(desc="Mask Image (opt, 3D or 4D)", exists=True, argstr="--inMask %s") incomputed = traits.Enum( "mean", "stdev", "skewness", "kurtosis", desc="computed statistic", - argstr="--incomputed %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + argstr="--incomputed %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outResult = traits.Either( - traits.Bool, - File(), - hash_files=False, - desc="Result", - argstr="--outResult %s") + traits.Bool, File(), hash_files=False, desc="Result", argstr="--outResult %s" + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class JistLaminarProfileCalculatorOutputSpec(TraitedSpec): @@ -337,64 +343,65 @@ class JistLaminarProfileCalculator(SEMLikeCommandLine): input_spec = JistLaminarProfileCalculatorInputSpec output_spec = JistLaminarProfileCalculatorOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.laminar.JistLaminarProfileCalculator " - _outputs_filenames = {'outResult': 'outResult.nii'} + _outputs_filenames = {"outResult": "outResult.nii"} _redirect_x = True class MedicAlgorithmN3InputSpec(CommandLineInputSpec): inInput = File(desc="Input Volume", exists=True, argstr="--inInput %s") inSignal = traits.Float( - desc= - "Default = min + 1, Values at less than threshold are treated as part of the background", - argstr="--inSignal %f") - inMaximum = traits.Int( - desc="Maximum number of Iterations", argstr="--inMaximum %d") + desc="Default = min + 1, Values at less than threshold are treated as part of the background", + 
argstr="--inSignal %f", + ) + inMaximum = traits.Int(desc="Maximum number of Iterations", argstr="--inMaximum %d") inEnd = traits.Float( - desc= - "Usually 0.01-0.00001, The measure used to terminate the iterations is the coefficient of variation of change in field estimates between successive iterations.", - argstr="--inEnd %f") + desc="Usually 0.01-0.00001, The measure used to terminate the iterations is the coefficient of variation of change in field estimates between successive iterations.", + argstr="--inEnd %f", + ) inField = traits.Float( - desc= - "Characteristic distance over which the field varies. The distance between adjacent knots in bspline fitting with at least 4 knots going in every dimension. The default in the dialog is one third the distance (resolution * extents) of the smallest dimension.", - argstr="--inField %f") + desc="Characteristic distance over which the field varies. The distance between adjacent knots in bspline fitting with at least 4 knots going in every dimension. The default in the dialog is one third the distance (resolution * extents) of the smallest dimension.", + argstr="--inField %f", + ) inSubsample = traits.Float( - desc= - "Usually between 1-32, The factor by which the data is subsampled to a lower resolution in estimating the slowly varying non-uniformity field. Reduce sampling in the finest sampling direction by the shrink factor.", - argstr="--inSubsample %f") + desc="Usually between 1-32, The factor by which the data is subsampled to a lower resolution in estimating the slowly varying non-uniformity field. Reduce sampling in the finest sampling direction by the shrink factor.", + argstr="--inSubsample %f", + ) inKernel = traits.Float( - desc= - "Usually between 0.05-0.50, Width of deconvolution kernel used to sharpen the histogram. Larger values give faster convergence while smaller values give greater accuracy.", - argstr="--inKernel %f") - inWeiner = traits.Float( - desc="Usually between 0.0-1.0", argstr="--inWeiner %f") + desc="Usually between 0.05-0.50, Width of deconvolution kernel used to sharpen the histogram. Larger values give faster convergence while smaller values give greater accuracy.", + argstr="--inKernel %f", + ) + inWeiner = traits.Float(desc="Usually between 0.0-1.0", argstr="--inWeiner %f") inAutomatic = traits.Enum( "true", "false", - desc= - "If true determines the threshold by histogram analysis. If true a VOI cannot be used and the input threshold is ignored.", - argstr="--inAutomatic %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + desc="If true determines the threshold by histogram analysis. 
If true a VOI cannot be used and the input threshold is ignored.", + argstr="--inAutomatic %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outInhomogeneity = traits.Either( traits.Bool, File(), hash_files=False, desc="Inhomogeneity Corrected Volume", - argstr="--outInhomogeneity %s") + argstr="--outInhomogeneity %s", + ) outInhomogeneity2 = traits.Either( traits.Bool, File(), hash_files=False, desc="Inhomogeneity Field", - argstr="--outInhomogeneity2 %s") + argstr="--outInhomogeneity2 %s", + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class MedicAlgorithmN3OutputSpec(TraitedSpec): @@ -417,35 +424,33 @@ class MedicAlgorithmN3(SEMLikeCommandLine): output_spec = MedicAlgorithmN3OutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.ece.iacl.plugins.classification.MedicAlgorithmN3 " _outputs_filenames = { - 'outInhomogeneity2': 'outInhomogeneity2.nii', - 'outInhomogeneity': 'outInhomogeneity.nii' + "outInhomogeneity2": "outInhomogeneity2.nii", + "outInhomogeneity": "outInhomogeneity.nii", } _redirect_x = True class JistLaminarROIAveragingInputSpec(CommandLineInputSpec): inIntensity = File( - desc="Intensity Profile Image", exists=True, argstr="--inIntensity %s") + desc="Intensity Profile Image", exists=True, argstr="--inIntensity %s" + ) inROI = File(desc="ROI Mask", exists=True, argstr="--inROI %s") inROI2 = traits.Str(desc="ROI Name", argstr="--inROI2 %s") - inMask = File( - desc="Mask Image (opt, 3D or 4D)", exists=True, argstr="--inMask %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + inMask = File(desc="Mask Image (opt, 3D or 4D)", exists=True, argstr="--inMask %s") + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outROI3 = traits.Either( - traits.Bool, - File(), - hash_files=False, - desc="ROI Average", - argstr="--outROI3 %s") + traits.Bool, File(), hash_files=False, desc="ROI Average", argstr="--outROI3 %s" + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class JistLaminarROIAveragingOutputSpec(TraitedSpec): @@ -466,152 +471,159 @@ class JistLaminarROIAveraging(SEMLikeCommandLine): input_spec = JistLaminarROIAveragingInputSpec output_spec = JistLaminarROIAveragingOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.laminar.JistLaminarROIAveraging " - _outputs_filenames = {'outROI3': 'outROI3'} + _outputs_filenames = {"outROI3": "outROI3"} _redirect_x = True class MedicAlgorithmLesionToadsInputSpec(CommandLineInputSpec): - inT1_MPRAGE = File( - desc="T1_MPRAGE Image", exists=True, argstr="--inT1_MPRAGE %s") - inT1_SPGR = File( - desc="T1_SPGR Image", exists=True, argstr="--inT1_SPGR %s") + inT1_MPRAGE = File(desc="T1_MPRAGE Image", exists=True, argstr="--inT1_MPRAGE %s") + inT1_SPGR = File(desc="T1_SPGR Image", exists=True, argstr="--inT1_SPGR %s") inFLAIR = File(desc="FLAIR Image", exists=True, 
argstr="--inFLAIR %s") inAtlas = traits.Enum( - "With Lesion", "No Lesion", desc="Atlas to Use", argstr="--inAtlas %s") + "With Lesion", "No Lesion", desc="Atlas to Use", argstr="--inAtlas %s" + ) inOutput = traits.Enum( "hard segmentation", "hard segmentation+memberships", "cruise inputs", "dura removal inputs", desc="Output images", - argstr="--inOutput %s") + argstr="--inOutput %s", + ) inOutput2 = traits.Enum( "true", "false", - desc= - "Output the hard classification using maximum membership (not neceesarily topologically correct)", - argstr="--inOutput2 %s") + desc="Output the hard classification using maximum membership (not neceesarily topologically correct)", + argstr="--inOutput2 %s", + ) inCorrect = traits.Enum( - "true", - "false", - desc="Correct MR field inhomogeneity.", - argstr="--inCorrect %s") + "true", "false", desc="Correct MR field inhomogeneity.", argstr="--inCorrect %s" + ) inOutput3 = traits.Enum( "true", "false", desc="Output the estimated inhomogeneity field", - argstr="--inOutput3 %s") + argstr="--inOutput3 %s", + ) inAtlas2 = File( - desc="Atlas File - With Lesions", exists=True, argstr="--inAtlas2 %s") + desc="Atlas File - With Lesions", exists=True, argstr="--inAtlas2 %s" + ) inAtlas3 = File( desc="Atlas File - No Lesion - T1 and FLAIR", exists=True, - argstr="--inAtlas3 %s") + argstr="--inAtlas3 %s", + ) inAtlas4 = File( - desc="Atlas File - No Lesion - T1 Only", - exists=True, - argstr="--inAtlas4 %s") + desc="Atlas File - No Lesion - T1 Only", exists=True, argstr="--inAtlas4 %s" + ) inMaximum = traits.Int( - desc= - "Maximum distance from the interventricular WM boundary to downweight the lesion membership to avoid false postives", - argstr="--inMaximum %d") - inMaximum2 = traits.Int( - desc="Maximum Ventircle Distance", argstr="--inMaximum2 %d") + desc="Maximum distance from the interventricular WM boundary to downweight the lesion membership to avoid false postives", + argstr="--inMaximum %d", + ) + inMaximum2 = traits.Int(desc="Maximum Ventircle Distance", argstr="--inMaximum2 %d") inMaximum3 = traits.Int( - desc="Maximum InterVentricular Distance", argstr="--inMaximum3 %d") + desc="Maximum InterVentricular Distance", argstr="--inMaximum3 %d" + ) inInclude = traits.Enum( "true", "false", desc="Include lesion in WM class in hard classification", - argstr="--inInclude %s") + argstr="--inInclude %s", + ) inAtlas5 = traits.Float( desc="Controls the effect of the statistical atlas on the segmentation", - argstr="--inAtlas5 %f") + argstr="--inAtlas5 %f", + ) inSmooting = traits.Float( desc="Controls the effect of neighberhood voxels on the membership", - argstr="--inSmooting %f") + argstr="--inSmooting %f", + ) inMaximum4 = traits.Float( - desc= - "Maximum amount of relative change in the energy function considered as the convergence criteria", - argstr="--inMaximum4 %f") - inMaximum5 = traits.Int( - desc="Maximum iterations", argstr="--inMaximum5 %d") + desc="Maximum amount of relative change in the energy function considered as the convergence criteria", + argstr="--inMaximum4 %f", + ) + inMaximum5 = traits.Int(desc="Maximum iterations", argstr="--inMaximum5 %d") inAtlas6 = traits.Enum( - "rigid", - "multi_fully_affine", - desc="Atlas alignment", - argstr="--inAtlas6 %s") + "rigid", "multi_fully_affine", desc="Atlas alignment", argstr="--inAtlas6 %s" + ) inConnectivity = traits.Enum( "(26,6)", "(6,26)", "(6,18)", "(18,6)", desc="Connectivity (foreground,background)", - argstr="--inConnectivity %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", 
argstr="--xPrefExt %s") + argstr="--inConnectivity %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outHard = traits.Either( traits.Bool, File(), hash_files=False, desc="Hard segmentation", - argstr="--outHard %s") + argstr="--outHard %s", + ) outHard2 = traits.Either( traits.Bool, File(), hash_files=False, desc="Hard segmentationfrom memberships", - argstr="--outHard2 %s") + argstr="--outHard2 %s", + ) outInhomogeneity = traits.Either( traits.Bool, File(), hash_files=False, desc="Inhomogeneity Field", - argstr="--outInhomogeneity %s") + argstr="--outInhomogeneity %s", + ) outMembership = traits.Either( traits.Bool, File(), hash_files=False, desc="Membership Functions", - argstr="--outMembership %s") + argstr="--outMembership %s", + ) outLesion = traits.Either( traits.Bool, File(), hash_files=False, desc="Lesion Segmentation", - argstr="--outLesion %s") + argstr="--outLesion %s", + ) outSulcal = traits.Either( traits.Bool, File(), hash_files=False, desc="Sulcal CSF Membership", - argstr="--outSulcal %s") + argstr="--outSulcal %s", + ) outCortical = traits.Either( traits.Bool, File(), hash_files=False, desc="Cortical GM Membership", - argstr="--outCortical %s") + argstr="--outCortical %s", + ) outFilled = traits.Either( traits.Bool, File(), hash_files=False, desc="Filled WM Membership", - argstr="--outFilled %s") + argstr="--outFilled %s", + ) outWM = traits.Either( - traits.Bool, - File(), - hash_files=False, - desc="WM Mask", - argstr="--outWM %s") + traits.Bool, File(), hash_files=False, desc="WM Mask", argstr="--outWM %s" + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class MedicAlgorithmLesionToadsOutputSpec(TraitedSpec): @@ -644,68 +656,68 @@ class MedicAlgorithmLesionToads(SEMLikeCommandLine): output_spec = MedicAlgorithmLesionToadsOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.ece.iacl.plugins.classification.MedicAlgorithmLesionToads " _outputs_filenames = { - 'outWM': 'outWM.nii', - 'outHard': 'outHard.nii', - 'outFilled': 'outFilled.nii', - 'outMembership': 'outMembership.nii', - 'outInhomogeneity': 'outInhomogeneity.nii', - 'outCortical': 'outCortical.nii', - 'outHard2': 'outHard2.nii', - 'outLesion': 'outLesion.nii', - 'outSulcal': 'outSulcal.nii' + "outWM": "outWM.nii", + "outHard": "outHard.nii", + "outFilled": "outFilled.nii", + "outMembership": "outMembership.nii", + "outInhomogeneity": "outInhomogeneity.nii", + "outCortical": "outCortical.nii", + "outHard2": "outHard2.nii", + "outLesion": "outLesion.nii", + "outSulcal": "outSulcal.nii", } _redirect_x = True class JistBrainMp2rageSkullStrippingInputSpec(CommandLineInputSpec): inSecond = File( - desc="Second inversion (Inv2) Image", - exists=True, - argstr="--inSecond %s") - inT1 = File( - desc="T1 Map (T1_Images) Image (opt)", exists=True, argstr="--inT1 %s") + desc="Second inversion (Inv2) Image", exists=True, argstr="--inSecond %s" + ) + inT1 = File(desc="T1 Map (T1_Images) Image (opt)", exists=True, argstr="--inT1 %s") inT1weighted = File( - desc="T1-weighted (UNI) Image (opt)", - exists=True, - argstr="--inT1weighted %s") - inFilter = File( - desc="Filter Image (opt)", exists=True, argstr="--inFilter %s") - inSkip = traits.Enum( - "true", 
"false", desc="Skip zero values", argstr="--inSkip %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + desc="T1-weighted (UNI) Image (opt)", exists=True, argstr="--inT1weighted %s" + ) + inFilter = File(desc="Filter Image (opt)", exists=True, argstr="--inFilter %s") + inSkip = traits.Enum("true", "false", desc="Skip zero values", argstr="--inSkip %s") + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outBrain = traits.Either( traits.Bool, File(), hash_files=False, desc="Brain Mask Image", - argstr="--outBrain %s") + argstr="--outBrain %s", + ) outMasked = traits.Either( traits.Bool, File(), hash_files=False, desc="Masked T1 Map Image", - argstr="--outMasked %s") + argstr="--outMasked %s", + ) outMasked2 = traits.Either( traits.Bool, File(), hash_files=False, desc="Masked T1-weighted Image", - argstr="--outMasked2 %s") + argstr="--outMasked2 %s", + ) outMasked3 = traits.Either( traits.Bool, File(), hash_files=False, desc="Masked Filter Image", - argstr="--outMasked3 %s") + argstr="--outMasked3 %s", + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class JistBrainMp2rageSkullStrippingOutputSpec(TraitedSpec): @@ -730,24 +742,23 @@ class JistBrainMp2rageSkullStripping(SEMLikeCommandLine): output_spec = JistBrainMp2rageSkullStrippingOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.brain.JistBrainMp2rageSkullStripping " _outputs_filenames = { - 'outBrain': 'outBrain.nii', - 'outMasked3': 'outMasked3.nii', - 'outMasked2': 'outMasked2.nii', - 'outMasked': 'outMasked.nii' + "outBrain": "outBrain.nii", + "outMasked3": "outMasked3.nii", + "outMasked2": "outMasked2.nii", + "outMasked": "outMasked.nii", } _redirect_x = True class JistCortexSurfaceMeshInflationInputSpec(CommandLineInputSpec): - inLevelset = File( - desc="Levelset Image", exists=True, argstr="--inLevelset %s") + inLevelset = File(desc="Levelset Image", exists=True, argstr="--inLevelset %s") inSOR = traits.Float(desc="SOR Parameter", argstr="--inSOR %f") - inMean = traits.Float( - desc="Mean Curvature Threshold", argstr="--inMean %f") + inMean = traits.Float(desc="Mean Curvature Threshold", argstr="--inMean %f") inStep = traits.Int(desc="Step Size", argstr="--inStep %d") inMax = traits.Int(desc="Max Iterations", argstr="--inMax %d") inLorentzian = traits.Enum( - "true", "false", desc="Lorentzian Norm", argstr="--inLorentzian %s") + "true", "false", desc="Lorentzian Norm", argstr="--inLorentzian %s" + ) inTopology = traits.Enum( "26/6", "6/26", @@ -758,29 +769,33 @@ class JistCortexSurfaceMeshInflationInputSpec(CommandLineInputSpec): "wco", "no", desc="Topology", - argstr="--inTopology %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + argstr="--inTopology %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outOriginal = traits.Either( traits.Bool, File(), hash_files=False, desc="Original Surface", - argstr="--outOriginal %s") + argstr="--outOriginal %s", + ) outInflated = traits.Either( traits.Bool, File(), hash_files=False, desc="Inflated Surface", - argstr="--outInflated %s") + argstr="--outInflated %s", + ) null = traits.Str(desc="Execution Time", 
argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class JistCortexSurfaceMeshInflationOutputSpec(TraitedSpec): @@ -805,52 +820,40 @@ class JistCortexSurfaceMeshInflation(SEMLikeCommandLine): input_spec = JistCortexSurfaceMeshInflationInputSpec output_spec = JistCortexSurfaceMeshInflationOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.cortex.JistCortexSurfaceMeshInflation " - _outputs_filenames = { - 'outOriginal': 'outOriginal', - 'outInflated': 'outInflated' - } + _outputs_filenames = {"outOriginal": "outOriginal", "outInflated": "outInflated"} _redirect_x = True class RandomVolInputSpec(CommandLineInputSpec): - inSize = traits.Int( - desc="Size of Volume in X direction", argstr="--inSize %d") - inSize2 = traits.Int( - desc="Size of Volume in Y direction", argstr="--inSize2 %d") - inSize3 = traits.Int( - desc="Size of Volume in Z direction", argstr="--inSize3 %d") - inSize4 = traits.Int( - desc="Size of Volume in t direction", argstr="--inSize4 %d") + inSize = traits.Int(desc="Size of Volume in X direction", argstr="--inSize %d") + inSize2 = traits.Int(desc="Size of Volume in Y direction", argstr="--inSize2 %d") + inSize3 = traits.Int(desc="Size of Volume in Z direction", argstr="--inSize3 %d") + inSize4 = traits.Int(desc="Size of Volume in t direction", argstr="--inSize4 %d") inStandard = traits.Int( - desc="Standard Deviation for Normal Distribution", - argstr="--inStandard %d") + desc="Standard Deviation for Normal Distribution", argstr="--inStandard %d" + ) inLambda = traits.Float( - desc="Lambda Value for Exponential Distribution", - argstr="--inLambda %f") + desc="Lambda Value for Exponential Distribution", argstr="--inLambda %f" + ) inMaximum = traits.Int(desc="Maximum Value", argstr="--inMaximum %d") inMinimum = traits.Int(desc="Minimum Value", argstr="--inMinimum %d") inField = traits.Enum( - "Uniform", - "Normal", - "Exponential", - desc="Field", - argstr="--inField %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + "Uniform", "Normal", "Exponential", desc="Field", argstr="--inField %s" + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outRand1 = traits.Either( - traits.Bool, - File(), - hash_files=False, - desc="Rand1", - argstr="--outRand1 %s") + traits.Bool, File(), hash_files=False, desc="Rand1", argstr="--outRand1 %s" + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class RandomVolOutputSpec(TraitedSpec): @@ -873,7 +876,7 @@ class RandomVol(SEMLikeCommandLine): input_spec = RandomVolInputSpec output_spec = RandomVolOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.bme.smile.demo.RandomVol " - _outputs_filenames = {'outRand1': 'outRand1.nii'} + _outputs_filenames = {"outRand1": "outRand1.nii"} _redirect_x = True @@ -888,23 +891,26 @@ class MedicAlgorithmImageCalculatorInputSpec(CommandLineInputSpec): "Min", "Max", desc="Operation", - argstr="--inOperation %s") - xPrefExt = 
traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + argstr="--inOperation %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outResult = traits.Either( traits.Bool, File(), hash_files=False, desc="Result Volume", - argstr="--outResult %s") + argstr="--outResult %s", + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class MedicAlgorithmImageCalculatorOutputSpec(TraitedSpec): @@ -927,44 +933,41 @@ class MedicAlgorithmImageCalculator(SEMLikeCommandLine): input_spec = MedicAlgorithmImageCalculatorInputSpec output_spec = MedicAlgorithmImageCalculatorOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.ece.iacl.plugins.utilities.math.MedicAlgorithmImageCalculator " - _outputs_filenames = {'outResult': 'outResult.nii'} + _outputs_filenames = {"outResult": "outResult.nii"} _redirect_x = True class JistBrainMp2rageDuraEstimationInputSpec(CommandLineInputSpec): inSecond = File( - desc="Second inversion (Inv2) Image", - exists=True, - argstr="--inSecond %s") - inSkull = File( - desc="Skull Stripping Mask", exists=True, argstr="--inSkull %s") + desc="Second inversion (Inv2) Image", exists=True, argstr="--inSecond %s" + ) + inSkull = File(desc="Skull Stripping Mask", exists=True, argstr="--inSkull %s") inDistance = traits.Float( - desc="Distance to background (mm)", argstr="--inDistance %f") + desc="Distance to background (mm)", argstr="--inDistance %f" + ) inoutput = traits.Enum( "dura_region", "boundary", "dura_prior", "bg_prior", "intens_prior", - desc= - "Outputs an estimate of the dura / CSF boundary or an estimate of the entire dura region.", - argstr="--inoutput %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + desc="Outputs an estimate of the dura / CSF boundary or an estimate of the entire dura region.", + argstr="--inoutput %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outDura = traits.Either( - traits.Bool, - File(), - hash_files=False, - desc="Dura Image", - argstr="--outDura %s") + traits.Bool, File(), hash_files=False, desc="Dura Image", argstr="--outDura %s" + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class JistBrainMp2rageDuraEstimationOutputSpec(TraitedSpec): @@ -985,39 +988,39 @@ class JistBrainMp2rageDuraEstimation(SEMLikeCommandLine): input_spec = JistBrainMp2rageDuraEstimationInputSpec output_spec = JistBrainMp2rageDuraEstimationOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.brain.JistBrainMp2rageDuraEstimation " - _outputs_filenames = {'outDura': 'outDura.nii'} + _outputs_filenames = {"outDura": "outDura.nii"} _redirect_x = True class JistLaminarProfileSamplingInputSpec(CommandLineInputSpec): - inProfile = File( - desc="Profile Surface Image", exists=True, argstr="--inProfile %s") - inIntensity = File( - desc="Intensity Image", exists=True, argstr="--inIntensity 
%s") - inCortex = File( - desc="Cortex Mask (opt)", exists=True, argstr="--inCortex %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + inProfile = File(desc="Profile Surface Image", exists=True, argstr="--inProfile %s") + inIntensity = File(desc="Intensity Image", exists=True, argstr="--inIntensity %s") + inCortex = File(desc="Cortex Mask (opt)", exists=True, argstr="--inCortex %s") + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outProfilemapped = traits.Either( traits.Bool, File(), hash_files=False, desc="Profile-mapped Intensity Image", - argstr="--outProfilemapped %s") + argstr="--outProfilemapped %s", + ) outProfile2 = traits.Either( traits.Bool, File(), hash_files=False, desc="Profile 4D Mask", - argstr="--outProfile2 %s") + argstr="--outProfile2 %s", + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class JistLaminarProfileSamplingOutputSpec(TraitedSpec): @@ -1040,15 +1043,14 @@ class JistLaminarProfileSampling(SEMLikeCommandLine): output_spec = JistLaminarProfileSamplingOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.laminar.JistLaminarProfileSampling " _outputs_filenames = { - 'outProfile2': 'outProfile2.nii', - 'outProfilemapped': 'outProfilemapped.nii' + "outProfile2": "outProfile2.nii", + "outProfilemapped": "outProfilemapped.nii", } _redirect_x = True class MedicAlgorithmMipavReorientInputSpec(CommandLineInputSpec): - inSource = InputMultiPath( - File, desc="Source", sep=";", argstr="--inSource %s") + inSource = InputMultiPath(File, desc="Source", sep=";", argstr="--inSource %s") inTemplate = File(desc="Template", exists=True, argstr="--inTemplate %s") inNew = traits.Enum( "Dicom axial", @@ -1056,7 +1058,8 @@ class MedicAlgorithmMipavReorientInputSpec(CommandLineInputSpec): "Dicom sagittal", "User defined", desc="New image orientation", - argstr="--inNew %s") + argstr="--inNew %s", + ) inUser = traits.Enum( "Unknown", "Patient Right to Left", @@ -1066,7 +1069,8 @@ class MedicAlgorithmMipavReorientInputSpec(CommandLineInputSpec): "Patient Inferior to Superior", "Patient Superior to Inferior", desc="User defined X-axis orientation (image left to right)", - argstr="--inUser %s") + argstr="--inUser %s", + ) inUser2 = traits.Enum( "Unknown", "Patient Right to Left", @@ -1076,7 +1080,8 @@ class MedicAlgorithmMipavReorientInputSpec(CommandLineInputSpec): "Patient Inferior to Superior", "Patient Superior to Inferior", desc="User defined Y-axis orientation (image top to bottom)", - argstr="--inUser2 %s") + argstr="--inUser2 %s", + ) inUser3 = traits.Enum( "Unknown", "Patient Right to Left", @@ -1086,14 +1091,16 @@ class MedicAlgorithmMipavReorientInputSpec(CommandLineInputSpec): "Patient Inferior to Superior", "Patient Superior to Inferior", desc="User defined Z-axis orientation (into the screen)", - argstr="--inUser3 %s") + argstr="--inUser3 %s", + ) inUser4 = traits.Enum( "Axial", "Coronal", "Sagittal", "Unknown", desc="User defined Image Orientation", - argstr="--inUser4 %s") + argstr="--inUser4 %s", + ) inInterpolation = traits.Enum( "Nearest Neighbor", "Trilinear", @@ -1104,26 +1111,30 @@ class MedicAlgorithmMipavReorientInputSpec(CommandLineInputSpec): "Heptic 
Lagrangian", "Windowed Sinc", desc="Interpolation", - argstr="--inInterpolation %s") + argstr="--inInterpolation %s", + ) inResolution = traits.Enum( "Unchanged", "Finest cubic", "Coarsest cubic", "Same as template", desc="Resolution", - argstr="--inResolution %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + argstr="--inResolution %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outReoriented = InputMultiPath( - File, desc="Reoriented Volume", sep=";", argstr="--outReoriented %s") + File, desc="Reoriented Volume", sep=";", argstr="--outReoriented %s" + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class MedicAlgorithmMipavReorientOutputSpec(TraitedSpec): @@ -1150,86 +1161,91 @@ class MedicAlgorithmMipavReorient(SEMLikeCommandLine): class MedicAlgorithmSPECTRE2010InputSpec(CommandLineInputSpec): inInput = File( - desc="Input volume to be skullstripped.", - exists=True, - argstr="--inInput %s") + desc="Input volume to be skullstripped.", exists=True, argstr="--inInput %s" + ) inAtlas = File( - desc= - "SPECTRE atlas description file. A text file enumerating atlas files and landmarks.", + desc="SPECTRE atlas description file. A text file enumerating atlas files and landmarks.", exists=True, - argstr="--inAtlas %s") + argstr="--inAtlas %s", + ) inInitial = traits.Int( - desc= - "Erosion of the inital mask, which is based on the probability mask and the classification., The initial mask is ouput as the d0 volume at the conclusion of SPECTRE.", - argstr="--inInitial %d") + desc="Erosion of the inital mask, which is based on the probability mask and the classification., The initial mask is ouput as the d0 volume at the conclusion of SPECTRE.", + argstr="--inInitial %d", + ) inImage = traits.Enum( "T1_SPGR", "T1_ALT", "T1_MPRAGE", "T2", "FLAIR", - desc= - "Set the image modality. MP-RAGE is recommended for most T1 sequence images.", - argstr="--inImage %s") + desc="Set the image modality. 
MP-RAGE is recommended for most T1 sequence images.", + argstr="--inImage %s", + ) inOutput = traits.Enum( "true", "false", - desc= - "Determines if the output results are transformed back into the space of the original input image.", - argstr="--inOutput %s") + desc="Determines if the output results are transformed back into the space of the original input image.", + argstr="--inOutput %s", + ) inFind = traits.Enum( - "true", "false", desc="Find Midsagittal Plane", argstr="--inFind %s") + "true", "false", desc="Find Midsagittal Plane", argstr="--inFind %s" + ) inRun = traits.Enum( - "true", "false", desc="Run Smooth Brain Mask", argstr="--inRun %s") + "true", "false", desc="Run Smooth Brain Mask", argstr="--inRun %s" + ) inResample = traits.Enum( "true", "false", - desc= - "Determines if the data is resampled to be isotropic during the processing.", - argstr="--inResample %s") + desc="Determines if the data is resampled to be isotropic during the processing.", + argstr="--inResample %s", + ) inInitial2 = traits.Float( - desc="Initial probability threshold", argstr="--inInitial2 %f") + desc="Initial probability threshold", argstr="--inInitial2 %f" + ) inMinimum = traits.Float( - desc="Minimum probability threshold", argstr="--inMinimum %f") + desc="Minimum probability threshold", argstr="--inMinimum %f" + ) inMMC = traits.Int( - desc= - "The size of the dilation step within the Modified Morphological Closing.", - argstr="--inMMC %d") + desc="The size of the dilation step within the Modified Morphological Closing.", + argstr="--inMMC %d", + ) inMMC2 = traits.Int( - desc= - "The size of the erosion step within the Modified Morphological Closing.", - argstr="--inMMC2 %d") + desc="The size of the erosion step within the Modified Morphological Closing.", + argstr="--inMMC2 %d", + ) inInhomogeneity = traits.Enum( "true", "false", - desc= - "Set to false by default, this parameter will make FANTASM try to do inhomogeneity correction during its iterative cycle.", - argstr="--inInhomogeneity %s") + desc="Set to false by default, this parameter will make FANTASM try to do inhomogeneity correction during its iterative cycle.", + argstr="--inInhomogeneity %s", + ) inSmoothing = traits.Float(argstr="--inSmoothing %f") inBackground = traits.Float(argstr="--inBackground %f") inOutput2 = traits.Enum( - "true", "false", desc="Output Plane?", argstr="--inOutput2 %s") + "true", "false", desc="Output Plane?", argstr="--inOutput2 %s" + ) inOutput3 = traits.Enum( - "true", "false", desc="Output Split-Halves?", argstr="--inOutput3 %s") + "true", "false", desc="Output Split-Halves?", argstr="--inOutput3 %s" + ) inOutput4 = traits.Enum( - "true", - "false", - desc="Output Segmentation on Plane?", - argstr="--inOutput4 %s") + "true", "false", desc="Output Segmentation on Plane?", argstr="--inOutput4 %s" + ) inDegrees = traits.Enum( "Rigid - 6", "Global rescale - 7", "Specific rescale - 9", "Affine - 12", desc="Degrees of freedom", - argstr="--inDegrees %s") + argstr="--inDegrees %s", + ) inCost = traits.Enum( "Correlation ratio", "Least squares", "Normalized cross correlation", "Normalized mutual information", desc="Cost function", - argstr="--inCost %s") + argstr="--inCost %s", + ) inRegistration = traits.Enum( "Trilinear", "Bspline 3rd order", @@ -1239,7 +1255,8 @@ class MedicAlgorithmSPECTRE2010InputSpec(CommandLineInputSpec): "Heptic Lagrangian", "Windowed sinc", desc="Registration interpolation", - argstr="--inRegistration %s") + argstr="--inRegistration %s", + ) inOutput5 = traits.Enum( "Trilinear", 
"Bspline 3rd order", @@ -1250,137 +1267,145 @@ class MedicAlgorithmSPECTRE2010InputSpec(CommandLineInputSpec): "Windowed sinc", "Nearest Neighbor", desc="Output interpolation", - argstr="--inOutput5 %s") + argstr="--inOutput5 %s", + ) inApply = traits.Enum( - "All", "X", "Y", "Z", desc="Apply rotation", argstr="--inApply %s") + "All", "X", "Y", "Z", desc="Apply rotation", argstr="--inApply %s" + ) inMinimum2 = traits.Float(desc="Minimum angle", argstr="--inMinimum2 %f") inMaximum = traits.Float(desc="Maximum angle", argstr="--inMaximum %f") - inCoarse = traits.Float( - desc="Coarse angle increment", argstr="--inCoarse %f") + inCoarse = traits.Float(desc="Coarse angle increment", argstr="--inCoarse %f") inFine = traits.Float(desc="Fine angle increment", argstr="--inFine %f") inMultiple = traits.Int( - desc="Multiple of tolerance to bracket the minimum", - argstr="--inMultiple %d") + desc="Multiple of tolerance to bracket the minimum", argstr="--inMultiple %d" + ) inNumber = traits.Int(desc="Number of iterations", argstr="--inNumber %d") inNumber2 = traits.Int( - desc="Number of minima from Level 8 to test at Level 4", - argstr="--inNumber2 %d") + desc="Number of minima from Level 8 to test at Level 4", argstr="--inNumber2 %d" + ) inUse = traits.Enum( "true", "false", - desc= - "Use the max of the min resolutions of the two datasets when resampling", - argstr="--inUse %s") + desc="Use the max of the min resolutions of the two datasets when resampling", + argstr="--inUse %s", + ) inSubsample = traits.Enum( - "true", - "false", - desc="Subsample image for speed", - argstr="--inSubsample %s") + "true", "false", desc="Subsample image for speed", argstr="--inSubsample %s" + ) inSkip = traits.Enum( "true", "false", desc="Skip multilevel search (Assume images are close to alignment)", - argstr="--inSkip %s") + argstr="--inSkip %s", + ) inMultithreading = traits.Enum( "true", "false", - desc= - "Set to false by default, this parameter controls the multithreaded behavior of the linear registration.", - argstr="--inMultithreading %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + desc="Set to false by default, this parameter controls the multithreaded behavior of the linear registration.", + argstr="--inMultithreading %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outOriginal = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "If Output in Original Space Flag is true then outputs the original input volume. Otherwise outputs the axialy reoriented input volume.", - argstr="--outOriginal %s") + desc="If Output in Original Space Flag is true then outputs the original input volume. 
Otherwise outputs the axially reoriented input volume.", + argstr="--outOriginal %s", + ) outStripped = traits.Either( traits.Bool, File(), hash_files=False, desc="Skullstripped result of the input volume with just the brain.", - argstr="--outStripped %s") + argstr="--outStripped %s", + ) outMask = traits.Either( traits.Bool, File(), hash_files=False, desc="Binary Mask of the skullstripped result with just the brain", - argstr="--outMask %s") + argstr="--outMask %s", + ) outPrior = traits.Either( traits.Bool, File(), hash_files=False, desc="Probability prior from the atlas registrations", - argstr="--outPrior %s") + argstr="--outPrior %s", + ) outFANTASM = traits.Either( traits.Bool, File(), hash_files=False, desc="Tissue classification of the whole input volume.", - argstr="--outFANTASM %s") + argstr="--outFANTASM %s", + ) outd0 = traits.Either( traits.Bool, File(), hash_files=False, desc="Initial Brainmask", - argstr="--outd0 %s") + argstr="--outd0 %s", + ) outMidsagittal = traits.Either( traits.Bool, File(), hash_files=False, desc="Plane dividing the brain hemispheres", - argstr="--outMidsagittal %s") + argstr="--outMidsagittal %s", + ) outSplitHalves = traits.Either( traits.Bool, File(), hash_files=False, desc="Skullstripped mask of the brain with the hemispheres divided.", - argstr="--outSplitHalves %s") + argstr="--outSplitHalves %s", + ) outSegmentation = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "2D image showing the tissue classification on the midsagittal plane", - argstr="--outSegmentation %s") + desc="2D image showing the tissue classification on the midsagittal plane", + argstr="--outSegmentation %s", + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class MedicAlgorithmSPECTRE2010OutputSpec(TraitedSpec): outOriginal = File( - desc= - "If Output in Original Space Flag is true then outputs the original input volume. Otherwise outputs the axially reoriented input volume.", - exists=True) + desc="If Output in Original Space Flag is true then outputs the original input volume. 
Otherwise outputs the axially reoriented input volume.", + exists=True, + ) outStripped = File( desc="Skullstripped result of the input volume with just the brain.", - exists=True) + exists=True, + ) outMask = File( - desc="Binary Mask of the skullstripped result with just the brain", - exists=True) - outPrior = File( - desc="Probability prior from the atlas registrations", exists=True) + desc="Binary Mask of the skullstripped result with just the brain", exists=True + ) + outPrior = File(desc="Probability prior from the atlas registrations", exists=True) outFANTASM = File( - desc="Tissue classification of the whole input volume.", - exists=True) + desc="Tissue classification of the whole input volume.", exists=True + ) outd0 = File(desc="Initial Brainmask", exists=True) - outMidsagittal = File( - desc="Plane dividing the brain hemispheres", exists=True) + outMidsagittal = File(desc="Plane dividing the brain hemispheres", exists=True) outSplitHalves = File( desc="Skullstripped mask of the brain with the hemispheres divided.", - exists=True) + exists=True, + ) outSegmentation = File( - desc= - "2D image showing the tissue classification on the midsagittal plane", - exists=True) + desc="2D image showing the tissue classification on the midsagittal plane", + exists=True, + ) class MedicAlgorithmSPECTRE2010(SEMLikeCommandLine): @@ -1409,15 +1434,15 @@ class MedicAlgorithmSPECTRE2010(SEMLikeCommandLine): output_spec = MedicAlgorithmSPECTRE2010OutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.ece.iacl.plugins.segmentation.skull_strip.MedicAlgorithmSPECTRE2010 " _outputs_filenames = { - 'outd0': 'outd0.nii', - 'outOriginal': 'outOriginal.nii', - 'outMask': 'outMask.nii', - 'outSplitHalves': 'outSplitHalves.nii', - 'outMidsagittal': 'outMidsagittal.nii', - 'outPrior': 'outPrior.nii', - 'outFANTASM': 'outFANTASM.nii', - 'outSegmentation': 'outSegmentation.nii', - 'outStripped': 'outStripped.nii' + "outd0": "outd0.nii", + "outOriginal": "outOriginal.nii", + "outMask": "outMask.nii", + "outSplitHalves": "outSplitHalves.nii", + "outMidsagittal": "outMidsagittal.nii", + "outPrior": "outPrior.nii", + "outFANTASM": "outFANTASM.nii", + "outSegmentation": "outSegmentation.nii", + "outStripped": "outStripped.nii", } _redirect_x = True @@ -1428,27 +1453,30 @@ class JistBrainPartialVolumeFilterInputSpec(CommandLineInputSpec): "bright", "dark", "both", - desc= - "Outputs the raw intensity values or a probability score for the partial volume regions.", - argstr="--inPV %s") + desc="Outputs the raw intensity values or a probability score for the partial volume regions.", + argstr="--inPV %s", + ) inoutput = traits.Enum( - "probability", "intensity", desc="output", argstr="--inoutput %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + "probability", "intensity", desc="output", argstr="--inoutput %s" + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outPartial = traits.Either( traits.Bool, File(), hash_files=False, desc="Partial Volume Image", - argstr="--outPartial %s") + argstr="--outPartial %s", + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class JistBrainPartialVolumeFilterOutputSpec(TraitedSpec): 
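All of the JIST/MIPAV interfaces reformatted in this diff follow the same SEMLikeCommandLine pattern: each out* trait is a traits.Either(traits.Bool, File()), and passing True makes the interface pick an automatic filename from the class's _outputs_filenames mapping, while a string sets an explicit output path. A minimal usage sketch, assuming a working MIPAV/JIST installation reachable through the java launcher; the file names below are hypothetical:

    from nipype.interfaces.mipav import MedicAlgorithmSPECTRE2010

    spectre = MedicAlgorithmSPECTRE2010()
    spectre.inputs.inInput = "subject_t1.nii"      # hypothetical volume to be skullstripped
    spectre.inputs.inAtlas = "spectre_atlas.txt"   # hypothetical SPECTRE atlas description file
    spectre.inputs.outStripped = True              # True -> auto-named "outStripped.nii" via _outputs_filenames
    spectre.inputs.outMask = "brain_mask.nii"      # a string sets an explicit output filename instead
    result = spectre.run()                         # shells out to "java edu.jhu.ece.iacl.jist.cli.run ..."

The Black reformatting below changes only layout, not this behavior.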
@@ -1469,71 +1497,74 @@ class JistBrainPartialVolumeFilter(SEMLikeCommandLine): input_spec = JistBrainPartialVolumeFilterInputSpec output_spec = JistBrainPartialVolumeFilterOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.brain.JistBrainPartialVolumeFilter " - _outputs_filenames = {'outPartial': 'outPartial.nii'} + _outputs_filenames = {"outPartial": "outPartial.nii"} _redirect_x = True class JistIntensityMp2rageMaskingInputSpec(CommandLineInputSpec): inSecond = File( - desc="Second inversion (Inv2) Image", - exists=True, - argstr="--inSecond %s") + desc="Second inversion (Inv2) Image", exists=True, argstr="--inSecond %s" + ) inQuantitative = File( desc="Quantitative T1 Map (T1_Images) Image", exists=True, - argstr="--inQuantitative %s") + argstr="--inQuantitative %s", + ) inT1weighted = File( - desc="T1-weighted (UNI) Image", - exists=True, - argstr="--inT1weighted %s") + desc="T1-weighted (UNI) Image", exists=True, argstr="--inT1weighted %s" + ) inBackground = traits.Enum( "exponential", "half-normal", - desc= - "Model distribution for background noise (default is half-normal, exponential is more stringent).", - argstr="--inBackground %s") - inSkip = traits.Enum( - "true", "false", desc="Skip zero values", argstr="--inSkip %s") + desc="Model distribution for background noise (default is half-normal, exponential is more stringent).", + argstr="--inBackground %s", + ) + inSkip = traits.Enum("true", "false", desc="Skip zero values", argstr="--inSkip %s") inMasking = traits.Enum( "binary", "proba", - desc= - "Whether to use a binary threshold or a weighted average based on the probability.", - argstr="--inMasking %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + desc="Whether to use a binary threshold or a weighted average based on the probability.", + argstr="--inMasking %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outSignal = traits.Either( traits.Bool, File(), hash_files=False, desc="Signal Proba Image", - argstr="--outSignal_Proba %s") + argstr="--outSignal_Proba %s", + ) outSignal2 = traits.Either( traits.Bool, File(), hash_files=False, desc="Signal Mask Image", - argstr="--outSignal_Mask %s") + argstr="--outSignal_Mask %s", + ) outMasked = traits.Either( traits.Bool, File(), hash_files=False, desc="Masked T1 Map Image", - argstr="--outMasked_T1_Map %s") + argstr="--outMasked_T1_Map %s", + ) outMasked2 = traits.Either( traits.Bool, File(), hash_files=False, desc="Masked Iso Image", - argstr="--outMasked_T1weighted %s") + argstr="--outMasked_T1weighted %s", + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class JistIntensityMp2rageMaskingOutputSpec(TraitedSpec): @@ -1558,38 +1589,38 @@ class JistIntensityMp2rageMasking(SEMLikeCommandLine): output_spec = JistIntensityMp2rageMaskingOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.intensity.JistIntensityMp2rageMasking " _outputs_filenames = { - 'outSignal2': 'outSignal2.nii', - 'outSignal': 'outSignal.nii', - 'outMasked2': 'outMasked2.nii', - 'outMasked': 'outMasked.nii' + "outSignal2": "outSignal2.nii", + "outSignal": "outSignal.nii", + "outMasked2": "outMasked2.nii", + "outMasked": 
"outMasked.nii", } _redirect_x = True class MedicAlgorithmThresholdToBinaryMaskInputSpec(CommandLineInputSpec): - inLabel = InputMultiPath( - File, desc="Input volumes", sep=";", argstr="--inLabel %s") - inMinimum = traits.Float( - desc="Minimum threshold value.", argstr="--inMinimum %f") - inMaximum = traits.Float( - desc="Maximum threshold value.", argstr="--inMaximum %f") + inLabel = InputMultiPath(File, desc="Input volumes", sep=";", argstr="--inLabel %s") + inMinimum = traits.Float(desc="Minimum threshold value.", argstr="--inMinimum %f") + inMaximum = traits.Float(desc="Maximum threshold value.", argstr="--inMaximum %f") inUse = traits.Enum( "true", "false", desc="Use the images max intensity as the max value of the range.", - argstr="--inUse %s") - xPrefExt = traits.Enum( - "nrrd", desc="Output File Type", argstr="--xPrefExt %s") + argstr="--inUse %s", + ) + xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outBinary = InputMultiPath( - File, desc="Binary Mask", sep=";", argstr="--outBinary %s") + File, desc="Binary Mask", sep=";", argstr="--outBinary %s" + ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( - desc="Set default maximum heap size", argstr="-xDefaultMem %d") + desc="Set default maximum heap size", argstr="-xDefaultMem %d" + ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", - usedefault=True) + usedefault=True, + ) class MedicAlgorithmThresholdToBinaryMaskOutputSpec(TraitedSpec): diff --git a/nipype/interfaces/mipav/generate_classes.py b/nipype/interfaces/mipav/generate_classes.py index df48a2d2da..55f0f6a5db 100644 --- a/nipype/interfaces/mipav/generate_classes.py +++ b/nipype/interfaces/mipav/generate_classes.py @@ -7,49 +7,54 @@ # every tool in the modules list must be found on the default path # AND calling the module with --xml must be supported and compliant. 
modules_list = [ - 'edu.jhu.bme.smile.demo.RandomVol', - 'de.mpg.cbs.jist.laminar.JistLaminarProfileCalculator', - 'de.mpg.cbs.jist.laminar.JistLaminarProfileSampling', - 'de.mpg.cbs.jist.laminar.JistLaminarROIAveraging', - 'de.mpg.cbs.jist.laminar.JistLaminarVolumetricLayering', - 'de.mpg.cbs.jist.laminar.JistLaminarProfileGeometry', - 'de.mpg.cbs.jist.brain.JistBrainMgdmSegmentation', - 'de.mpg.cbs.jist.brain.JistBrainMp2rageSkullStripping', - 'de.mpg.cbs.jist.brain.JistBrainPartialVolumeFilter', - 'de.mpg.cbs.jist.brain.JistBrainMp2rageDuraEstimation' + "edu.jhu.bme.smile.demo.RandomVol", + "de.mpg.cbs.jist.laminar.JistLaminarProfileCalculator", + "de.mpg.cbs.jist.laminar.JistLaminarProfileSampling", + "de.mpg.cbs.jist.laminar.JistLaminarROIAveraging", + "de.mpg.cbs.jist.laminar.JistLaminarVolumetricLayering", + "de.mpg.cbs.jist.laminar.JistLaminarProfileGeometry", + "de.mpg.cbs.jist.brain.JistBrainMgdmSegmentation", + "de.mpg.cbs.jist.brain.JistBrainMp2rageSkullStripping", + "de.mpg.cbs.jist.brain.JistBrainPartialVolumeFilter", + "de.mpg.cbs.jist.brain.JistBrainMp2rageDuraEstimation", ] modules_from_chris = [ - 'edu.jhu.ece.iacl.plugins.segmentation.skull_strip.MedicAlgorithmSPECTRE2010', - 'edu.jhu.ece.iacl.plugins.utilities.volume.MedicAlgorithmMipavReorient', - 'edu.jhu.ece.iacl.plugins.utilities.math.MedicAlgorithmImageCalculator', - 'de.mpg.cbs.jist.brain.JistBrainMp2rageDuraEstimation', - 'de.mpg.cbs.jist.brain.JistBrainPartialVolumeFilter', - 'edu.jhu.ece.iacl.plugins.utilities.volume.MedicAlgorithmThresholdToBinaryMask', + "edu.jhu.ece.iacl.plugins.segmentation.skull_strip.MedicAlgorithmSPECTRE2010", + "edu.jhu.ece.iacl.plugins.utilities.volume.MedicAlgorithmMipavReorient", + "edu.jhu.ece.iacl.plugins.utilities.math.MedicAlgorithmImageCalculator", + "de.mpg.cbs.jist.brain.JistBrainMp2rageDuraEstimation", + "de.mpg.cbs.jist.brain.JistBrainPartialVolumeFilter", + "edu.jhu.ece.iacl.plugins.utilities.volume.MedicAlgorithmThresholdToBinaryMask", # 'de.mpg.cbs.jist.cortex.JistCortexFullCRUISE', # waiting for http://www.nitrc.org/tracker/index.php?func=detail&aid=7236&group_id=228&atid=942 to be fixed - 'de.mpg.cbs.jist.cortex.JistCortexSurfaceMeshInflation' + "de.mpg.cbs.jist.cortex.JistCortexSurfaceMeshInflation", ] modules_from_julia = [ - 'de.mpg.cbs.jist.intensity.JistIntensityMp2rageMasking', - 'edu.jhu.ece.iacl.plugins.segmentation.skull_strip.MedicAlgorithmSPECTRE2010' + "de.mpg.cbs.jist.intensity.JistIntensityMp2rageMasking", + "edu.jhu.ece.iacl.plugins.segmentation.skull_strip.MedicAlgorithmSPECTRE2010", ] modules_from_leonie = [ - 'edu.jhu.ece.iacl.plugins.classification.MedicAlgorithmLesionToads' + "edu.jhu.ece.iacl.plugins.classification.MedicAlgorithmLesionToads" ] modules_from_yasinyazici = [ - 'edu.jhu.ece.iacl.plugins.classification.MedicAlgorithmN3' + "edu.jhu.ece.iacl.plugins.classification.MedicAlgorithmN3" ] modules_list = list( - set(modules_list).union(modules_from_chris).union(modules_from_leonie) - .union(modules_from_julia).union(modules_from_yasinyazici).union( - modules_list)) + set(modules_list) + .union(modules_from_chris) + .union(modules_from_leonie) + .union(modules_from_julia) + .union(modules_from_yasinyazici) + .union(modules_list) + ) generate_all_classes( modules_list=modules_list, launcher=["java edu.jhu.ece.iacl.jist.cli.run"], redirect_x=True, - mipav_hacks=True) + mipav_hacks=True, + ) diff --git a/nipype/interfaces/mipav/tests/test_auto_JistBrainMgdmSegmentation.py b/nipype/interfaces/mipav/tests/test_auto_JistBrainMgdmSegmentation.py index 
70ff508dfb..d97c5d904b 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistBrainMgdmSegmentation.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistBrainMgdmSegmentation.py @@ -4,80 +4,46 @@ def test_JistBrainMgdmSegmentation_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inAdjust=dict(argstr='--inAdjust %s', ), - inAtlas=dict( - argstr='--inAtlas %s', - extensions=None, - ), - inCompute=dict(argstr='--inCompute %s', ), - inCurvature=dict(argstr='--inCurvature %f', ), - inData=dict(argstr='--inData %f', ), - inFLAIR=dict( - argstr='--inFLAIR %s', - extensions=None, - ), - inMP2RAGE=dict( - argstr='--inMP2RAGE %s', - extensions=None, - ), - inMP2RAGE2=dict( - argstr='--inMP2RAGE2 %s', - extensions=None, - ), - inMPRAGE=dict( - argstr='--inMPRAGE %s', - extensions=None, - ), - inMax=dict(argstr='--inMax %d', ), - inMin=dict(argstr='--inMin %f', ), - inOutput=dict(argstr='--inOutput %s', ), - inPV=dict( - argstr='--inPV %s', - extensions=None, - ), - inPosterior=dict(argstr='--inPosterior %f', ), - inSteps=dict(argstr='--inSteps %d', ), - inTopology=dict(argstr='--inTopology %s', ), - null=dict(argstr='--null %s', ), - outLevelset=dict( - argstr='--outLevelset %s', - hash_files=False, - ), - outPosterior2=dict( - argstr='--outPosterior2 %s', - hash_files=False, - ), - outPosterior3=dict( - argstr='--outPosterior3 %s', - hash_files=False, - ), - outSegmented=dict( - argstr='--outSegmented %s', - hash_files=False, - ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), - xMaxProcess=dict( - argstr='-xMaxProcess %d', - usedefault=True, - ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inAdjust=dict(argstr="--inAdjust %s",), + inAtlas=dict(argstr="--inAtlas %s", extensions=None,), + inCompute=dict(argstr="--inCompute %s",), + inCurvature=dict(argstr="--inCurvature %f",), + inData=dict(argstr="--inData %f",), + inFLAIR=dict(argstr="--inFLAIR %s", extensions=None,), + inMP2RAGE=dict(argstr="--inMP2RAGE %s", extensions=None,), + inMP2RAGE2=dict(argstr="--inMP2RAGE2 %s", extensions=None,), + inMPRAGE=dict(argstr="--inMPRAGE %s", extensions=None,), + inMax=dict(argstr="--inMax %d",), + inMin=dict(argstr="--inMin %f",), + inOutput=dict(argstr="--inOutput %s",), + inPV=dict(argstr="--inPV %s", extensions=None,), + inPosterior=dict(argstr="--inPosterior %f",), + inSteps=dict(argstr="--inSteps %d",), + inTopology=dict(argstr="--inTopology %s",), + null=dict(argstr="--null %s",), + outLevelset=dict(argstr="--outLevelset %s", hash_files=False,), + outPosterior2=dict(argstr="--outPosterior2 %s", hash_files=False,), + outPosterior3=dict(argstr="--outPosterior3 %s", hash_files=False,), + outSegmented=dict(argstr="--outSegmented %s", hash_files=False,), + xDefaultMem=dict(argstr="-xDefaultMem %d",), + xMaxProcess=dict(argstr="-xMaxProcess %d", usedefault=True,), + xPrefExt=dict(argstr="--xPrefExt %s",), ) inputs = JistBrainMgdmSegmentation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistBrainMgdmSegmentation_outputs(): output_map = dict( - outLevelset=dict(extensions=None, ), - outPosterior2=dict(extensions=None, ), - outPosterior3=dict(extensions=None, ), - outSegmented=dict(extensions=None, ), + outLevelset=dict(extensions=None,), + outPosterior2=dict(extensions=None,), + outPosterior3=dict(extensions=None,), + 
outSegmented=dict(extensions=None,), ) outputs = JistBrainMgdmSegmentation.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_JistBrainMp2rageDuraEstimation.py b/nipype/interfaces/mipav/tests/test_auto_JistBrainMp2rageDuraEstimation.py index 3fcbeaf418..fa55aa0d75 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistBrainMp2rageDuraEstimation.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistBrainMp2rageDuraEstimation.py @@ -4,40 +4,27 @@ def test_JistBrainMp2rageDuraEstimation_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inDistance=dict(argstr='--inDistance %f', ), - inSecond=dict( - argstr='--inSecond %s', - extensions=None, - ), - inSkull=dict( - argstr='--inSkull %s', - extensions=None, - ), - inoutput=dict(argstr='--inoutput %s', ), - null=dict(argstr='--null %s', ), - outDura=dict( - argstr='--outDura %s', - hash_files=False, - ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), - xMaxProcess=dict( - argstr='-xMaxProcess %d', - usedefault=True, - ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inDistance=dict(argstr="--inDistance %f",), + inSecond=dict(argstr="--inSecond %s", extensions=None,), + inSkull=dict(argstr="--inSkull %s", extensions=None,), + inoutput=dict(argstr="--inoutput %s",), + null=dict(argstr="--null %s",), + outDura=dict(argstr="--outDura %s", hash_files=False,), + xDefaultMem=dict(argstr="-xDefaultMem %d",), + xMaxProcess=dict(argstr="-xMaxProcess %d", usedefault=True,), + xPrefExt=dict(argstr="--xPrefExt %s",), ) inputs = JistBrainMp2rageDuraEstimation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistBrainMp2rageDuraEstimation_outputs(): - output_map = dict(outDura=dict(extensions=None, ), ) + output_map = dict(outDura=dict(extensions=None,),) outputs = JistBrainMp2rageDuraEstimation.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mipav/tests/test_auto_JistBrainMp2rageSkullStripping.py b/nipype/interfaces/mipav/tests/test_auto_JistBrainMp2rageSkullStripping.py index 378a5eb7bb..5f4a6eb616 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistBrainMp2rageSkullStripping.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistBrainMp2rageSkullStripping.py @@ -4,63 +4,35 @@ def test_JistBrainMp2rageSkullStripping_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inFilter=dict( - argstr='--inFilter %s', - extensions=None, - ), - inSecond=dict( - argstr='--inSecond %s', - extensions=None, - ), - inSkip=dict(argstr='--inSkip %s', ), - inT1=dict( - argstr='--inT1 %s', - extensions=None, - ), - inT1weighted=dict( - argstr='--inT1weighted %s', - extensions=None, - ), - null=dict(argstr='--null %s', ), - outBrain=dict( - argstr='--outBrain %s', - hash_files=False, - ), - outMasked=dict( - argstr='--outMasked %s', - hash_files=False, - ), - outMasked2=dict( - argstr='--outMasked2 %s', - hash_files=False, - ), - outMasked3=dict( - argstr='--outMasked3 %s', - hash_files=False, - ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), - xMaxProcess=dict( - argstr='-xMaxProcess %d', - usedefault=True, - ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inFilter=dict(argstr="--inFilter %s", extensions=None,), + 
inSecond=dict(argstr="--inSecond %s", extensions=None,), + inSkip=dict(argstr="--inSkip %s",), + inT1=dict(argstr="--inT1 %s", extensions=None,), + inT1weighted=dict(argstr="--inT1weighted %s", extensions=None,), + null=dict(argstr="--null %s",), + outBrain=dict(argstr="--outBrain %s", hash_files=False,), + outMasked=dict(argstr="--outMasked %s", hash_files=False,), + outMasked2=dict(argstr="--outMasked2 %s", hash_files=False,), + outMasked3=dict(argstr="--outMasked3 %s", hash_files=False,), + xDefaultMem=dict(argstr="-xDefaultMem %d",), + xMaxProcess=dict(argstr="-xMaxProcess %d", usedefault=True,), + xPrefExt=dict(argstr="--xPrefExt %s",), ) inputs = JistBrainMp2rageSkullStripping.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistBrainMp2rageSkullStripping_outputs(): output_map = dict( - outBrain=dict(extensions=None, ), - outMasked=dict(extensions=None, ), - outMasked2=dict(extensions=None, ), - outMasked3=dict(extensions=None, ), + outBrain=dict(extensions=None,), + outMasked=dict(extensions=None,), + outMasked2=dict(extensions=None,), + outMasked3=dict(extensions=None,), ) outputs = JistBrainMp2rageSkullStripping.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_JistBrainPartialVolumeFilter.py b/nipype/interfaces/mipav/tests/test_auto_JistBrainPartialVolumeFilter.py index 229d58ec2b..016b18f1e7 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistBrainPartialVolumeFilter.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistBrainPartialVolumeFilter.py @@ -4,36 +4,26 @@ def test_JistBrainPartialVolumeFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inInput=dict( - argstr='--inInput %s', - extensions=None, - ), - inPV=dict(argstr='--inPV %s', ), - inoutput=dict(argstr='--inoutput %s', ), - null=dict(argstr='--null %s', ), - outPartial=dict( - argstr='--outPartial %s', - hash_files=False, - ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), - xMaxProcess=dict( - argstr='-xMaxProcess %d', - usedefault=True, - ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inInput=dict(argstr="--inInput %s", extensions=None,), + inPV=dict(argstr="--inPV %s",), + inoutput=dict(argstr="--inoutput %s",), + null=dict(argstr="--null %s",), + outPartial=dict(argstr="--outPartial %s", hash_files=False,), + xDefaultMem=dict(argstr="-xDefaultMem %d",), + xMaxProcess=dict(argstr="-xMaxProcess %d", usedefault=True,), + xPrefExt=dict(argstr="--xPrefExt %s",), ) inputs = JistBrainPartialVolumeFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistBrainPartialVolumeFilter_outputs(): - output_map = dict(outPartial=dict(extensions=None, ), ) + output_map = dict(outPartial=dict(extensions=None,),) outputs = JistBrainPartialVolumeFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mipav/tests/test_auto_JistCortexSurfaceMeshInflation.py b/nipype/interfaces/mipav/tests/test_auto_JistCortexSurfaceMeshInflation.py index 2790fa75b4..88ae68161c 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistCortexSurfaceMeshInflation.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistCortexSurfaceMeshInflation.py @@ -4,46 +4,32 @@ def 
test_JistCortexSurfaceMeshInflation_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inLevelset=dict( - argstr='--inLevelset %s', - extensions=None, - ), - inLorentzian=dict(argstr='--inLorentzian %s', ), - inMax=dict(argstr='--inMax %d', ), - inMean=dict(argstr='--inMean %f', ), - inSOR=dict(argstr='--inSOR %f', ), - inStep=dict(argstr='--inStep %d', ), - inTopology=dict(argstr='--inTopology %s', ), - null=dict(argstr='--null %s', ), - outInflated=dict( - argstr='--outInflated %s', - hash_files=False, - ), - outOriginal=dict( - argstr='--outOriginal %s', - hash_files=False, - ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), - xMaxProcess=dict( - argstr='-xMaxProcess %d', - usedefault=True, - ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inLevelset=dict(argstr="--inLevelset %s", extensions=None,), + inLorentzian=dict(argstr="--inLorentzian %s",), + inMax=dict(argstr="--inMax %d",), + inMean=dict(argstr="--inMean %f",), + inSOR=dict(argstr="--inSOR %f",), + inStep=dict(argstr="--inStep %d",), + inTopology=dict(argstr="--inTopology %s",), + null=dict(argstr="--null %s",), + outInflated=dict(argstr="--outInflated %s", hash_files=False,), + outOriginal=dict(argstr="--outOriginal %s", hash_files=False,), + xDefaultMem=dict(argstr="-xDefaultMem %d",), + xMaxProcess=dict(argstr="-xMaxProcess %d", usedefault=True,), + xPrefExt=dict(argstr="--xPrefExt %s",), ) inputs = JistCortexSurfaceMeshInflation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistCortexSurfaceMeshInflation_outputs(): output_map = dict( - outInflated=dict(extensions=None, ), - outOriginal=dict(extensions=None, ), + outInflated=dict(extensions=None,), outOriginal=dict(extensions=None,), ) outputs = JistCortexSurfaceMeshInflation.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_JistIntensityMp2rageMasking.py b/nipype/interfaces/mipav/tests/test_auto_JistIntensityMp2rageMasking.py index ae294ff2d1..aaab939380 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistIntensityMp2rageMasking.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistIntensityMp2rageMasking.py @@ -4,61 +4,36 @@ def test_JistIntensityMp2rageMasking_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inBackground=dict(argstr='--inBackground %s', ), - inMasking=dict(argstr='--inMasking %s', ), - inQuantitative=dict( - argstr='--inQuantitative %s', - extensions=None, - ), - inSecond=dict( - argstr='--inSecond %s', - extensions=None, - ), - inSkip=dict(argstr='--inSkip %s', ), - inT1weighted=dict( - argstr='--inT1weighted %s', - extensions=None, - ), - null=dict(argstr='--null %s', ), - outMasked=dict( - argstr='--outMasked_T1_Map %s', - hash_files=False, - ), - outMasked2=dict( - argstr='--outMasked_T1weighted %s', - hash_files=False, - ), - outSignal=dict( - argstr='--outSignal_Proba %s', - hash_files=False, - ), - outSignal2=dict( - argstr='--outSignal_Mask %s', - hash_files=False, - ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), - xMaxProcess=dict( - argstr='-xMaxProcess %d', - usedefault=True, - ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inBackground=dict(argstr="--inBackground %s",), + inMasking=dict(argstr="--inMasking %s",), + 
inQuantitative=dict(argstr="--inQuantitative %s", extensions=None,), + inSecond=dict(argstr="--inSecond %s", extensions=None,), + inSkip=dict(argstr="--inSkip %s",), + inT1weighted=dict(argstr="--inT1weighted %s", extensions=None,), + null=dict(argstr="--null %s",), + outMasked=dict(argstr="--outMasked_T1_Map %s", hash_files=False,), + outMasked2=dict(argstr="--outMasked_T1weighted %s", hash_files=False,), + outSignal=dict(argstr="--outSignal_Proba %s", hash_files=False,), + outSignal2=dict(argstr="--outSignal_Mask %s", hash_files=False,), + xDefaultMem=dict(argstr="-xDefaultMem %d",), + xMaxProcess=dict(argstr="-xMaxProcess %d", usedefault=True,), + xPrefExt=dict(argstr="--xPrefExt %s",), ) inputs = JistIntensityMp2rageMasking.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistIntensityMp2rageMasking_outputs(): output_map = dict( - outMasked=dict(extensions=None, ), - outMasked2=dict(extensions=None, ), - outSignal=dict(extensions=None, ), - outSignal2=dict(extensions=None, ), + outMasked=dict(extensions=None,), + outMasked2=dict(extensions=None,), + outSignal=dict(extensions=None,), + outSignal2=dict(extensions=None,), ) outputs = JistIntensityMp2rageMasking.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileCalculator.py b/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileCalculator.py index a9695e39ce..d8447b9773 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileCalculator.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileCalculator.py @@ -4,39 +4,26 @@ def test_JistLaminarProfileCalculator_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inIntensity=dict( - argstr='--inIntensity %s', - extensions=None, - ), - inMask=dict( - argstr='--inMask %s', - extensions=None, - ), - incomputed=dict(argstr='--incomputed %s', ), - null=dict(argstr='--null %s', ), - outResult=dict( - argstr='--outResult %s', - hash_files=False, - ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), - xMaxProcess=dict( - argstr='-xMaxProcess %d', - usedefault=True, - ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inIntensity=dict(argstr="--inIntensity %s", extensions=None,), + inMask=dict(argstr="--inMask %s", extensions=None,), + incomputed=dict(argstr="--incomputed %s",), + null=dict(argstr="--null %s",), + outResult=dict(argstr="--outResult %s", hash_files=False,), + xDefaultMem=dict(argstr="-xDefaultMem %d",), + xMaxProcess=dict(argstr="-xMaxProcess %d", usedefault=True,), + xPrefExt=dict(argstr="--xPrefExt %s",), ) inputs = JistLaminarProfileCalculator.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistLaminarProfileCalculator_outputs(): - output_map = dict(outResult=dict(extensions=None, ), ) + output_map = dict(outResult=dict(extensions=None,),) outputs = JistLaminarProfileCalculator.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileGeometry.py b/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileGeometry.py index 7b0dc2c788..4a0c4f392a 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileGeometry.py +++ 
b/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileGeometry.py @@ -4,38 +4,28 @@ def test_JistLaminarProfileGeometry_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inProfile=dict( - argstr='--inProfile %s', - extensions=None, - ), - incomputed=dict(argstr='--incomputed %s', ), - inoutside=dict(argstr='--inoutside %f', ), - inregularization=dict(argstr='--inregularization %s', ), - insmoothing=dict(argstr='--insmoothing %f', ), - null=dict(argstr='--null %s', ), - outResult=dict( - argstr='--outResult %s', - hash_files=False, - ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), - xMaxProcess=dict( - argstr='-xMaxProcess %d', - usedefault=True, - ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inProfile=dict(argstr="--inProfile %s", extensions=None,), + incomputed=dict(argstr="--incomputed %s",), + inoutside=dict(argstr="--inoutside %f",), + inregularization=dict(argstr="--inregularization %s",), + insmoothing=dict(argstr="--insmoothing %f",), + null=dict(argstr="--null %s",), + outResult=dict(argstr="--outResult %s", hash_files=False,), + xDefaultMem=dict(argstr="-xDefaultMem %d",), + xMaxProcess=dict(argstr="-xMaxProcess %d", usedefault=True,), + xPrefExt=dict(argstr="--xPrefExt %s",), ) inputs = JistLaminarProfileGeometry.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistLaminarProfileGeometry_outputs(): - output_map = dict(outResult=dict(extensions=None, ), ) + output_map = dict(outResult=dict(extensions=None,),) outputs = JistLaminarProfileGeometry.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileSampling.py b/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileSampling.py index 7ef82a5920..3ad28793f0 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileSampling.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileSampling.py @@ -4,48 +4,28 @@ def test_JistLaminarProfileSampling_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inCortex=dict( - argstr='--inCortex %s', - extensions=None, - ), - inIntensity=dict( - argstr='--inIntensity %s', - extensions=None, - ), - inProfile=dict( - argstr='--inProfile %s', - extensions=None, - ), - null=dict(argstr='--null %s', ), - outProfile2=dict( - argstr='--outProfile2 %s', - hash_files=False, - ), - outProfilemapped=dict( - argstr='--outProfilemapped %s', - hash_files=False, - ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), - xMaxProcess=dict( - argstr='-xMaxProcess %d', - usedefault=True, - ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inCortex=dict(argstr="--inCortex %s", extensions=None,), + inIntensity=dict(argstr="--inIntensity %s", extensions=None,), + inProfile=dict(argstr="--inProfile %s", extensions=None,), + null=dict(argstr="--null %s",), + outProfile2=dict(argstr="--outProfile2 %s", hash_files=False,), + outProfilemapped=dict(argstr="--outProfilemapped %s", hash_files=False,), + xDefaultMem=dict(argstr="-xDefaultMem %d",), + xMaxProcess=dict(argstr="-xMaxProcess %d", usedefault=True,), + xPrefExt=dict(argstr="--xPrefExt %s",), ) inputs = JistLaminarProfileSampling.input_spec() for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistLaminarProfileSampling_outputs(): output_map = dict( - outProfile2=dict(extensions=None, ), - outProfilemapped=dict(extensions=None, ), + outProfile2=dict(extensions=None,), outProfilemapped=dict(extensions=None,), ) outputs = JistLaminarProfileSampling.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_JistLaminarROIAveraging.py b/nipype/interfaces/mipav/tests/test_auto_JistLaminarROIAveraging.py index 4536d3a58c..7120db3045 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistLaminarROIAveraging.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistLaminarROIAveraging.py @@ -4,43 +4,27 @@ def test_JistLaminarROIAveraging_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inIntensity=dict( - argstr='--inIntensity %s', - extensions=None, - ), - inMask=dict( - argstr='--inMask %s', - extensions=None, - ), - inROI=dict( - argstr='--inROI %s', - extensions=None, - ), - inROI2=dict(argstr='--inROI2 %s', ), - null=dict(argstr='--null %s', ), - outROI3=dict( - argstr='--outROI3 %s', - hash_files=False, - ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), - xMaxProcess=dict( - argstr='-xMaxProcess %d', - usedefault=True, - ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inIntensity=dict(argstr="--inIntensity %s", extensions=None,), + inMask=dict(argstr="--inMask %s", extensions=None,), + inROI=dict(argstr="--inROI %s", extensions=None,), + inROI2=dict(argstr="--inROI2 %s",), + null=dict(argstr="--null %s",), + outROI3=dict(argstr="--outROI3 %s", hash_files=False,), + xDefaultMem=dict(argstr="-xDefaultMem %d",), + xMaxProcess=dict(argstr="-xMaxProcess %d", usedefault=True,), + xPrefExt=dict(argstr="--xPrefExt %s",), ) inputs = JistLaminarROIAveraging.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistLaminarROIAveraging_outputs(): - output_map = dict(outROI3=dict(extensions=None, ), ) + output_map = dict(outROI3=dict(extensions=None,),) outputs = JistLaminarROIAveraging.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mipav/tests/test_auto_JistLaminarVolumetricLayering.py b/nipype/interfaces/mipav/tests/test_auto_JistLaminarVolumetricLayering.py index 17d44c7cec..7b57e483ef 100644 --- a/nipype/interfaces/mipav/tests/test_auto_JistLaminarVolumetricLayering.py +++ b/nipype/interfaces/mipav/tests/test_auto_JistLaminarVolumetricLayering.py @@ -4,58 +4,39 @@ def test_JistLaminarVolumetricLayering_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inInner=dict( - argstr='--inInner %s', - extensions=None, - ), - inLayering=dict(argstr='--inLayering %s', ), - inLayering2=dict(argstr='--inLayering2 %s', ), - inMax=dict(argstr='--inMax %d', ), - inMin=dict(argstr='--inMin %f', ), - inNumber=dict(argstr='--inNumber %d', ), - inOuter=dict( - argstr='--inOuter %s', - extensions=None, - ), - inTopology=dict(argstr='--inTopology %s', ), - incurvature=dict(argstr='--incurvature %d', ), - inpresmooth=dict(argstr='--inpresmooth %s', ), - inratio=dict(argstr='--inratio %f', ), - null=dict(argstr='--null %s', ), - outContinuous=dict( - argstr='--outContinuous %s', - hash_files=False, - ), - 
outDiscrete=dict( - argstr='--outDiscrete %s', - hash_files=False, - ), - outLayer=dict( - argstr='--outLayer %s', - hash_files=False, - ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), - xMaxProcess=dict( - argstr='-xMaxProcess %d', - usedefault=True, - ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inInner=dict(argstr="--inInner %s", extensions=None,), + inLayering=dict(argstr="--inLayering %s",), + inLayering2=dict(argstr="--inLayering2 %s",), + inMax=dict(argstr="--inMax %d",), + inMin=dict(argstr="--inMin %f",), + inNumber=dict(argstr="--inNumber %d",), + inOuter=dict(argstr="--inOuter %s", extensions=None,), + inTopology=dict(argstr="--inTopology %s",), + incurvature=dict(argstr="--incurvature %d",), + inpresmooth=dict(argstr="--inpresmooth %s",), + inratio=dict(argstr="--inratio %f",), + null=dict(argstr="--null %s",), + outContinuous=dict(argstr="--outContinuous %s", hash_files=False,), + outDiscrete=dict(argstr="--outDiscrete %s", hash_files=False,), + outLayer=dict(argstr="--outLayer %s", hash_files=False,), + xDefaultMem=dict(argstr="-xDefaultMem %d",), + xMaxProcess=dict(argstr="-xMaxProcess %d", usedefault=True,), + xPrefExt=dict(argstr="--xPrefExt %s",), ) inputs = JistLaminarVolumetricLayering.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JistLaminarVolumetricLayering_outputs(): output_map = dict( - outContinuous=dict(extensions=None, ), - outDiscrete=dict(extensions=None, ), - outLayer=dict(extensions=None, ), + outContinuous=dict(extensions=None,), + outDiscrete=dict(extensions=None,), + outLayer=dict(extensions=None,), ) outputs = JistLaminarVolumetricLayering.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmImageCalculator.py b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmImageCalculator.py index 353a521492..9d7ff807d6 100644 --- a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmImageCalculator.py +++ b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmImageCalculator.py @@ -4,39 +4,26 @@ def test_MedicAlgorithmImageCalculator_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inOperation=dict(argstr='--inOperation %s', ), - inVolume=dict( - argstr='--inVolume %s', - extensions=None, - ), - inVolume2=dict( - argstr='--inVolume2 %s', - extensions=None, - ), - null=dict(argstr='--null %s', ), - outResult=dict( - argstr='--outResult %s', - hash_files=False, - ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), - xMaxProcess=dict( - argstr='-xMaxProcess %d', - usedefault=True, - ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inOperation=dict(argstr="--inOperation %s",), + inVolume=dict(argstr="--inVolume %s", extensions=None,), + inVolume2=dict(argstr="--inVolume2 %s", extensions=None,), + null=dict(argstr="--null %s",), + outResult=dict(argstr="--outResult %s", hash_files=False,), + xDefaultMem=dict(argstr="-xDefaultMem %d",), + xMaxProcess=dict(argstr="-xMaxProcess %d", usedefault=True,), + xPrefExt=dict(argstr="--xPrefExt %s",), ) inputs = MedicAlgorithmImageCalculator.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MedicAlgorithmImageCalculator_outputs(): - 
output_map = dict(outResult=dict(extensions=None, ), ) + output_map = dict(outResult=dict(extensions=None,),) outputs = MedicAlgorithmImageCalculator.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmLesionToads.py b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmLesionToads.py index 8024590f92..f9036d1207 100644 --- a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmLesionToads.py +++ b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmLesionToads.py @@ -4,110 +4,61 @@ def test_MedicAlgorithmLesionToads_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inAtlas=dict(argstr='--inAtlas %s', ), - inAtlas2=dict( - argstr='--inAtlas2 %s', - extensions=None, - ), - inAtlas3=dict( - argstr='--inAtlas3 %s', - extensions=None, - ), - inAtlas4=dict( - argstr='--inAtlas4 %s', - extensions=None, - ), - inAtlas5=dict(argstr='--inAtlas5 %f', ), - inAtlas6=dict(argstr='--inAtlas6 %s', ), - inConnectivity=dict(argstr='--inConnectivity %s', ), - inCorrect=dict(argstr='--inCorrect %s', ), - inFLAIR=dict( - argstr='--inFLAIR %s', - extensions=None, - ), - inInclude=dict(argstr='--inInclude %s', ), - inMaximum=dict(argstr='--inMaximum %d', ), - inMaximum2=dict(argstr='--inMaximum2 %d', ), - inMaximum3=dict(argstr='--inMaximum3 %d', ), - inMaximum4=dict(argstr='--inMaximum4 %f', ), - inMaximum5=dict(argstr='--inMaximum5 %d', ), - inOutput=dict(argstr='--inOutput %s', ), - inOutput2=dict(argstr='--inOutput2 %s', ), - inOutput3=dict(argstr='--inOutput3 %s', ), - inSmooting=dict(argstr='--inSmooting %f', ), - inT1_MPRAGE=dict( - argstr='--inT1_MPRAGE %s', - extensions=None, - ), - inT1_SPGR=dict( - argstr='--inT1_SPGR %s', - extensions=None, - ), - null=dict(argstr='--null %s', ), - outCortical=dict( - argstr='--outCortical %s', - hash_files=False, - ), - outFilled=dict( - argstr='--outFilled %s', - hash_files=False, - ), - outHard=dict( - argstr='--outHard %s', - hash_files=False, - ), - outHard2=dict( - argstr='--outHard2 %s', - hash_files=False, - ), - outInhomogeneity=dict( - argstr='--outInhomogeneity %s', - hash_files=False, - ), - outLesion=dict( - argstr='--outLesion %s', - hash_files=False, - ), - outMembership=dict( - argstr='--outMembership %s', - hash_files=False, - ), - outSulcal=dict( - argstr='--outSulcal %s', - hash_files=False, - ), - outWM=dict( - argstr='--outWM %s', - hash_files=False, - ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), - xMaxProcess=dict( - argstr='-xMaxProcess %d', - usedefault=True, - ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inAtlas=dict(argstr="--inAtlas %s",), + inAtlas2=dict(argstr="--inAtlas2 %s", extensions=None,), + inAtlas3=dict(argstr="--inAtlas3 %s", extensions=None,), + inAtlas4=dict(argstr="--inAtlas4 %s", extensions=None,), + inAtlas5=dict(argstr="--inAtlas5 %f",), + inAtlas6=dict(argstr="--inAtlas6 %s",), + inConnectivity=dict(argstr="--inConnectivity %s",), + inCorrect=dict(argstr="--inCorrect %s",), + inFLAIR=dict(argstr="--inFLAIR %s", extensions=None,), + inInclude=dict(argstr="--inInclude %s",), + inMaximum=dict(argstr="--inMaximum %d",), + inMaximum2=dict(argstr="--inMaximum2 %d",), + inMaximum3=dict(argstr="--inMaximum3 %d",), + inMaximum4=dict(argstr="--inMaximum4 %f",), + inMaximum5=dict(argstr="--inMaximum5 %d",), + inOutput=dict(argstr="--inOutput %s",), + inOutput2=dict(argstr="--inOutput2 %s",), + 
inOutput3=dict(argstr="--inOutput3 %s",), + inSmooting=dict(argstr="--inSmooting %f",), + inT1_MPRAGE=dict(argstr="--inT1_MPRAGE %s", extensions=None,), + inT1_SPGR=dict(argstr="--inT1_SPGR %s", extensions=None,), + null=dict(argstr="--null %s",), + outCortical=dict(argstr="--outCortical %s", hash_files=False,), + outFilled=dict(argstr="--outFilled %s", hash_files=False,), + outHard=dict(argstr="--outHard %s", hash_files=False,), + outHard2=dict(argstr="--outHard2 %s", hash_files=False,), + outInhomogeneity=dict(argstr="--outInhomogeneity %s", hash_files=False,), + outLesion=dict(argstr="--outLesion %s", hash_files=False,), + outMembership=dict(argstr="--outMembership %s", hash_files=False,), + outSulcal=dict(argstr="--outSulcal %s", hash_files=False,), + outWM=dict(argstr="--outWM %s", hash_files=False,), + xDefaultMem=dict(argstr="-xDefaultMem %d",), + xMaxProcess=dict(argstr="-xMaxProcess %d", usedefault=True,), + xPrefExt=dict(argstr="--xPrefExt %s",), ) inputs = MedicAlgorithmLesionToads.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MedicAlgorithmLesionToads_outputs(): output_map = dict( - outCortical=dict(extensions=None, ), - outFilled=dict(extensions=None, ), - outHard=dict(extensions=None, ), - outHard2=dict(extensions=None, ), - outInhomogeneity=dict(extensions=None, ), - outLesion=dict(extensions=None, ), - outMembership=dict(extensions=None, ), - outSulcal=dict(extensions=None, ), - outWM=dict(extensions=None, ), + outCortical=dict(extensions=None,), + outFilled=dict(extensions=None,), + outHard=dict(extensions=None,), + outHard2=dict(extensions=None,), + outInhomogeneity=dict(extensions=None,), + outLesion=dict(extensions=None,), + outMembership=dict(extensions=None,), + outSulcal=dict(extensions=None,), + outWM=dict(extensions=None,), ) outputs = MedicAlgorithmLesionToads.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmMipavReorient.py b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmMipavReorient.py index 14a9829a11..0c941fdbc7 100644 --- a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmMipavReorient.py +++ b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmMipavReorient.py @@ -4,43 +4,30 @@ def test_MedicAlgorithmMipavReorient_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inInterpolation=dict(argstr='--inInterpolation %s', ), - inNew=dict(argstr='--inNew %s', ), - inResolution=dict(argstr='--inResolution %s', ), - inSource=dict( - argstr='--inSource %s', - sep=';', - ), - inTemplate=dict( - argstr='--inTemplate %s', - extensions=None, - ), - inUser=dict(argstr='--inUser %s', ), - inUser2=dict(argstr='--inUser2 %s', ), - inUser3=dict(argstr='--inUser3 %s', ), - inUser4=dict(argstr='--inUser4 %s', ), - null=dict(argstr='--null %s', ), - outReoriented=dict( - argstr='--outReoriented %s', - sep=';', - ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), - xMaxProcess=dict( - argstr='-xMaxProcess %d', - usedefault=True, - ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inInterpolation=dict(argstr="--inInterpolation %s",), + inNew=dict(argstr="--inNew %s",), + inResolution=dict(argstr="--inResolution %s",), + inSource=dict(argstr="--inSource %s", sep=";",), + inTemplate=dict(argstr="--inTemplate %s", extensions=None,), + inUser=dict(argstr="--inUser %s",), + 
inUser2=dict(argstr="--inUser2 %s",), + inUser3=dict(argstr="--inUser3 %s",), + inUser4=dict(argstr="--inUser4 %s",), + null=dict(argstr="--null %s",), + outReoriented=dict(argstr="--outReoriented %s", sep=";",), + xDefaultMem=dict(argstr="-xDefaultMem %d",), + xMaxProcess=dict(argstr="-xMaxProcess %d", usedefault=True,), + xPrefExt=dict(argstr="--xPrefExt %s",), ) inputs = MedicAlgorithmMipavReorient.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MedicAlgorithmMipavReorient_outputs(): output_map = dict() outputs = MedicAlgorithmMipavReorient.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmN3.py b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmN3.py index f1623bb19b..d9a8a25023 100644 --- a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmN3.py +++ b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmN3.py @@ -4,48 +4,35 @@ def test_MedicAlgorithmN3_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inAutomatic=dict(argstr='--inAutomatic %s', ), - inEnd=dict(argstr='--inEnd %f', ), - inField=dict(argstr='--inField %f', ), - inInput=dict( - argstr='--inInput %s', - extensions=None, - ), - inKernel=dict(argstr='--inKernel %f', ), - inMaximum=dict(argstr='--inMaximum %d', ), - inSignal=dict(argstr='--inSignal %f', ), - inSubsample=dict(argstr='--inSubsample %f', ), - inWeiner=dict(argstr='--inWeiner %f', ), - null=dict(argstr='--null %s', ), - outInhomogeneity=dict( - argstr='--outInhomogeneity %s', - hash_files=False, - ), - outInhomogeneity2=dict( - argstr='--outInhomogeneity2 %s', - hash_files=False, - ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), - xMaxProcess=dict( - argstr='-xMaxProcess %d', - usedefault=True, - ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inAutomatic=dict(argstr="--inAutomatic %s",), + inEnd=dict(argstr="--inEnd %f",), + inField=dict(argstr="--inField %f",), + inInput=dict(argstr="--inInput %s", extensions=None,), + inKernel=dict(argstr="--inKernel %f",), + inMaximum=dict(argstr="--inMaximum %d",), + inSignal=dict(argstr="--inSignal %f",), + inSubsample=dict(argstr="--inSubsample %f",), + inWeiner=dict(argstr="--inWeiner %f",), + null=dict(argstr="--null %s",), + outInhomogeneity=dict(argstr="--outInhomogeneity %s", hash_files=False,), + outInhomogeneity2=dict(argstr="--outInhomogeneity2 %s", hash_files=False,), + xDefaultMem=dict(argstr="-xDefaultMem %d",), + xMaxProcess=dict(argstr="-xMaxProcess %d", usedefault=True,), + xPrefExt=dict(argstr="--xPrefExt %s",), ) inputs = MedicAlgorithmN3.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MedicAlgorithmN3_outputs(): output_map = dict( - outInhomogeneity=dict(extensions=None, ), - outInhomogeneity2=dict(extensions=None, ), + outInhomogeneity=dict(extensions=None,), + outInhomogeneity2=dict(extensions=None,), ) outputs = MedicAlgorithmN3.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmSPECTRE2010.py b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmSPECTRE2010.py index e2de49a746..8e51a9c7bc 100644 --- a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmSPECTRE2010.py +++ b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmSPECTRE2010.py @@ 
-4,111 +4,74 @@ def test_MedicAlgorithmSPECTRE2010_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inApply=dict(argstr='--inApply %s', ), - inAtlas=dict( - argstr='--inAtlas %s', - extensions=None, - ), - inBackground=dict(argstr='--inBackground %f', ), - inCoarse=dict(argstr='--inCoarse %f', ), - inCost=dict(argstr='--inCost %s', ), - inDegrees=dict(argstr='--inDegrees %s', ), - inFind=dict(argstr='--inFind %s', ), - inFine=dict(argstr='--inFine %f', ), - inImage=dict(argstr='--inImage %s', ), - inInhomogeneity=dict(argstr='--inInhomogeneity %s', ), - inInitial=dict(argstr='--inInitial %d', ), - inInitial2=dict(argstr='--inInitial2 %f', ), - inInput=dict( - argstr='--inInput %s', - extensions=None, - ), - inMMC=dict(argstr='--inMMC %d', ), - inMMC2=dict(argstr='--inMMC2 %d', ), - inMaximum=dict(argstr='--inMaximum %f', ), - inMinimum=dict(argstr='--inMinimum %f', ), - inMinimum2=dict(argstr='--inMinimum2 %f', ), - inMultiple=dict(argstr='--inMultiple %d', ), - inMultithreading=dict(argstr='--inMultithreading %s', ), - inNumber=dict(argstr='--inNumber %d', ), - inNumber2=dict(argstr='--inNumber2 %d', ), - inOutput=dict(argstr='--inOutput %s', ), - inOutput2=dict(argstr='--inOutput2 %s', ), - inOutput3=dict(argstr='--inOutput3 %s', ), - inOutput4=dict(argstr='--inOutput4 %s', ), - inOutput5=dict(argstr='--inOutput5 %s', ), - inRegistration=dict(argstr='--inRegistration %s', ), - inResample=dict(argstr='--inResample %s', ), - inRun=dict(argstr='--inRun %s', ), - inSkip=dict(argstr='--inSkip %s', ), - inSmoothing=dict(argstr='--inSmoothing %f', ), - inSubsample=dict(argstr='--inSubsample %s', ), - inUse=dict(argstr='--inUse %s', ), - null=dict(argstr='--null %s', ), - outFANTASM=dict( - argstr='--outFANTASM %s', - hash_files=False, - ), - outMask=dict( - argstr='--outMask %s', - hash_files=False, - ), - outMidsagittal=dict( - argstr='--outMidsagittal %s', - hash_files=False, - ), - outOriginal=dict( - argstr='--outOriginal %s', - hash_files=False, - ), - outPrior=dict( - argstr='--outPrior %s', - hash_files=False, - ), - outSegmentation=dict( - argstr='--outSegmentation %s', - hash_files=False, - ), - outSplitHalves=dict( - argstr='--outSplitHalves %s', - hash_files=False, - ), - outStripped=dict( - argstr='--outStripped %s', - hash_files=False, - ), - outd0=dict( - argstr='--outd0 %s', - hash_files=False, - ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), - xMaxProcess=dict( - argstr='-xMaxProcess %d', - usedefault=True, - ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inApply=dict(argstr="--inApply %s",), + inAtlas=dict(argstr="--inAtlas %s", extensions=None,), + inBackground=dict(argstr="--inBackground %f",), + inCoarse=dict(argstr="--inCoarse %f",), + inCost=dict(argstr="--inCost %s",), + inDegrees=dict(argstr="--inDegrees %s",), + inFind=dict(argstr="--inFind %s",), + inFine=dict(argstr="--inFine %f",), + inImage=dict(argstr="--inImage %s",), + inInhomogeneity=dict(argstr="--inInhomogeneity %s",), + inInitial=dict(argstr="--inInitial %d",), + inInitial2=dict(argstr="--inInitial2 %f",), + inInput=dict(argstr="--inInput %s", extensions=None,), + inMMC=dict(argstr="--inMMC %d",), + inMMC2=dict(argstr="--inMMC2 %d",), + inMaximum=dict(argstr="--inMaximum %f",), + inMinimum=dict(argstr="--inMinimum %f",), + inMinimum2=dict(argstr="--inMinimum2 %f",), + inMultiple=dict(argstr="--inMultiple %d",), + inMultithreading=dict(argstr="--inMultithreading %s",), 
+ inNumber=dict(argstr="--inNumber %d",), + inNumber2=dict(argstr="--inNumber2 %d",), + inOutput=dict(argstr="--inOutput %s",), + inOutput2=dict(argstr="--inOutput2 %s",), + inOutput3=dict(argstr="--inOutput3 %s",), + inOutput4=dict(argstr="--inOutput4 %s",), + inOutput5=dict(argstr="--inOutput5 %s",), + inRegistration=dict(argstr="--inRegistration %s",), + inResample=dict(argstr="--inResample %s",), + inRun=dict(argstr="--inRun %s",), + inSkip=dict(argstr="--inSkip %s",), + inSmoothing=dict(argstr="--inSmoothing %f",), + inSubsample=dict(argstr="--inSubsample %s",), + inUse=dict(argstr="--inUse %s",), + null=dict(argstr="--null %s",), + outFANTASM=dict(argstr="--outFANTASM %s", hash_files=False,), + outMask=dict(argstr="--outMask %s", hash_files=False,), + outMidsagittal=dict(argstr="--outMidsagittal %s", hash_files=False,), + outOriginal=dict(argstr="--outOriginal %s", hash_files=False,), + outPrior=dict(argstr="--outPrior %s", hash_files=False,), + outSegmentation=dict(argstr="--outSegmentation %s", hash_files=False,), + outSplitHalves=dict(argstr="--outSplitHalves %s", hash_files=False,), + outStripped=dict(argstr="--outStripped %s", hash_files=False,), + outd0=dict(argstr="--outd0 %s", hash_files=False,), + xDefaultMem=dict(argstr="-xDefaultMem %d",), + xMaxProcess=dict(argstr="-xMaxProcess %d", usedefault=True,), + xPrefExt=dict(argstr="--xPrefExt %s",), ) inputs = MedicAlgorithmSPECTRE2010.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MedicAlgorithmSPECTRE2010_outputs(): output_map = dict( - outFANTASM=dict(extensions=None, ), - outMask=dict(extensions=None, ), - outMidsagittal=dict(extensions=None, ), - outOriginal=dict(extensions=None, ), - outPrior=dict(extensions=None, ), - outSegmentation=dict(extensions=None, ), - outSplitHalves=dict(extensions=None, ), - outStripped=dict(extensions=None, ), - outd0=dict(extensions=None, ), + outFANTASM=dict(extensions=None,), + outMask=dict(extensions=None,), + outMidsagittal=dict(extensions=None,), + outOriginal=dict(extensions=None,), + outPrior=dict(extensions=None,), + outSegmentation=dict(extensions=None,), + outSplitHalves=dict(extensions=None,), + outStripped=dict(extensions=None,), + outd0=dict(extensions=None,), ) outputs = MedicAlgorithmSPECTRE2010.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmThresholdToBinaryMask.py b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmThresholdToBinaryMask.py index e4bfae7e0f..ca0179d231 100644 --- a/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmThresholdToBinaryMask.py +++ b/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmThresholdToBinaryMask.py @@ -4,35 +4,25 @@ def test_MedicAlgorithmThresholdToBinaryMask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inLabel=dict( - argstr='--inLabel %s', - sep=';', - ), - inMaximum=dict(argstr='--inMaximum %f', ), - inMinimum=dict(argstr='--inMinimum %f', ), - inUse=dict(argstr='--inUse %s', ), - null=dict(argstr='--null %s', ), - outBinary=dict( - argstr='--outBinary %s', - sep=';', - ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), - xMaxProcess=dict( - argstr='-xMaxProcess %d', - usedefault=True, - ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inLabel=dict(argstr="--inLabel %s", sep=";",), + inMaximum=dict(argstr="--inMaximum %f",), + 
inMinimum=dict(argstr="--inMinimum %f",), + inUse=dict(argstr="--inUse %s",), + null=dict(argstr="--null %s",), + outBinary=dict(argstr="--outBinary %s", sep=";",), + xDefaultMem=dict(argstr="-xDefaultMem %d",), + xMaxProcess=dict(argstr="-xMaxProcess %d", usedefault=True,), + xPrefExt=dict(argstr="--xPrefExt %s",), ) inputs = MedicAlgorithmThresholdToBinaryMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MedicAlgorithmThresholdToBinaryMask_outputs(): output_map = dict() outputs = MedicAlgorithmThresholdToBinaryMask.output_spec() diff --git a/nipype/interfaces/mipav/tests/test_auto_RandomVol.py b/nipype/interfaces/mipav/tests/test_auto_RandomVol.py index 8c9662c9be..1dd7520626 100644 --- a/nipype/interfaces/mipav/tests/test_auto_RandomVol.py +++ b/nipype/interfaces/mipav/tests/test_auto_RandomVol.py @@ -4,39 +4,32 @@ def test_RandomVol_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inField=dict(argstr='--inField %s', ), - inLambda=dict(argstr='--inLambda %f', ), - inMaximum=dict(argstr='--inMaximum %d', ), - inMinimum=dict(argstr='--inMinimum %d', ), - inSize=dict(argstr='--inSize %d', ), - inSize2=dict(argstr='--inSize2 %d', ), - inSize3=dict(argstr='--inSize3 %d', ), - inSize4=dict(argstr='--inSize4 %d', ), - inStandard=dict(argstr='--inStandard %d', ), - null=dict(argstr='--null %s', ), - outRand1=dict( - argstr='--outRand1 %s', - hash_files=False, - ), - xDefaultMem=dict(argstr='-xDefaultMem %d', ), - xMaxProcess=dict( - argstr='-xMaxProcess %d', - usedefault=True, - ), - xPrefExt=dict(argstr='--xPrefExt %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inField=dict(argstr="--inField %s",), + inLambda=dict(argstr="--inLambda %f",), + inMaximum=dict(argstr="--inMaximum %d",), + inMinimum=dict(argstr="--inMinimum %d",), + inSize=dict(argstr="--inSize %d",), + inSize2=dict(argstr="--inSize2 %d",), + inSize3=dict(argstr="--inSize3 %d",), + inSize4=dict(argstr="--inSize4 %d",), + inStandard=dict(argstr="--inStandard %d",), + null=dict(argstr="--null %s",), + outRand1=dict(argstr="--outRand1 %s", hash_files=False,), + xDefaultMem=dict(argstr="-xDefaultMem %d",), + xMaxProcess=dict(argstr="-xMaxProcess %d", usedefault=True,), + xPrefExt=dict(argstr="--xPrefExt %s",), ) inputs = RandomVol.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RandomVol_outputs(): - output_map = dict(outRand1=dict(extensions=None, ), ) + output_map = dict(outRand1=dict(extensions=None,),) outputs = RandomVol.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mixins/__init__.py b/nipype/interfaces/mixins/__init__.py index 587d3a22a8..a64dc34ff2 100644 --- a/nipype/interfaces/mixins/__init__.py +++ b/nipype/interfaces/mixins/__init__.py @@ -1,2 +1,5 @@ from .reporting import ( - ReportCapableInterface, ReportCapableInputSpec, ReportCapableOutputSpec) + ReportCapableInterface, + ReportCapableInputSpec, + ReportCapableOutputSpec, +) diff --git a/nipype/interfaces/mixins/reporting.py b/nipype/interfaces/mixins/reporting.py index fecdb337c5..182738ca64 100644 --- a/nipype/interfaces/mixins/reporting.py +++ b/nipype/interfaces/mixins/reporting.py @@ -7,23 +7,27 @@ from abc import abstractmethod from ... 
import logging
-from ..base import (
-    File, BaseInterface, BaseInterfaceInputSpec, TraitedSpec)
+from ..base import File, BaseInterface, BaseInterfaceInputSpec, TraitedSpec
 
-iflogger = logging.getLogger('nipype.interface')
+iflogger = logging.getLogger("nipype.interface")
 
 
 class ReportCapableInputSpec(BaseInterfaceInputSpec):
-    out_report = File('report', usedefault=True, hash_files=False,
-                      desc='filename for the visual report')
+    out_report = File(
+        "report",
+        usedefault=True,
+        hash_files=False,
+        desc="filename for the visual report",
+    )
 
 
 class ReportCapableOutputSpec(TraitedSpec):
-    out_report = File(desc='filename for the visual report')
+    out_report = File(desc="filename for the visual report")
 
 
 class ReportCapableInterface(BaseInterface):
     """Mixin to enable reporting for Nipype interfaces"""
+
     _out_report = None
 
     def __init__(self, generate_report=False, **kwargs):
@@ -39,8 +43,9 @@ def _post_run_hook(self, runtime):
         self._out_report = self.inputs.out_report
         if not os.path.isabs(self._out_report):
-            self._out_report = os.path.abspath(os.path.join(runtime.cwd,
-                                                            self._out_report))
+            self._out_report = os.path.abspath(
+                os.path.join(runtime.cwd, self._out_report)
+            )
 
         self._generate_report()
 
@@ -52,7 +57,7 @@ def _list_outputs(self):
         except NotImplementedError:
             outputs = {}
         if self._out_report is not None:
-            outputs['out_report'] = self._out_report
+            outputs["out_report"] = self._out_report
         return outputs
 
     @abstractmethod
diff --git a/nipype/interfaces/mne/base.py b/nipype/interfaces/mne/base.py
index 175a73e126..f410da794a 100644
--- a/nipype/interfaces/mne/base.py
+++ b/nipype/interfaces/mne/base.py
@@ -4,74 +4,86 @@
 from ... import logging
 from ...utils.filemanip import simplify_list
-from ..base import (traits, File, Directory, TraitedSpec, OutputMultiPath)
+from ..base import traits, File, Directory, TraitedSpec, OutputMultiPath
 from ..freesurfer.base import FSCommand, FSTraitedSpec
 
-iflogger = logging.getLogger('nipype.interface')
+iflogger = logging.getLogger("nipype.interface")
 
 
 class WatershedBEMInputSpec(FSTraitedSpec):
     subject_id = traits.Str(
-        argstr='--subject %s',
+        argstr="--subject %s",
         mandatory=True,
-        desc='Subject ID (must have a complete Freesurfer directory)')
+        desc="Subject ID (must have a complete Freesurfer directory)",
+    )
     subjects_dir = Directory(
         exists=True,
         mandatory=True,
         usedefault=True,
-        desc='Path to Freesurfer subjects directory')
+        desc="Path to Freesurfer subjects directory",
+    )
     volume = traits.Enum(
-        'T1',
-        'aparc+aseg',
-        'aseg',
-        'brain',
-        'orig',
-        'brainmask',
-        'ribbon',
-        argstr='--volume %s',
+        "T1",
+        "aparc+aseg",
+        "aseg",
+        "brain",
+        "orig",
+        "brainmask",
+        "ribbon",
+        argstr="--volume %s",
         usedefault=True,
-        desc='The volume from the "mri" directory to use (defaults to T1)')
+        desc='The volume from the "mri" directory to use (defaults to T1)',
+    )
     overwrite = traits.Bool(
         True,
         usedefault=True,
-        argstr='--overwrite',
-        desc='Overwrites the existing files')
+        argstr="--overwrite",
+        desc="Overwrites the existing files",
+    )
     atlas_mode = traits.Bool(
-        argstr='--atlas',
-        desc='Use atlas mode for registration (default: no rigid alignment)')
+        argstr="--atlas",
+        desc="Use atlas mode for registration (default: no rigid alignment)",
+    )
 
 
 class WatershedBEMOutputSpec(TraitedSpec):
     mesh_files = OutputMultiPath(
         File(exists=True),
-        desc=('Paths to the output meshes (brain, inner '
-              'skull, outer skull, outer skin)'))
+        desc=(
+            "Paths to the output meshes (brain, inner "
+            "skull, outer skull, outer skin)"
+        ),
+    )
     brain_surface = File(
-        exists=True,
-        loc='bem/watershed',
-        desc='Brain surface (in Freesurfer format)')
+        exists=True, loc="bem/watershed", desc="Brain surface (in Freesurfer format)"
+    )
     inner_skull_surface = File(
         exists=True,
-        loc='bem/watershed',
-        desc='Inner skull surface (in Freesurfer format)')
+        loc="bem/watershed",
+        desc="Inner skull surface (in Freesurfer format)",
+    )
     outer_skull_surface = File(
         exists=True,
-        loc='bem/watershed',
-        desc='Outer skull surface (in Freesurfer format)')
+        loc="bem/watershed",
+        desc="Outer skull surface (in Freesurfer format)",
+    )
     outer_skin_surface = File(
         exists=True,
-        loc='bem/watershed',
-        desc='Outer skin surface (in Freesurfer format)')
+        loc="bem/watershed",
+        desc="Outer skin surface (in Freesurfer format)",
+    )
     fif_file = File(
         exists=True,
-        loc='bem',
-        altkey='fif',
-        desc='"fif" format file for EEG processing in MNE')
+        loc="bem",
+        altkey="fif",
+        desc='"fif" format file for EEG processing in MNE',
+    )
     cor_files = OutputMultiPath(
         File(exists=True),
-        loc='bem/watershed/ws',
-        altkey='COR',
-        desc='"COR" format files')
+        loc="bem/watershed/ws",
+        altkey="COR",
+        desc='"COR" format files',
+    )
 
 
 class WatershedBEM(FSCommand):
@@ -90,18 +102,18 @@ class WatershedBEM(FSCommand):
 
     """
 
-    _cmd = 'mne watershed_bem'
+    _cmd = "mne watershed_bem"
     input_spec = WatershedBEMInputSpec
     output_spec = WatershedBEMOutputSpec
-    _additional_metadata = ['loc', 'altkey']
+    _additional_metadata = ["loc", "altkey"]
 
     def _get_files(self, path, key, dirval, altkey=None):
-        globsuffix = '*'
-        globprefix = '*'
+        globsuffix = "*"
+        globprefix = "*"
         keydir = op.join(path, dirval)
         if altkey:
             key = altkey
-        globpattern = op.join(keydir, ''.join((globprefix, key, globsuffix)))
+        globpattern = op.join(keydir, "".join((globprefix, key, globsuffix)))
         return glob.glob(globpattern)
 
     def _list_outputs(self):
@@ -111,10 +123,13 @@ def _list_outputs(self):
         output_traits = self._outputs()
         mesh_paths = []
         for k in list(outputs.keys()):
-            if k != 'mesh_files':
-                val = self._get_files(subject_path, k,
-                                      output_traits.traits()[k].loc,
-                                      output_traits.traits()[k].altkey)
+            if k != "mesh_files":
+                val = self._get_files(
+                    subject_path,
+                    k,
+                    output_traits.traits()[k].loc,
+                    output_traits.traits()[k].altkey,
+                )
                 if val:
                     value_list = simplify_list(val)
                     if isinstance(value_list, list):
@@ -126,7 +141,7 @@ def _list_outputs(self):
                     else:
                         raise TypeError
                     outputs[k] = out_files
-                    if not k.rfind('surface') == -1:
+                    if not k.rfind("surface") == -1:
                         mesh_paths.append(out_files)
-        outputs['mesh_files'] = mesh_paths
+        outputs["mesh_files"] = mesh_paths
         return outputs
diff --git a/nipype/interfaces/mne/tests/test_auto_WatershedBEM.py b/nipype/interfaces/mne/tests/test_auto_WatershedBEM.py
index 2664a7d437..30e6a1de7e 100644
--- a/nipype/interfaces/mne/tests/test_auto_WatershedBEM.py
+++ b/nipype/interfaces/mne/tests/test_auto_WatershedBEM.py
@@ -4,62 +4,30 @@
 def test_WatershedBEM_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        atlas_mode=dict(argstr='--atlas', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        overwrite=dict(
-            argstr='--overwrite',
-            usedefault=True,
-        ),
-        subject_id=dict(
-            argstr='--subject %s',
-            mandatory=True,
-        ),
-        subjects_dir=dict(
-            mandatory=True,
-            usedefault=True,
-        ),
-        volume=dict(
-            argstr='--volume %s',
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        atlas_mode=dict(argstr="--atlas",),
+        environ=dict(nohash=True, usedefault=True,),
+        overwrite=dict(argstr="--overwrite", usedefault=True,),
+        subject_id=dict(argstr="--subject %s", mandatory=True,),
+        subjects_dir=dict(mandatory=True, usedefault=True,),
+        volume=dict(argstr="--volume %s", usedefault=True,),
     )
     inputs = WatershedBEM.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_WatershedBEM_outputs():
     output_map = dict(
-        brain_surface=dict(
-            extensions=None,
-            loc='bem/watershed',
-        ),
-        cor_files=dict(
-            altkey='COR',
-            loc='bem/watershed/ws',
-        ),
-        fif_file=dict(
-            altkey='fif',
-            extensions=None,
-            loc='bem',
-        ),
-        inner_skull_surface=dict(
-            extensions=None,
-            loc='bem/watershed',
-        ),
+        brain_surface=dict(extensions=None, loc="bem/watershed",),
+        cor_files=dict(altkey="COR", loc="bem/watershed/ws",),
+        fif_file=dict(altkey="fif", extensions=None, loc="bem",),
+        inner_skull_surface=dict(extensions=None, loc="bem/watershed",),
         mesh_files=dict(),
-        outer_skin_surface=dict(
-            extensions=None,
-            loc='bem/watershed',
-        ),
-        outer_skull_surface=dict(
-            extensions=None,
-            loc='bem/watershed',
-        ),
+        outer_skin_surface=dict(extensions=None, loc="bem/watershed",),
+        outer_skull_surface=dict(extensions=None, loc="bem/watershed",),
     )
     outputs = WatershedBEM.output_spec()
diff --git a/nipype/interfaces/mrtrix/__init__.py b/nipype/interfaces/mrtrix/__init__.py
index ea066d4cd8..917d576eda 100644
--- a/nipype/interfaces/mrtrix/__init__.py
+++ b/nipype/interfaces/mrtrix/__init__.py
@@ -1,15 +1,35 @@
 # -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
-from .tracking import (Tracks2Prob, FilterTracks, StreamlineTrack,
-                       DiffusionTensorStreamlineTrack,
-                       SphericallyDeconvolutedStreamlineTrack,
-                       ProbabilisticSphericallyDeconvolutedStreamlineTrack)
-from .tensors import (FSL2MRTrix, ConstrainedSphericalDeconvolution,
-                      DWI2SphericalHarmonicsImage, EstimateResponseForSH,
-                      GenerateDirections, FindShPeaks, Directions2Amplitude)
-from .preprocess import (MRConvert, MRMultiply, MRTrixViewer, MRTrixInfo,
-                         GenerateWhiteMatterMask, DWI2Tensor,
-                         Tensor2ApparentDiffusion, Tensor2FractionalAnisotropy,
-                         Tensor2Vector, MedianFilter3D, Erode, Threshold)
+from .tracking import (
+    Tracks2Prob,
+    FilterTracks,
+    StreamlineTrack,
+    DiffusionTensorStreamlineTrack,
+    SphericallyDeconvolutedStreamlineTrack,
+    ProbabilisticSphericallyDeconvolutedStreamlineTrack,
+)
+from .tensors import (
+    FSL2MRTrix,
+    ConstrainedSphericalDeconvolution,
+    DWI2SphericalHarmonicsImage,
+    EstimateResponseForSH,
+    GenerateDirections,
+    FindShPeaks,
+    Directions2Amplitude,
+)
+from .preprocess import (
+    MRConvert,
+    MRMultiply,
+    MRTrixViewer,
+    MRTrixInfo,
+    GenerateWhiteMatterMask,
+    DWI2Tensor,
+    Tensor2ApparentDiffusion,
+    Tensor2FractionalAnisotropy,
+    Tensor2Vector,
+    MedianFilter3D,
+    Erode,
+    Threshold,
+)
 from .convert import MRTrix2TrackVis
diff --git a/nipype/interfaces/mrtrix/convert.py b/nipype/interfaces/mrtrix/convert.py
index 53dbbffcb1..b2314271c4 100644
--- a/nipype/interfaces/mrtrix/convert.py
+++ b/nipype/interfaces/mrtrix/convert.py
@@ -14,12 +14,13 @@
 from ..base import TraitedSpec, File, isdefined
 from ..dipy.base import DipyBaseInterface, HAVE_DIPY as have_dipy
 
-iflogger = logging.getLogger('nipype.interface')
+iflogger = logging.getLogger("nipype.interface")
 
 
 def get_vox_dims(volume):
     import nibabel as nb
     from nipype.utils import NUMPY_MMAP
+
     if isinstance(volume, list):
         volume = volume[0]
     nii = nb.load(volume, mmap=NUMPY_MMAP)
@@ -31,6 +32,7 @@ def get_data_dims(volume):
     import nibabel as nb
     from nipype.utils import NUMPY_MMAP
+
     if isinstance(volume, list):
         volume = volume[0]
     nii = nb.load(volume, mmap=NUMPY_MMAP)
@@ -41,10 +43,10 @@ def get_data_dims(volume):
 
 def transform_to_affine(streams, header, affine):
     from dipy.tracking.utils import move_streamlines
+
     rotation, scale = np.linalg.qr(affine)
     streams = move_streamlines(streams, rotation)
-    scale[0:3, 0:3] = np.dot(scale[0:3, 0:3],
-                             np.diag(1. / header['voxel_size']))
+    scale[0:3, 0:3] = np.dot(scale[0:3, 0:3], np.diag(1.0 / header["voxel_size"]))
     scale[0:3, 3] = abs(scale[0:3, 3])
     streams = move_streamlines(streams, scale)
     return streams
@@ -57,58 +59,57 @@ def read_mrtrix_tracks(in_file, as_generator=True):
 
 
 def read_mrtrix_header(in_file):
-    fileobj = open(in_file, 'rb')
+    fileobj = open(in_file, "rb")
     header = {}
-    iflogger.info('Reading header data...')
+    iflogger.info("Reading header data...")
     for line in fileobj:
         line = line.decode()
-        if line == 'END\n':
-            iflogger.info('Reached the end of the header!')
+        if line == "END\n":
+            iflogger.info("Reached the end of the header!")
             break
-        elif ': ' in line:
-            line = line.replace('\n', '')
+        elif ": " in line:
+            line = line.replace("\n", "")
             line = line.replace("'", "")
-            key = line.split(': ')[0]
-            value = line.split(': ')[1]
+            key = line.split(": ")[0]
+            value = line.split(": ")[1]
             header[key] = value
             iflogger.info('...adding "%s" to header for key "%s"', value, key)
     fileobj.close()
-    header['count'] = int(header['count'].replace('\n', ''))
-    header['offset'] = int(header['file'].replace('.', ''))
+    header["count"] = int(header["count"].replace("\n", ""))
+    header["offset"] = int(header["file"].replace(".", ""))
     return header
 
 
 def read_mrtrix_streamlines(in_file, header, as_generator=True):
-    offset = header['offset']
-    stream_count = header['count']
-    fileobj = open(in_file, 'rb')
+    offset = header["offset"]
+    stream_count = header["count"]
+    fileobj = open(in_file, "rb")
     fileobj.seek(offset)
     endianness = native_code
-    f4dt = np.dtype(endianness + 'f4')
+    f4dt = np.dtype(endianness + "f4")
     pt_cols = 3
     bytesize = pt_cols * 4
 
     def points_per_track(offset):
        track_points = []
-        iflogger.info('Identifying the number of points per tract...')
+        iflogger.info("Identifying the number of points per tract...")
        all_str = fileobj.read()
        num_triplets = int(len(all_str) / bytesize)
-        pts = np.ndarray(
-            shape=(num_triplets, pt_cols), dtype='f4', buffer=all_str)
+        pts = np.ndarray(shape=(num_triplets, pt_cols), dtype="f4", buffer=all_str)
        nonfinite_list = np.where(np.invert(np.isfinite(pts[:, 2])))
        nonfinite_list = list(nonfinite_list[0])[
-            0:-1]  # Converts numpy array to list, removes the last value
+            0:-1
+        ]  # Converts numpy array to list, removes the last value
        for idx, value in enumerate(nonfinite_list):
            if idx == 0:
                track_points.append(nonfinite_list[idx])
            else:
-                track_points.append(
-                    nonfinite_list[idx] - nonfinite_list[idx - 1] - 1)
+                track_points.append(nonfinite_list[idx] - nonfinite_list[idx - 1] - 1)
        return track_points, nonfinite_list
 
     def track_gen(track_points):
        n_streams = 0
-        iflogger.info('Reading tracks...')
+        iflogger.info("Reading tracks...")
        while True:
            try:
                n_pts = track_points[n_streams]
@@ -118,13 +119,14 @@ def track_gen(track_points):
            nan_str = fileobj.read(bytesize)
            if len(pts_str) < (n_pts * bytesize):
                if not n_streams == stream_count:
-                    raise HeaderError('Expecting %s points, found only %s' %
-                                      (stream_count, n_streams))
-                iflogger.error('Expecting %s points, found only %s',
-                               stream_count, n_streams)
+                    raise HeaderError(
+                        "Expecting %s points, found only %s" % (stream_count, n_streams)
+                    )
+                iflogger.error(
+                    "Expecting %s points, found only %s", stream_count, n_streams
+                )
                break
-            pts = np.ndarray(
-                shape=(n_pts, pt_cols), dtype=f4dt, buffer=pts_str)
+            pts = np.ndarray(shape=(n_pts, pt_cols), dtype=f4dt, buffer=pts_str)
            nan_pt = np.ndarray(shape=(1, pt_cols), dtype=f4dt, buffer=nan_str)
            if np.isfinite(nan_pt[0][0]):
                raise ValueError
@@ -133,15 +135,14 @@ def track_gen(track_points):
            yield xyz
            n_streams += 1
            if n_streams == stream_count:
-                iflogger.info('100%% : %i tracks read', n_streams)
+                iflogger.info("100%% : %i tracks read", n_streams)
                raise StopIteration
            try:
                if n_streams % int(stream_count / 100) == 0:
                    percent = int(float(n_streams) / float(stream_count) * 100)
-                    iflogger.info('%i%% : %i tracks read', percent, n_streams)
+                    iflogger.info("%i%% : %i tracks read", percent, n_streams)
            except ZeroDivisionError:
-                iflogger.info('%i stream read out of %i', n_streams,
-                              stream_count)
+                iflogger.info("%i stream read out of %i", n_streams, stream_count)
 
     track_points, nonfinite_list = points_per_track(offset)
     fileobj.seek(offset)
@@ -155,22 +156,22 @@ class MRTrix2TrackVisInputSpec(TraitedSpec):
     in_file = File(
         exists=True,
         mandatory=True,
-        desc='The input file for the tracks in MRTrix (.tck) format')
-    image_file = File(
-        exists=True, desc='The image the tracks were generated from')
+        desc="The input file for the tracks in MRTrix (.tck) format",
+    )
+    image_file = File(exists=True, desc="The image the tracks were generated from")
     matrix_file = File(
         exists=True,
-        desc=
-        'A transformation matrix to apply to the tracts after they have been generated (from FLIRT - affine transformation from image_file to registration_image_file)'
+        desc="A transformation matrix to apply to the tracts after they have been generated (from FLIRT - affine transformation from image_file to registration_image_file)",
     )
     registration_image_file = File(
-        exists=True,
-        desc='The final image the tracks should be registered to.')
+        exists=True, desc="The final image the tracks should be registered to."
+    )
     out_filename = File(
-        'converted.trk',
+        "converted.trk",
         genfile=True,
         usedefault=True,
-        desc='The output filename for the tracks in TrackVis (.trk) format')
+        desc="The output filename for the tracks in TrackVis (.trk) format",
+    )
 
 
 class MRTrix2TrackVisOutputSpec(TraitedSpec):
@@ -189,12 +190,13 @@ class MRTrix2TrackVis(DipyBaseInterface):
     >>> tck2trk.inputs.image_file = 'diffusion.nii'
     >>> tck2trk.run()  # doctest: +SKIP
     """
+
     input_spec = MRTrix2TrackVisInputSpec
     output_spec = MRTrix2TrackVisOutputSpec
 
     def _run_interface(self, runtime):
-        from dipy.tracking.utils import move_streamlines, \
-            affine_from_fsl_mat_file
+        from dipy.tracking.utils import move_streamlines, affine_from_fsl_mat_file
+
         dx, dy, dz = get_data_dims(self.inputs.image_file)
         vx, vy, vz = get_vox_dims(self.inputs.image_file)
         image_file = nb.load(self.inputs.image_file)
@@ -202,81 +204,82 @@ def _run_interface(self, runtime):
         out_filename = op.abspath(self.inputs.out_filename)
 
         # Reads MRTrix tracks
-        header, streamlines = read_mrtrix_tracks(
-            self.inputs.in_file, as_generator=True)
-        iflogger.info('MRTrix Header:')
+        header, streamlines = read_mrtrix_tracks(self.inputs.in_file, as_generator=True)
+        iflogger.info("MRTrix Header:")
         iflogger.info(header)
         # Writes to Trackvis
         trk_header = nb.trackvis.empty_header()
-        trk_header['dim'] = [dx, dy, dz]
-        trk_header['voxel_size'] = [vx, vy, vz]
-        trk_header['n_count'] = header['count']
+        trk_header["dim"] = [dx, dy, dz]
+        trk_header["voxel_size"] = [vx, vy, vz]
+        trk_header["n_count"] = header["count"]
 
         if isdefined(self.inputs.matrix_file) and isdefined(
-                self.inputs.registration_image_file):
-            iflogger.info('Applying transformation from matrix file %s',
-                          self.inputs.matrix_file)
+            self.inputs.registration_image_file
+        ):
+            iflogger.info(
+                "Applying transformation from matrix file %s", self.inputs.matrix_file
+            )
             xfm = np.genfromtxt(self.inputs.matrix_file)
             iflogger.info(xfm)
-            registration_image_file = nb.load(
-                self.inputs.registration_image_file)
+            registration_image_file = nb.load(self.inputs.registration_image_file)
             reg_affine = registration_image_file.affine
-            r_dx, r_dy, r_dz = get_data_dims(
-                self.inputs.registration_image_file)
-            r_vx, r_vy, r_vz = get_vox_dims(
-                self.inputs.registration_image_file)
-            iflogger.info('Using affine from registration image file %s',
-                          self.inputs.registration_image_file)
+            r_dx, r_dy, r_dz = get_data_dims(self.inputs.registration_image_file)
+            r_vx, r_vy, r_vz = get_vox_dims(self.inputs.registration_image_file)
+            iflogger.info(
+                "Using affine from registration image file %s",
+                self.inputs.registration_image_file,
+            )
             iflogger.info(reg_affine)
-            trk_header['vox_to_ras'] = reg_affine
-            trk_header['dim'] = [r_dx, r_dy, r_dz]
-            trk_header['voxel_size'] = [r_vx, r_vy, r_vz]
+            trk_header["vox_to_ras"] = reg_affine
+            trk_header["dim"] = [r_dx, r_dy, r_dz]
+            trk_header["voxel_size"] = [r_vx, r_vy, r_vz]
 
-            affine = np.dot(affine, np.diag(1. / np.array([vx, vy, vz, 1])))
+            affine = np.dot(affine, np.diag(1.0 / np.array([vx, vy, vz, 1])))
             transformed_streamlines = transform_to_affine(
-                streamlines, trk_header, affine)
+                streamlines, trk_header, affine
+            )
 
-            aff = affine_from_fsl_mat_file(xfm, [vx, vy, vz],
-                                           [r_vx, r_vy, r_vz])
+            aff = affine_from_fsl_mat_file(xfm, [vx, vy, vz], [r_vx, r_vy, r_vz])
             iflogger.info(aff)
 
             axcode = aff2axcodes(reg_affine)
-            trk_header['voxel_order'] = axcode[0] + axcode[1] + axcode[2]
+            trk_header["voxel_order"] = axcode[0] + axcode[1] + axcode[2]
 
             final_streamlines = move_streamlines(transformed_streamlines, aff)
             trk_tracks = ((ii, None, None) for ii in final_streamlines)
             trk.write(out_filename, trk_tracks, trk_header)
-            iflogger.info('Saving transformed Trackvis file as %s',
-                          out_filename)
-            iflogger.info('New TrackVis Header:')
+            iflogger.info("Saving transformed Trackvis file as %s", out_filename)
+            iflogger.info("New TrackVis Header:")
            iflogger.info(trk_header)
        else:
            iflogger.info(
-                'Applying transformation from scanner coordinates to %s',
-                self.inputs.image_file)
+                "Applying transformation from scanner coordinates to %s",
+                self.inputs.image_file,
+            )
            axcode = aff2axcodes(affine)
-            trk_header['voxel_order'] = axcode[0] + axcode[1] + axcode[2]
-            trk_header['vox_to_ras'] = affine
+            trk_header["voxel_order"] = axcode[0] + axcode[1] + axcode[2]
+            trk_header["vox_to_ras"] = affine
            transformed_streamlines = transform_to_affine(
-                streamlines, trk_header, affine)
+                streamlines, trk_header, affine
+            )
            trk_tracks = ((ii, None, None) for ii in transformed_streamlines)
            trk.write(out_filename, trk_tracks, trk_header)
-            iflogger.info('Saving Trackvis file as %s', out_filename)
-            iflogger.info('TrackVis Header:')
+            iflogger.info("Saving Trackvis file as %s", out_filename)
+            iflogger.info("TrackVis Header:")
            iflogger.info(trk_header)
        return runtime
 
     def _list_outputs(self):
        outputs = self._outputs().get()
-        outputs['out_file'] = op.abspath(self.inputs.out_filename)
+        outputs["out_file"] = op.abspath(self.inputs.out_filename)
        return outputs
 
     def _gen_filename(self, name):
-        if name == 'out_filename':
+        if name == "out_filename":
            return self._gen_outfilename()
        else:
            return None
 
     def _gen_outfilename(self):
        _, name, _ = split_filename(self.inputs.in_file)
-        return name + '.trk'
+        return name + ".trk"
diff --git a/nipype/interfaces/mrtrix/preprocess.py b/nipype/interfaces/mrtrix/preprocess.py
index adc2ef27d7..48f4bde719 100644
--- a/nipype/interfaces/mrtrix/preprocess.py
+++ b/nipype/interfaces/mrtrix/preprocess.py
@@ -5,47 +5,52 @@
 import os.path as op
 
 from ...utils.filemanip import split_filename
-from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec,
-                    File, InputMultiPath, isdefined)
+from ..base import (
+    CommandLineInputSpec,
+    CommandLine,
+    traits,
+    TraitedSpec,
+    File,
+    InputMultiPath,
+    isdefined,
+)
 
 
 class MRConvertInputSpec(CommandLineInputSpec):
     in_file = File(
         exists=True,
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
         position=-2,
-        desc='voxel-order data filename')
-    out_filename = File(
-        genfile=True, argstr='%s', position=-1, desc='Output filename')
+        desc="voxel-order data filename",
+    )
+    out_filename = File(genfile=True, argstr="%s", position=-1, desc="Output filename")
     extract_at_axis = traits.Enum(
         1,
         2,
         3,
-        argstr='-coord %s',
+        argstr="-coord %s",
         position=1,
-        desc=
-        '"Extract data only at the coordinates specified. This option specifies the Axis. Must be used in conjunction with extract_at_coordinate.'
+        desc='"Extract data only at the coordinates specified. This option specifies the Axis. Must be used in conjunction with extract_at_coordinate.',
     )
     extract_at_coordinate = traits.List(
         traits.Float,
-        argstr='%s',
-        sep=',',
+        argstr="%s",
+        sep=",",
         position=2,
         minlen=1,
         maxlen=3,
-        desc=
-        '"Extract data only at the coordinates specified. This option specifies the coordinates. Must be used in conjunction with extract_at_axis. Three comma-separated numbers giving the size of each voxel in mm.'
+        desc='"Extract data only at the coordinates specified. This option specifies the coordinates. Must be used in conjunction with extract_at_axis. Three comma-separated numbers giving the size of each voxel in mm.',
     )
     voxel_dims = traits.List(
         traits.Float,
-        argstr='-vox %s',
-        sep=',',
+        argstr="-vox %s",
+        sep=",",
         position=3,
         minlen=3,
         maxlen=3,
-        desc=
-        'Three comma-separated numbers giving the size of each voxel in mm.')
+        desc="Three comma-separated numbers giving the size of each voxel in mm.",
+    )
     output_datatype = traits.Enum(
         "nii",
         "float",
@@ -54,10 +59,9 @@ class MRConvertInputSpec(CommandLineInputSpec):
         "int",
         "long",
         "double",
-        argstr='-output %s',
+        argstr="-output %s",
         position=2,
-        desc=
-        '"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"'
+        desc='"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"',
     )  # , usedefault=True)
     extension = traits.Enum(
         "mif",
@@ -69,9 +73,9 @@ class MRConvertInputSpec(CommandLineInputSpec):
         "long",
         "double",
         position=2,
-        desc=
-        '"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"',
-        usedefault=True)
+        desc='"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"',
+        usedefault=True,
+    )
     layout = traits.Enum(
         "nii",
         "float",
@@ -80,33 +84,34 @@ class MRConvertInputSpec(CommandLineInputSpec):
         "int",
         "long",
         "double",
-        argstr='-output %s',
+        argstr="-output %s",
         position=2,
-        desc=
-        'specify the layout of the data in memory. The actual layout produced will depend on whether the output image format can support it.'
+        desc="specify the layout of the data in memory. The actual layout produced will depend on whether the output image format can support it.",
     )
     resample = traits.Float(
-        argstr='-scale %d',
+        argstr="-scale %d",
         position=3,
-        units='mm',
-        desc='Apply scaling to the intensity values.')
+        units="mm",
+        desc="Apply scaling to the intensity values.",
+    )
     offset_bias = traits.Float(
-        argstr='-scale %d',
+        argstr="-scale %d",
         position=3,
-        units='mm',
-        desc='Apply offset to the intensity values.')
+        units="mm",
+        desc="Apply offset to the intensity values.",
+    )
     replace_NaN_with_zero = traits.Bool(
-        argstr='-zero', position=3, desc="Replace all NaN values with zero.")
+        argstr="-zero", position=3, desc="Replace all NaN values with zero."
+    )
     prs = traits.Bool(
-        argstr='-prs',
+        argstr="-prs",
         position=3,
-        desc=
-        "Assume that the DW gradients are specified in the PRS frame (Siemens DICOM only)."
+        desc="Assume that the DW gradients are specified in the PRS frame (Siemens DICOM only).",
     )
 
 
 class MRConvertOutputSpec(TraitedSpec):
-    converted = File(exists=True, desc='path/name of 4D volume in voxel order')
+    converted = File(exists=True, desc="path/name of 4D volume in voxel order")
 
 
 class MRConvert(CommandLine):
@@ -128,21 +133,21 @@ class MRConvert(CommandLine):
     >>> mrconvert.run()  # doctest: +SKIP
     """
 
-    _cmd = 'mrconvert'
+    _cmd = "mrconvert"
     input_spec = MRConvertInputSpec
     output_spec = MRConvertOutputSpec
 
     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['converted'] = self.inputs.out_filename
-        if not isdefined(outputs['converted']):
-            outputs['converted'] = op.abspath(self._gen_outfilename())
+        outputs["converted"] = self.inputs.out_filename
+        if not isdefined(outputs["converted"]):
+            outputs["converted"] = op.abspath(self._gen_outfilename())
         else:
-            outputs['converted'] = op.abspath(outputs['converted'])
+            outputs["converted"] = op.abspath(outputs["converted"])
         return outputs
 
     def _gen_filename(self, name):
-        if name == 'out_filename':
+        if name == "out_filename":
             return self._gen_outfilename()
         else:
             return None
@@ -152,67 +157,76 @@ def _gen_outfilename(self):
         if isdefined(self.inputs.out_filename):
             outname = self.inputs.out_filename
         else:
-            outname = name + '_mrconvert.' + self.inputs.extension
+            outname = name + "_mrconvert." + self.inputs.extension
         return outname
 
 
 class DWI2TensorInputSpec(CommandLineInputSpec):
     in_file = InputMultiPath(
         File(exists=True),
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
         position=-2,
-        desc='Diffusion-weighted images')
+        desc="Diffusion-weighted images",
+    )
     out_filename = File(
         name_template="%s_tensor.mif",
         name_source="in_file",
         output_name="tensor",
-        argstr='%s',
-        desc='Output tensor filename',
-        position=-1)
+        argstr="%s",
+        desc="Output tensor filename",
+        position=-1,
+    )
     encoding_file = File(
-        argstr='-grad %s',
+        argstr="-grad %s",
         position=2,
-        desc=('Encoding file supplied as a 4xN text file with '
-              'each line is in the format [ X Y Z b ], where '
-              '[ X Y Z ] describe the direction of the applied '
-              'gradient, and b gives the b-value in units '
-              '(1000 s/mm^2). See FSL2MRTrix()'))
+        desc=(
+            "Encoding file supplied as a 4xN text file with "
+            "each line is in the format [ X Y Z b ], where "
+            "[ X Y Z ] describe the direction of the applied "
+            "gradient, and b gives the b-value in units "
+            "(1000 s/mm^2). See FSL2MRTrix()"
+        ),
+    )
     ignore_slice_by_volume = traits.List(
         traits.Int,
-        argstr='-ignoreslices %s',
-        sep=' ',
+        argstr="-ignoreslices %s",
+        sep=" ",
         position=2,
         minlen=2,
         maxlen=2,
-        desc=('Requires two values (i.e. [34 '
-              '1] for [Slice Volume] Ignores '
-              'the image slices specified '
-              'when computing the tensor. '
-              'Slice here means the z '
-              'coordinate of the slice to be '
-              'ignored.'))
+        desc=(
+            "Requires two values (i.e. [34 "
+            "1] for [Slice Volume] Ignores "
+            "the image slices specified "
+            "when computing the tensor. "
+            "Slice here means the z "
+            "coordinate of the slice to be "
+            "ignored."
+        ),
+    )
     ignore_volumes = traits.List(
         traits.Int,
-        argstr='-ignorevolumes %s',
-        sep=' ',
+        argstr="-ignorevolumes %s",
+        sep=" ",
         position=2,
         minlen=1,
-        desc=('Requires two values (i.e. [2 5 6] for '
-              '[Volumes] Ignores the image volumes '
-              'specified when computing the tensor.'))
+        desc=(
+            "Requires two values (i.e. [2 5 6] for "
+            "[Volumes] Ignores the image volumes "
+            "specified when computing the tensor."
+        ),
+    )
     quiet = traits.Bool(
-        argstr='-quiet',
+        argstr="-quiet",
         position=1,
-        desc=("Do not display information messages or progress "
-              "status."))
-    debug = traits.Bool(
-        argstr='-debug', position=1, desc="Display debugging messages.")
+        desc=("Do not display information messages or progress " "status."),
+    )
+    debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.")
 
 
 class DWI2TensorOutputSpec(TraitedSpec):
-    tensor = File(
-        exists=True, desc='path/name of output diffusion tensor image')
+    tensor = File(exists=True, desc="path/name of output diffusion tensor image")
 
 
 class DWI2Tensor(CommandLine):
@@ -231,7 +245,7 @@ class DWI2Tensor(CommandLine):
     >>> dwi2tensor.run()  # doctest: +SKIP
     """
 
-    _cmd = 'dwi2tensor'
+    _cmd = "dwi2tensor"
     input_spec = DWI2TensorInputSpec
     output_spec = DWI2TensorOutputSpec
 
@@ -239,25 +253,26 @@ class Tensor2VectorInputSpec(CommandLineInputSpec):
     in_file = File(
         exists=True,
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
         position=-2,
-        desc='Diffusion tensor image')
+        desc="Diffusion tensor image",
+    )
     out_filename = File(
-        genfile=True, argstr='%s', position=-1, desc='Output vector filename')
+        genfile=True, argstr="%s", position=-1, desc="Output vector filename"
+    )
     quiet = traits.Bool(
-        argstr='-quiet',
+        argstr="-quiet",
         position=1,
-        desc="Do not display information messages or progress status.")
-    debug = traits.Bool(
-        argstr='-debug', position=1, desc="Display debugging messages.")
+        desc="Do not display information messages or progress status.",
+    )
+    debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.")
 
 
 class Tensor2VectorOutputSpec(TraitedSpec):
     vector = File(
         exists=True,
-        desc=
-        'the output image of the major eigenvectors of the diffusion tensor image.'
+        desc="the output image of the major eigenvectors of the diffusion tensor image.",
     )
 
@@ -274,55 +289,56 @@ class Tensor2Vector(CommandLine):
     >>> tensor2vector.run()  # doctest: +SKIP
     """
 
-    _cmd = 'tensor2vector'
+    _cmd = "tensor2vector"
     input_spec = Tensor2VectorInputSpec
     output_spec = Tensor2VectorOutputSpec
 
     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['vector'] = self.inputs.out_filename
-        if not isdefined(outputs['vector']):
-            outputs['vector'] = op.abspath(self._gen_outfilename())
+        outputs["vector"] = self.inputs.out_filename
+        if not isdefined(outputs["vector"]):
+            outputs["vector"] = op.abspath(self._gen_outfilename())
         else:
-            outputs['vector'] = op.abspath(outputs['vector'])
+            outputs["vector"] = op.abspath(outputs["vector"])
         return outputs
 
     def _gen_filename(self, name):
-        if name == 'out_filename':
+        if name == "out_filename":
             return self._gen_outfilename()
         else:
             return None
 
     def _gen_outfilename(self):
         _, name, _ = split_filename(self.inputs.in_file)
-        return name + '_vector.mif'
+        return name + "_vector.mif"
 
 
 class Tensor2FractionalAnisotropyInputSpec(CommandLineInputSpec):
     in_file = File(
         exists=True,
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
         position=-2,
-        desc='Diffusion tensor image')
+        desc="Diffusion tensor image",
+    )
     out_filename = File(
         genfile=True,
-        argstr='%s',
+        argstr="%s",
         position=-1,
-        desc='Output Fractional Anisotropy filename')
+        desc="Output Fractional Anisotropy filename",
+    )
     quiet = traits.Bool(
-        argstr='-quiet',
+        argstr="-quiet",
         position=1,
-        desc="Do not display information messages or progress status.")
-    debug = traits.Bool(
-        argstr='-debug', position=1, desc="Display debugging messages.")
+        desc="Do not display information messages or progress status.",
+    )
+    debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.")
 
 
 class Tensor2FractionalAnisotropyOutputSpec(TraitedSpec):
     FA = File(
         exists=True,
-        desc=
-        'the output image of the major eigenvectors of the diffusion tensor image.'
+ desc="the output image of the major eigenvectors of the diffusion tensor image.", ) @@ -339,55 +355,56 @@ class Tensor2FractionalAnisotropy(CommandLine): >>> tensor2FA.run() # doctest: +SKIP """ - _cmd = 'tensor2FA' + _cmd = "tensor2FA" input_spec = Tensor2FractionalAnisotropyInputSpec output_spec = Tensor2FractionalAnisotropyOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['FA'] = self.inputs.out_filename - if not isdefined(outputs['FA']): - outputs['FA'] = op.abspath(self._gen_outfilename()) + outputs["FA"] = self.inputs.out_filename + if not isdefined(outputs["FA"]): + outputs["FA"] = op.abspath(self._gen_outfilename()) else: - outputs['FA'] = op.abspath(outputs['FA']) + outputs["FA"] = op.abspath(outputs["FA"]) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_FA.mif' + return name + "_FA.mif" class Tensor2ApparentDiffusionInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='Diffusion tensor image') + desc="Diffusion tensor image", + ) out_filename = File( genfile=True, - argstr='%s', + argstr="%s", position=-1, - desc='Output Fractional Anisotropy filename') + desc="Output Fractional Anisotropy filename", + ) quiet = traits.Bool( - argstr='-quiet', + argstr="-quiet", position=1, - desc="Do not display information messages or progress status.") - debug = traits.Bool( - argstr='-debug', position=1, desc="Display debugging messages.") + desc="Do not display information messages or progress status.", + ) + debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class Tensor2ApparentDiffusionOutputSpec(TraitedSpec): ADC = File( exists=True, - desc= - 'the output image of the major eigenvectors of the diffusion tensor image.' 
+ desc="the output image of the major eigenvectors of the diffusion tensor image.", ) @@ -404,49 +421,51 @@ class Tensor2ApparentDiffusion(CommandLine): >>> tensor2ADC.run() # doctest: +SKIP """ - _cmd = 'tensor2ADC' + _cmd = "tensor2ADC" input_spec = Tensor2ApparentDiffusionInputSpec output_spec = Tensor2ApparentDiffusionOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['ADC'] = self.inputs.out_filename - if not isdefined(outputs['ADC']): - outputs['ADC'] = op.abspath(self._gen_outfilename()) + outputs["ADC"] = self.inputs.out_filename + if not isdefined(outputs["ADC"]): + outputs["ADC"] = op.abspath(self._gen_outfilename()) else: - outputs['ADC'] = op.abspath(outputs['ADC']) + outputs["ADC"] = op.abspath(outputs["ADC"]) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_ADC.mif' + return name + "_ADC.mif" class MRMultiplyInputSpec(CommandLineInputSpec): in_files = InputMultiPath( File(exists=True), - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='Input images to be multiplied') + desc="Input images to be multiplied", + ) out_filename = File( - genfile=True, argstr='%s', position=-1, desc='Output image filename') + genfile=True, argstr="%s", position=-1, desc="Output image filename" + ) quiet = traits.Bool( - argstr='-quiet', + argstr="-quiet", position=1, - desc="Do not display information messages or progress status.") - debug = traits.Bool( - argstr='-debug', position=1, desc="Display debugging messages.") + desc="Do not display information messages or progress status.", + ) + debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class MRMultiplyOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output image of the multiplication') + out_file = File(exists=True, desc="the output image of the multiplication") class MRMultiply(CommandLine): @@ -462,43 +481,44 @@ class MRMultiply(CommandLine): >>> MRmult.run() # doctest: +SKIP """ - _cmd = 'mrmult' + _cmd = "mrmult" input_spec = MRMultiplyInputSpec output_spec = MRMultiplyOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_filename - if not isdefined(outputs['out_file']): - outputs['out_file'] = op.abspath(self._gen_outfilename()) + outputs["out_file"] = self.inputs.out_filename + if not isdefined(outputs["out_file"]): + outputs["out_file"] = op.abspath(self._gen_outfilename()) else: - outputs['out_file'] = op.abspath(outputs['out_file']) + outputs["out_file"] = op.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_files[0]) - return name + '_MRMult.mif' + return name + "_MRMult.mif" class MRTrixViewerInputSpec(CommandLineInputSpec): in_files = InputMultiPath( File(exists=True), - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='Input images to be viewed') + desc="Input images to be viewed", + ) quiet = traits.Bool( - argstr='-quiet', + argstr="-quiet", position=1, - desc="Do not display information messages or progress status.") - debug = traits.Bool( - argstr='-debug', position=1, desc="Display debugging messages.") + desc="Do not display information messages or progress 
status.", + ) + debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class MRTrixViewerOutputSpec(TraitedSpec): @@ -518,7 +538,7 @@ class MRTrixViewer(CommandLine): >>> MRview.run() # doctest: +SKIP """ - _cmd = 'mrview' + _cmd = "mrview" input_spec = MRTrixViewerInputSpec output_spec = MRTrixViewerOutputSpec @@ -529,10 +549,11 @@ def _list_outputs(self): class MRTrixInfoInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='Input images to be read') + desc="Input images to be read", + ) class MRTrixInfoOutputSpec(TraitedSpec): @@ -552,7 +573,7 @@ class MRTrixInfo(CommandLine): >>> MRinfo.run() # doctest: +SKIP """ - _cmd = 'mrinfo' + _cmd = "mrinfo" input_spec = MRTrixInfoInputSpec output_spec = MRTrixInfoOutputSpec @@ -563,38 +584,35 @@ def _list_outputs(self): class GenerateWhiteMatterMaskInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-3, - desc='Diffusion-weighted images') + desc="Diffusion-weighted images", + ) binary_mask = File( - exists=True, - argstr='%s', - mandatory=True, - position=-2, - desc='Binary brain mask') + exists=True, argstr="%s", mandatory=True, position=-2, desc="Binary brain mask" + ) out_WMProb_filename = File( genfile=True, - argstr='%s', + argstr="%s", position=-1, - desc='Output WM probability image filename') + desc="Output WM probability image filename", + ) encoding_file = File( exists=True, - argstr='-grad %s', + argstr="-grad %s", mandatory=True, position=1, - desc= - 'Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix' + desc="Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). 
See FSL2MRTrix", ) noise_level_margin = traits.Float( - argstr='-margin %s', - desc= - 'Specify the width of the margin on either side of the image to be used to estimate the noise level (default = 10)' + argstr="-margin %s", + desc="Specify the width of the margin on either side of the image to be used to estimate the noise level (default = 10)", ) class GenerateWhiteMatterMaskOutputSpec(TraitedSpec): - WMprobabilitymap = File(exists=True, desc='WMprobabilitymap') + WMprobabilitymap = File(exists=True, desc="WMprobabilitymap") class GenerateWhiteMatterMask(CommandLine): @@ -611,51 +629,53 @@ class GenerateWhiteMatterMask(CommandLine): >>> genWM.run() # doctest: +SKIP """ - _cmd = 'gen_WM_mask' + _cmd = "gen_WM_mask" input_spec = GenerateWhiteMatterMaskInputSpec output_spec = GenerateWhiteMatterMaskOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['WMprobabilitymap'] = op.abspath(self._gen_outfilename()) + outputs["WMprobabilitymap"] = op.abspath(self._gen_outfilename()) return outputs def _gen_filename(self, name): - if name == 'out_WMProb_filename': + if name == "out_WMProb_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_WMProb.mif' + return name + "_WMProb.mif" class ErodeInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='Input mask image to be eroded') + desc="Input mask image to be eroded", + ) out_filename = File( - genfile=True, argstr='%s', position=-1, desc='Output image filename') + genfile=True, argstr="%s", position=-1, desc="Output image filename" + ) number_of_passes = traits.Int( - argstr='-npass %s', desc='the number of passes (default: 1)') + argstr="-npass %s", desc="the number of passes (default: 1)" + ) dilate = traits.Bool( - argstr='-dilate', - position=1, - desc="Perform dilation rather than erosion") + argstr="-dilate", position=1, desc="Perform dilation rather than erosion" + ) quiet = traits.Bool( - argstr='-quiet', + argstr="-quiet", position=1, - desc="Do not display information messages or progress status.") - debug = traits.Bool( - argstr='-debug', position=1, desc="Display debugging messages.") + desc="Do not display information messages or progress status.", + ) + debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class ErodeOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output image') + out_file = File(exists=True, desc="the output image") class Erode(CommandLine): @@ -670,64 +690,63 @@ class Erode(CommandLine): >>> erode.inputs.in_file = 'mask.mif' >>> erode.run() # doctest: +SKIP """ - _cmd = 'erode' + + _cmd = "erode" input_spec = ErodeInputSpec output_spec = ErodeOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_filename - if not isdefined(outputs['out_file']): - outputs['out_file'] = op.abspath(self._gen_outfilename()) + outputs["out_file"] = self.inputs.out_filename + if not isdefined(outputs["out_file"]): + outputs["out_file"] = op.abspath(self._gen_outfilename()) else: - outputs['out_file'] = op.abspath(outputs['out_file']) + outputs["out_file"] = op.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + 
'_erode.mif' + return name + "_erode.mif" class ThresholdInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='The input image to be thresholded') + desc="The input image to be thresholded", + ) out_filename = File( - genfile=True, - argstr='%s', - position=-1, - desc='The output binary image mask.') + genfile=True, argstr="%s", position=-1, desc="The output binary image mask." + ) absolute_threshold_value = traits.Float( - argstr='-abs %s', - desc='Specify threshold value as absolute intensity.') + argstr="-abs %s", desc="Specify threshold value as absolute intensity." + ) percentage_threshold_value = traits.Float( - argstr='-percent %s', - desc= - 'Specify threshold value as a percentage of the peak intensity in the input image.' + argstr="-percent %s", + desc="Specify threshold value as a percentage of the peak intensity in the input image.", ) - invert = traits.Bool( - argstr='-invert', position=1, desc="Invert output binary mask") + invert = traits.Bool(argstr="-invert", position=1, desc="Invert output binary mask") replace_zeros_with_NaN = traits.Bool( - argstr='-nan', position=1, desc="Replace all zero values with NaN") + argstr="-nan", position=1, desc="Replace all zero values with NaN" + ) quiet = traits.Bool( - argstr='-quiet', + argstr="-quiet", position=1, - desc="Do not display information messages or progress status.") - debug = traits.Bool( - argstr='-debug', position=1, desc="Display debugging messages.") + desc="Do not display information messages or progress status.", + ) + debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class ThresholdOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='The output binary image mask.') + out_file = File(exists=True, desc="The output binary image mask.") class Threshold(CommandLine): @@ -748,49 +767,51 @@ class Threshold(CommandLine): >>> thresh.run() # doctest: +SKIP """ - _cmd = 'threshold' + _cmd = "threshold" input_spec = ThresholdInputSpec output_spec = ThresholdOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_filename - if not isdefined(outputs['out_file']): - outputs['out_file'] = op.abspath(self._gen_outfilename()) + outputs["out_file"] = self.inputs.out_filename + if not isdefined(outputs["out_file"]): + outputs["out_file"] = op.abspath(self._gen_outfilename()) else: - outputs['out_file'] = op.abspath(outputs['out_file']) + outputs["out_file"] = op.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_thresh.mif' + return name + "_thresh.mif" class MedianFilter3DInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='Input images to be smoothed') + desc="Input images to be smoothed", + ) out_filename = File( - genfile=True, argstr='%s', position=-1, desc='Output image filename') + genfile=True, argstr="%s", position=-1, desc="Output image filename" + ) quiet = traits.Bool( - argstr='-quiet', + argstr="-quiet", position=1, - desc="Do not display information messages or progress status.") - debug = traits.Bool( - argstr='-debug', position=1, desc="Display debugging messages.") + desc="Do not display information messages or progress status.", + ) + debug = 
traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class MedianFilter3DOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output image') + out_file = File(exists=True, desc="the output image") class MedianFilter3D(CommandLine): @@ -806,82 +827,82 @@ class MedianFilter3D(CommandLine): >>> median3d.run() # doctest: +SKIP """ - _cmd = 'median3D' + _cmd = "median3D" input_spec = MedianFilter3DInputSpec output_spec = MedianFilter3DOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_filename - if not isdefined(outputs['out_file']): - outputs['out_file'] = op.abspath(self._gen_outfilename()) + outputs["out_file"] = self.inputs.out_filename + if not isdefined(outputs["out_file"]): + outputs["out_file"] = op.abspath(self._gen_outfilename()) else: - outputs['out_file'] = op.abspath(outputs['out_file']) + outputs["out_file"] = op.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_median3D.mif' + return name + "_median3D.mif" class MRTransformInputSpec(CommandLineInputSpec): in_files = InputMultiPath( File(exists=True), - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='Input images to be transformed') - out_filename = File( - genfile=True, argstr='%s', position=-1, desc='Output image') + desc="Input images to be transformed", + ) + out_filename = File(genfile=True, argstr="%s", position=-1, desc="Output image") invert = traits.Bool( - argstr='-inverse', + argstr="-inverse", position=1, - desc="Invert the specified transform before using it") + desc="Invert the specified transform before using it", + ) replace_transform = traits.Bool( - argstr='-replace', + argstr="-replace", position=1, - desc= - "replace the current transform by that specified, rather than applying it to the current transform" + desc="replace the current transform by that specified, rather than applying it to the current transform", ) transformation_file = File( exists=True, - argstr='-transform %s', + argstr="-transform %s", position=1, - desc='The transform to apply, in the form of a 4x4 ascii file.') + desc="The transform to apply, in the form of a 4x4 ascii file.", + ) template_image = File( exists=True, - argstr='-template %s', + argstr="-template %s", position=1, - desc='Reslice the input image to match the specified template image.') + desc="Reslice the input image to match the specified template image.", + ) reference_image = File( exists=True, - argstr='-reference %s', + argstr="-reference %s", position=1, - desc= - 'in case the transform supplied maps from the input image onto a reference image, use this option to specify the reference. Note that this implicitly sets the -replace option.' + desc="in case the transform supplied maps from the input image onto a reference image, use this option to specify the reference. Note that this implicitly sets the -replace option.", ) flip_x = traits.Bool( - argstr='-flipx', + argstr="-flipx", position=1, - desc= - "assume the transform is supplied assuming a coordinate system with the x-axis reversed relative to the MRtrix convention (i.e. x increases from right to left). This is required to handle transform matrices produced by FSL's FLIRT command. This is only used in conjunction with the -reference option." 
+ desc="assume the transform is supplied assuming a coordinate system with the x-axis reversed relative to the MRtrix convention (i.e. x increases from right to left). This is required to handle transform matrices produced by FSL's FLIRT command. This is only used in conjunction with the -reference option.", ) quiet = traits.Bool( - argstr='-quiet', + argstr="-quiet", position=1, - desc="Do not display information messages or progress status.") - debug = traits.Bool( - argstr='-debug', position=1, desc="Display debugging messages.") + desc="Do not display information messages or progress status.", + ) + debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class MRTransformOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output image of the transformation') + out_file = File(exists=True, desc="the output image of the transformation") class MRTransform(CommandLine): @@ -896,25 +917,25 @@ class MRTransform(CommandLine): >>> MRxform.run() # doctest: +SKIP """ - _cmd = 'mrtransform' + _cmd = "mrtransform" input_spec = MRTransformInputSpec output_spec = MRTransformOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_filename - if not isdefined(outputs['out_file']): - outputs['out_file'] = op.abspath(self._gen_outfilename()) + outputs["out_file"] = self.inputs.out_filename + if not isdefined(outputs["out_file"]): + outputs["out_file"] = op.abspath(self._gen_outfilename()) else: - outputs['out_file'] = op.abspath(outputs['out_file']) + outputs["out_file"] = op.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_files[0]) - return name + '_MRTransform.mif' + return name + "_MRTransform.mif" diff --git a/nipype/interfaces/mrtrix/tensors.py b/nipype/interfaces/mrtrix/tensors.py index 6751b6196d..d7cbb5f7a8 100644 --- a/nipype/interfaces/mrtrix/tensors.py +++ b/nipype/interfaces/mrtrix/tensors.py @@ -7,42 +7,46 @@ from ... import logging from ...utils.filemanip import split_filename -from ..base import (CommandLineInputSpec, CommandLine, BaseInterface, traits, - File, TraitedSpec, isdefined) -iflogger = logging.getLogger('nipype.interface') +from ..base import ( + CommandLineInputSpec, + CommandLine, + BaseInterface, + traits, + File, + TraitedSpec, + isdefined, +) + +iflogger = logging.getLogger("nipype.interface") class DWI2SphericalHarmonicsImageInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='Diffusion-weighted images') - out_filename = File( - genfile=True, argstr='%s', position=-1, desc='Output filename') + desc="Diffusion-weighted images", + ) + out_filename = File(genfile=True, argstr="%s", position=-1, desc="Output filename") encoding_file = File( exists=True, - argstr='-grad %s', + argstr="-grad %s", mandatory=True, position=1, - desc= - 'Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix' + desc="Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). 
See FSL2MRTrix", ) maximum_harmonic_order = traits.Float( - argstr='-lmax %s', - desc= - 'set the maximum harmonic order for the output series. By default, the program will use the highest possible lmax given the number of diffusion-weighted images.' + argstr="-lmax %s", + desc="set the maximum harmonic order for the output series. By default, the program will use the highest possible lmax given the number of diffusion-weighted images.", ) normalise = traits.Bool( - argstr='-normalise', - position=3, - desc="normalise the DW signal to the b=0 image") + argstr="-normalise", position=3, desc="normalise the DW signal to the b=0 image" + ) class DWI2SphericalHarmonicsImageOutputSpec(TraitedSpec): - spherical_harmonics_image = File( - exists=True, desc='Spherical harmonics image') + spherical_harmonics_image = File(exists=True, desc="Spherical harmonics image") class DWI2SphericalHarmonicsImage(CommandLine): @@ -83,110 +87,100 @@ class DWI2SphericalHarmonicsImage(CommandLine): >>> dwi2SH.inputs.encoding_file = 'encoding.txt' >>> dwi2SH.run() # doctest: +SKIP """ - _cmd = 'dwi2SH' + + _cmd = "dwi2SH" input_spec = DWI2SphericalHarmonicsImageInputSpec output_spec = DWI2SphericalHarmonicsImageOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['spherical_harmonics_image'] = self.inputs.out_filename - if not isdefined(outputs['spherical_harmonics_image']): - outputs['spherical_harmonics_image'] = op.abspath( - self._gen_outfilename()) + outputs["spherical_harmonics_image"] = self.inputs.out_filename + if not isdefined(outputs["spherical_harmonics_image"]): + outputs["spherical_harmonics_image"] = op.abspath(self._gen_outfilename()) else: - outputs['spherical_harmonics_image'] = op.abspath( - outputs['spherical_harmonics_image']) + outputs["spherical_harmonics_image"] = op.abspath( + outputs["spherical_harmonics_image"] + ) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_SH.mif' + return name + "_SH.mif" class ConstrainedSphericalDeconvolutionInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-3, - desc='diffusion-weighted image') + desc="diffusion-weighted image", + ) response_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc= - 'the diffusion-weighted signal response function for a single fibre population (see EstimateResponse)' + desc="the diffusion-weighted signal response function for a single fibre population (see EstimateResponse)", ) - out_filename = File( - genfile=True, argstr='%s', position=-1, desc='Output filename') + out_filename = File(genfile=True, argstr="%s", position=-1, desc="Output filename") mask_image = File( exists=True, - argstr='-mask %s', + argstr="-mask %s", position=2, - desc= - 'only perform computation within the specified binary brain mask image' + desc="only perform computation within the specified binary brain mask image", ) encoding_file = File( exists=True, - argstr='-grad %s', + argstr="-grad %s", position=1, - desc= - 'Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). 
See FSL2MRTrix' + desc="Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix", ) filter_file = File( exists=True, - argstr='-filter %s', + argstr="-filter %s", position=-2, - desc= - 'a text file containing the filtering coefficients for each even harmonic order.' - 'the linear frequency filtering parameters used for the initial linear spherical deconvolution step (default = [ 1 1 1 0 0 ]).' + desc="a text file containing the filtering coefficients for each even harmonic order." + "the linear frequency filtering parameters used for the initial linear spherical deconvolution step (default = [ 1 1 1 0 0 ]).", ) lambda_value = traits.Float( - argstr='-lambda %s', - desc= - 'the regularisation parameter lambda that controls the strength of the constraint (default = 1.0).' + argstr="-lambda %s", + desc="the regularisation parameter lambda that controls the strength of the constraint (default = 1.0).", ) maximum_harmonic_order = traits.Int( - argstr='-lmax %s', - desc= - 'set the maximum harmonic order for the output series. By default, the program will use the highest possible lmax given the number of diffusion-weighted images.' + argstr="-lmax %s", + desc="set the maximum harmonic order for the output series. By default, the program will use the highest possible lmax given the number of diffusion-weighted images.", ) threshold_value = traits.Float( - argstr='-threshold %s', - desc= - 'the threshold below which the amplitude of the FOD is assumed to be zero, expressed as a fraction of the mean value of the initial FOD (default = 0.1)' + argstr="-threshold %s", + desc="the threshold below which the amplitude of the FOD is assumed to be zero, expressed as a fraction of the mean value of the initial FOD (default = 0.1)", ) iterations = traits.Int( - argstr='-niter %s', - desc= - 'the maximum number of iterations to perform for each voxel (default = 50)' + argstr="-niter %s", + desc="the maximum number of iterations to perform for each voxel (default = 50)", ) - debug = traits.Bool(argstr='-debug', desc='Display debugging messages.') + debug = traits.Bool(argstr="-debug", desc="Display debugging messages.") directions_file = File( exists=True, - argstr='-directions %s', + argstr="-directions %s", position=-2, - desc= - 'a text file containing the [ el az ] pairs for the directions: Specify the directions over which to apply the non-negativity constraint (by default, the built-in 300 direction set is used)' + desc="a text file containing the [ el az ] pairs for the directions: Specify the directions over which to apply the non-negativity constraint (by default, the built-in 300 direction set is used)", ) normalise = traits.Bool( - argstr='-normalise', - position=3, - desc="normalise the DW signal to the b=0 image") + argstr="-normalise", position=3, desc="normalise the DW signal to the b=0 image" + ) class ConstrainedSphericalDeconvolutionOutputSpec(TraitedSpec): - spherical_harmonics_image = File( - exists=True, desc='Spherical harmonics image') + spherical_harmonics_image = File(exists=True, desc="Spherical harmonics image") class ConstrainedSphericalDeconvolution(CommandLine): @@ -221,72 +215,70 @@ class ConstrainedSphericalDeconvolution(CommandLine): >>> csdeconv.run() # doctest: +SKIP """ - _cmd = 'csdeconv' + _cmd = "csdeconv" input_spec = ConstrainedSphericalDeconvolutionInputSpec output_spec = 
ConstrainedSphericalDeconvolutionOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['spherical_harmonics_image'] = self.inputs.out_filename - if not isdefined(outputs['spherical_harmonics_image']): - outputs['spherical_harmonics_image'] = op.abspath( - self._gen_outfilename()) + outputs["spherical_harmonics_image"] = self.inputs.out_filename + if not isdefined(outputs["spherical_harmonics_image"]): + outputs["spherical_harmonics_image"] = op.abspath(self._gen_outfilename()) else: - outputs['spherical_harmonics_image'] = op.abspath( - outputs['spherical_harmonics_image']) + outputs["spherical_harmonics_image"] = op.abspath( + outputs["spherical_harmonics_image"] + ) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_CSD.mif' + return name + "_CSD.mif" class EstimateResponseForSHInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-3, - desc='Diffusion-weighted images') + desc="Diffusion-weighted images", + ) mask_image = File( exists=True, mandatory=True, - argstr='%s', + argstr="%s", position=-2, - desc= - 'only perform computation within the specified binary brain mask image' + desc="only perform computation within the specified binary brain mask image", ) - out_filename = File( - genfile=True, argstr='%s', position=-1, desc='Output filename') + out_filename = File(genfile=True, argstr="%s", position=-1, desc="Output filename") encoding_file = File( exists=True, - argstr='-grad %s', + argstr="-grad %s", mandatory=True, position=1, - desc= - 'Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix' + desc="Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix", ) maximum_harmonic_order = traits.Int( - argstr='-lmax %s', - desc= - 'set the maximum harmonic order for the output series. By default, the program will use the highest possible lmax given the number of diffusion-weighted images.' + argstr="-lmax %s", + desc="set the maximum harmonic order for the output series. By default, the program will use the highest possible lmax given the number of diffusion-weighted images.", ) normalise = traits.Bool( - argstr='-normalise', desc='normalise the DW signal to the b=0 image') + argstr="-normalise", desc="normalise the DW signal to the b=0 image" + ) quiet = traits.Bool( - argstr='-quiet', - desc='Do not display information messages or progress status.') - debug = traits.Bool(argstr='-debug', desc='Display debugging messages.') + argstr="-quiet", desc="Do not display information messages or progress status." 
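estimate_response and csdeconv are meant to run back to back: the response text file produced by EstimateResponseForSH feeds the response_file input of ConstrainedSphericalDeconvolution. A sketch, assuming the MRtrix 0.2 binaries are on PATH and using placeholder file names:

from nipype.interfaces import mrtrix

estresp = mrtrix.EstimateResponseForSH(
    in_file="dwi.mif",
    mask_image="single_fibre_mask.mif",
    encoding_file="encoding.txt",
)
result = estresp.run()

csdeconv = mrtrix.ConstrainedSphericalDeconvolution(
    in_file="dwi.mif",
    response_file=result.outputs.response,
    encoding_file="encoding.txt",
)
csdeconv.run()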
+    )
+    debug = traits.Bool(argstr="-debug", desc="Display debugging messages.")
class EstimateResponseForSHOutputSpec(TraitedSpec):
-    response = File(exists=True, desc='Spherical harmonics image')
+    response = File(
+        exists=True,
+        desc="the diffusion-weighted signal response function for a single fibre population (text file)",
+    )
class EstimateResponseForSH(CommandLine):
@@ -303,28 +295,29 @@ class EstimateResponseForSH(CommandLine):
    >>> estresp.inputs.encoding_file = 'encoding.txt'
    >>> estresp.run()  # doctest: +SKIP
    """
-    _cmd = 'estimate_response'
+
+    _cmd = "estimate_response"
    input_spec = EstimateResponseForSHInputSpec
    output_spec = EstimateResponseForSHOutputSpec
    def _list_outputs(self):
        outputs = self.output_spec().get()
-        outputs['response'] = self.inputs.out_filename
-        if not isdefined(outputs['response']):
-            outputs['response'] = op.abspath(self._gen_outfilename())
+        outputs["response"] = self.inputs.out_filename
+        if not isdefined(outputs["response"]):
+            outputs["response"] = op.abspath(self._gen_outfilename())
        else:
-            outputs['response'] = op.abspath(outputs['response'])
+            outputs["response"] = op.abspath(outputs["response"])
        return outputs
    def _gen_filename(self, name):
-        if name == 'out_filename':
+        if name == "out_filename":
            return self._gen_outfilename()
        else:
            return None
    def _gen_outfilename(self):
        _, name, _ = split_filename(self.inputs.in_file)
-        return name + '_ER.txt'
+        return name + "_ER.txt"
def concat_files(bvec_file, bval_file, invert_x, invert_y, invert_z):
@@ -334,42 +327,47 @@ def concat_files(bvec_file, bval_file, invert_x, invert_y, invert_z):
    bvecs = np.transpose(bvecs)
    if invert_x:
        bvecs[0, :] = -bvecs[0, :]
-        iflogger.info('Inverting b-vectors in the x direction')
+        iflogger.info("Inverting b-vectors in the x direction")
    if invert_y:
        bvecs[1, :] = -bvecs[1, :]
-        iflogger.info('Inverting b-vectors in the y direction')
+        iflogger.info("Inverting b-vectors in the y direction")
    if invert_z:
        bvecs[2, :] = -bvecs[2, :]
-        iflogger.info('Inverting b-vectors in the z direction')
+        iflogger.info("Inverting b-vectors in the z direction")
    iflogger.info(np.shape(bvecs))
    iflogger.info(np.shape(bvals))
    encoding = np.transpose(np.vstack((bvecs, bvals)))
    _, bvec, _ = split_filename(bvec_file)
    _, bval, _ = split_filename(bval_file)
-    out_encoding_file = bvec + '_' + bval + '.txt'
+    out_encoding_file = bvec + "_" + bval + ".txt"
    np.savetxt(out_encoding_file, encoding)
    return out_encoding_file
class FSL2MRTrixInputSpec(TraitedSpec):
    bvec_file = File(
-        exists=True, mandatory=True, desc='FSL b-vectors file (3xN text file)')
+        exists=True, mandatory=True, desc="FSL b-vectors file (3xN text file)"
+    )
    bval_file = File(
-        exists=True, mandatory=True, desc='FSL b-values file (1xN text file)')
+        exists=True, mandatory=True, desc="FSL b-values file (1xN text file)"
+    )
    invert_x = traits.Bool(
-        False, usedefault=True, desc='Inverts the b-vectors along the x-axis')
+        False, usedefault=True, desc="Inverts the b-vectors along the x-axis"
+    )
    invert_y = traits.Bool(
-        False, usedefault=True, desc='Inverts the b-vectors along the y-axis')
+        False, usedefault=True, desc="Inverts the b-vectors along the y-axis"
+    )
    invert_z = traits.Bool(
-        False, usedefault=True, desc='Inverts the b-vectors along the z-axis')
-    out_encoding_file = File(genfile=True, desc='Output encoding filename')
+        False, usedefault=True, desc="Inverts the b-vectors along the z-axis"
+    )
+    out_encoding_file = File(genfile=True, desc="Output encoding filename")
class FSL2MRTrixOutputSpec(TraitedSpec):
    encoding_file = File(
-        desc=
-        'The gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient'
-        'and b gives the b-value in units (1000 s/mm^2).')
+        desc="The gradient encoding, supplied as a 4xN text file with each line in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, "
+        "and b gives the b-value in units (1000 s/mm^2)."
+    )
class FSL2MRTrix(BaseInterface):
@@ -389,23 +387,27 @@ class FSL2MRTrix(BaseInterface):
    >>> fsl2mrtrix.inputs.invert_y = True
    >>> fsl2mrtrix.run()  # doctest: +SKIP
    """
+
    input_spec = FSL2MRTrixInputSpec
    output_spec = FSL2MRTrixOutputSpec
    def _run_interface(self, runtime):
-        encoding = concat_files(self.inputs.bvec_file, self.inputs.bval_file,
-                                self.inputs.invert_x, self.inputs.invert_y,
-                                self.inputs.invert_z)
+        encoding = concat_files(
+            self.inputs.bvec_file,
+            self.inputs.bval_file,
+            self.inputs.invert_x,
+            self.inputs.invert_y,
+            self.inputs.invert_z,
+        )
        return runtime
    def _list_outputs(self):
        outputs = self.output_spec().get()
-        outputs['encoding_file'] = op.abspath(
-            self._gen_filename('out_encoding_file'))
+        outputs["encoding_file"] = op.abspath(self._gen_filename("out_encoding_file"))
        return outputs
    def _gen_filename(self, name):
-        if name == 'out_encoding_file':
+        if name == "out_encoding_file":
            return self._gen_outfilename()
        else:
            return None
@@ -413,40 +415,40 @@ def _gen_filename(self, name):
    def _gen_outfilename(self):
        _, bvec, _ = split_filename(self.inputs.bvec_file)
        _, bval, _ = split_filename(self.inputs.bval_file)
-        return bvec + '_' + bval + '.txt'
+        return bvec + "_" + bval + ".txt"
class GenerateDirectionsInputSpec(CommandLineInputSpec):
    num_dirs = traits.Int(
        mandatory=True,
-        argstr='%s',
+        argstr="%s",
        position=-2,
-        desc='the number of directions to generate.')
+        desc="the number of directions to generate.",
+    )
    power = traits.Float(
-        argstr='-power %s',
-        desc='specify exponent to use for repulsion power law.')
+        argstr="-power %s", desc="specify exponent to use for repulsion power law."
+    )
    niter = traits.Int(
-        argstr='-niter %s',
-        desc='specify the maximum number of iterations to perform.')
-    display_info = traits.Bool(
-        argstr='-info', desc='Display information messages.')
+        argstr="-niter %s", desc="specify the maximum number of iterations to perform."
+    )
+    display_info = traits.Bool(argstr="-info", desc="Display information messages.")
    quiet_display = traits.Bool(
-        argstr='-quiet',
-        desc='do not display information messages or progress status.')
-    display_debug = traits.Bool(
-        argstr='-debug', desc='Display debugging messages.')
+        argstr="-quiet", desc="do not display information messages or progress status."
+ ) + display_debug = traits.Bool(argstr="-debug", desc="Display debugging messages.") out_file = File( - name_source=['num_dirs'], - name_template='directions_%d.txt', - argstr='%s', + name_source=["num_dirs"], + name_template="directions_%d.txt", + argstr="%s", hash_files=False, position=-1, - desc='the text file to write the directions to, as [ az el ] pairs.') + desc="the text file to write the directions to, as [ az el ] pairs.", + ) class GenerateDirectionsOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='directions file') + out_file = File(exists=True, desc="directions file") class GenerateDirections(CommandLine): @@ -462,7 +464,7 @@ class GenerateDirections(CommandLine): >>> gendir.run() # doctest: +SKIP """ - _cmd = 'gendir' + _cmd = "gendir" input_spec = GenerateDirectionsInputSpec output_spec = GenerateDirectionsOutputSpec @@ -470,58 +472,57 @@ class GenerateDirections(CommandLine): class FindShPeaksInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-3, - desc='the input image of SH coefficients.') + desc="the input image of SH coefficients.", + ) directions_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='the set of directions to use as seeds for the peak finding') + desc="the set of directions to use as seeds for the peak finding", + ) peaks_image = File( exists=True, - argstr='-peaks %s', - desc= - 'the program will try to find the peaks that most closely match those in the image provided' + argstr="-peaks %s", + desc="the program will try to find the peaks that most closely match those in the image provided", ) num_peaks = traits.Int( - argstr='-num %s', desc='the number of peaks to extract (default is 3)') + argstr="-num %s", desc="the number of peaks to extract (default is 3)" + ) peak_directions = traits.List( traits.Float, - argstr='-direction %s', - sep=' ', + argstr="-direction %s", + sep=" ", minlen=2, maxlen=2, - desc= - 'phi theta. the direction of a peak to estimate. The algorithm will attempt to find the same number of peaks as have been specified using this option ' - ' phi: the azimuthal angle of the direction (in degrees). theta: the elevation angle of the direction (in degrees, from the vertical z-axis)' + desc="phi theta. the direction of a peak to estimate. The algorithm will attempt to find the same number of peaks as have been specified using this option " + " phi: the azimuthal angle of the direction (in degrees). theta: the elevation angle of the direction (in degrees, from the vertical z-axis)", ) peak_threshold = traits.Float( - argstr='-threshold %s', - desc= - 'only peak amplitudes greater than the threshold will be considered') - display_info = traits.Bool( - argstr='-info', desc='Display information messages.') + argstr="-threshold %s", + desc="only peak amplitudes greater than the threshold will be considered", + ) + display_info = traits.Bool(argstr="-info", desc="Display information messages.") quiet_display = traits.Bool( - argstr='-quiet', - desc='do not display information messages or progress status.') - display_debug = traits.Bool( - argstr='-debug', desc='Display debugging messages.') + argstr="-quiet", desc="do not display information messages or progress status." + ) + display_debug = traits.Bool(argstr="-debug", desc="Display debugging messages.") out_file = File( name_template="%s_peak_dirs.mif", keep_extension=False, - argstr='%s', + argstr="%s", hash_files=False, position=-1, - desc= - 'the output image. 
Each volume corresponds to the x, y & z component of each peak direction vector in turn', - name_source=["in_file"]) + desc="the output image. Each volume corresponds to the x, y & z component of each peak direction vector in turn", + name_source=["in_file"], + ) class FindShPeaksOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='Peak directions image') + out_file = File(exists=True, desc="Peak directions image") class FindShPeaks(CommandLine): @@ -539,7 +540,7 @@ class FindShPeaks(CommandLine): >>> shpeaks.run() # doctest: +SKIP """ - _cmd = 'find_SH_peaks' + _cmd = "find_SH_peaks" input_spec = FindShPeaksInputSpec output_spec = FindShPeaksOutputSpec @@ -547,49 +548,46 @@ class FindShPeaks(CommandLine): class Directions2AmplitudeInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc= - 'the input directions image. Each volume corresponds to the x, y & z component of each direction vector in turn.' + desc="the input directions image. Each volume corresponds to the x, y & z component of each direction vector in turn.", ) peaks_image = File( exists=True, - argstr='-peaks %s', - desc= - 'the program will try to find the peaks that most closely match those in the image provided' + argstr="-peaks %s", + desc="the program will try to find the peaks that most closely match those in the image provided", ) num_peaks = traits.Int( - argstr='-num %s', desc='the number of peaks to extract (default is 3)') + argstr="-num %s", desc="the number of peaks to extract (default is 3)" + ) peak_directions = traits.List( traits.Float, - argstr='-direction %s', - sep=' ', + argstr="-direction %s", + sep=" ", minlen=2, maxlen=2, - desc= - 'phi theta. the direction of a peak to estimate. The algorithm will attempt to find the same number of peaks as have been specified using this option ' - ' phi: the azimuthal angle of the direction (in degrees). theta: the elevation angle of the direction (in degrees, from the vertical z-axis)' + desc="phi theta. the direction of a peak to estimate. The algorithm will attempt to find the same number of peaks as have been specified using this option " + " phi: the azimuthal angle of the direction (in degrees). theta: the elevation angle of the direction (in degrees, from the vertical z-axis)", ) - display_info = traits.Bool( - argstr='-info', desc='Display information messages.') + display_info = traits.Bool(argstr="-info", desc="Display information messages.") quiet_display = traits.Bool( - argstr='-quiet', - desc='do not display information messages or progress status.') - display_debug = traits.Bool( - argstr='-debug', desc='Display debugging messages.') + argstr="-quiet", desc="do not display information messages or progress status." 
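Note that FindShPeaks resolves its default output through name_source/name_template rather than the older genfile pattern, so the derived output name already appears when the command line is rendered. A small sketch (file names invented; the exact argument order is determined by the position metadata):

from nipype.interfaces import mrtrix

peaks = mrtrix.FindShPeaks(
    in_file="csd.mif",
    directions_file="directions.txt",
    num_peaks=3,
)
# roughly: find_SH_peaks -num 3 csd.mif directions.txt csd_peak_dirs.mif
print(peaks.cmdline)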
+ ) + display_debug = traits.Bool(argstr="-debug", desc="Display debugging messages.") out_file = File( name_template="%s_amplitudes.mif", keep_extension=False, - argstr='%s', + argstr="%s", hash_files=False, position=-1, - desc='the output amplitudes image', - name_source=["in_file"]) + desc="the output amplitudes image", + name_source=["in_file"], + ) class Directions2AmplitudeOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='amplitudes image') + out_file = File(exists=True, desc="amplitudes image") class Directions2Amplitude(CommandLine): @@ -605,6 +603,6 @@ class Directions2Amplitude(CommandLine): >>> amplitudes.run() # doctest: +SKIP """ - _cmd = 'dir2amp' + _cmd = "dir2amp" input_spec = Directions2AmplitudeInputSpec output_spec = Directions2AmplitudeOutputSpec diff --git a/nipype/interfaces/mrtrix/tests/test_auto_ConstrainedSphericalDeconvolution.py b/nipype/interfaces/mrtrix/tests/test_auto_ConstrainedSphericalDeconvolution.py index ba11991004..cee549fae9 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_ConstrainedSphericalDeconvolution.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_ConstrainedSphericalDeconvolution.py @@ -4,66 +4,31 @@ def test_ConstrainedSphericalDeconvolution_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debug=dict(argstr='-debug', ), - directions_file=dict( - argstr='-directions %s', - extensions=None, - position=-2, - ), - encoding_file=dict( - argstr='-grad %s', - extensions=None, - position=1, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - filter_file=dict( - argstr='-filter %s', - extensions=None, - position=-2, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), - iterations=dict(argstr='-niter %s', ), - lambda_value=dict(argstr='-lambda %s', ), - mask_image=dict( - argstr='-mask %s', - extensions=None, - position=2, - ), - maximum_harmonic_order=dict(argstr='-lmax %s', ), - normalise=dict( - argstr='-normalise', - position=3, - ), - out_filename=dict( - argstr='%s', - extensions=None, - genfile=True, - position=-1, - ), - response_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - threshold_value=dict(argstr='-threshold %s', ), + args=dict(argstr="%s",), + debug=dict(argstr="-debug",), + directions_file=dict(argstr="-directions %s", extensions=None, position=-2,), + encoding_file=dict(argstr="-grad %s", extensions=None, position=1,), + environ=dict(nohash=True, usedefault=True,), + filter_file=dict(argstr="-filter %s", extensions=None, position=-2,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), + iterations=dict(argstr="-niter %s",), + lambda_value=dict(argstr="-lambda %s",), + mask_image=dict(argstr="-mask %s", extensions=None, position=2,), + maximum_harmonic_order=dict(argstr="-lmax %s",), + normalise=dict(argstr="-normalise", position=3,), + out_filename=dict(argstr="%s", extensions=None, genfile=True, position=-1,), + response_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + threshold_value=dict(argstr="-threshold %s",), ) inputs = ConstrainedSphericalDeconvolution.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ConstrainedSphericalDeconvolution_outputs(): - output_map = dict(spherical_harmonics_image=dict(extensions=None, ), ) + output_map = dict(spherical_harmonics_image=dict(extensions=None,),) outputs = ConstrainedSphericalDeconvolution.output_spec() 
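These auto-generated tests re-assert the trait metadata declared in the input and output specs. Unrolled for a single trait, the check amounts to:

from nipype.interfaces.mrtrix import ConstrainedSphericalDeconvolution

spec = ConstrainedSphericalDeconvolution.input_spec()
mask_trait = spec.traits()["mask_image"]
# each metakey/value pair in the map above must match the spec exactly
assert mask_trait.argstr == "-mask %s"
assert mask_trait.position == 2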
for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_DWI2SphericalHarmonicsImage.py b/nipype/interfaces/mrtrix/tests/test_auto_DWI2SphericalHarmonicsImage.py index 12246fa6c8..7bd1399f30 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_DWI2SphericalHarmonicsImage.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_DWI2SphericalHarmonicsImage.py @@ -4,42 +4,25 @@ def test_DWI2SphericalHarmonicsImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), encoding_file=dict( - argstr='-grad %s', - extensions=None, - mandatory=True, - position=1, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - maximum_harmonic_order=dict(argstr='-lmax %s', ), - normalise=dict( - argstr='-normalise', - position=3, - ), - out_filename=dict( - argstr='%s', - extensions=None, - genfile=True, - position=-1, + argstr="-grad %s", extensions=None, mandatory=True, position=1, ), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + maximum_harmonic_order=dict(argstr="-lmax %s",), + normalise=dict(argstr="-normalise", position=3,), + out_filename=dict(argstr="%s", extensions=None, genfile=True, position=-1,), ) inputs = DWI2SphericalHarmonicsImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DWI2SphericalHarmonicsImage_outputs(): - output_map = dict(spherical_harmonics_image=dict(extensions=None, ), ) + output_map = dict(spherical_harmonics_image=dict(extensions=None,),) outputs = DWI2SphericalHarmonicsImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_DWI2Tensor.py b/nipype/interfaces/mrtrix/tests/test_auto_DWI2Tensor.py index 69f3be425f..476f68e9ef 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_DWI2Tensor.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_DWI2Tensor.py @@ -4,55 +4,32 @@ def test_DWI2Tensor_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debug=dict( - argstr='-debug', - position=1, - ), - encoding_file=dict( - argstr='-grad %s', - extensions=None, - position=2, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - ignore_slice_by_volume=dict( - argstr='-ignoreslices %s', - position=2, - sep=' ', - ), - ignore_volumes=dict( - argstr='-ignorevolumes %s', - position=2, - sep=' ', - ), - in_file=dict( - argstr='%s', - mandatory=True, - position=-2, - ), + args=dict(argstr="%s",), + debug=dict(argstr="-debug", position=1,), + encoding_file=dict(argstr="-grad %s", extensions=None, position=2,), + environ=dict(nohash=True, usedefault=True,), + ignore_slice_by_volume=dict(argstr="-ignoreslices %s", position=2, sep=" ",), + ignore_volumes=dict(argstr="-ignorevolumes %s", position=2, sep=" ",), + in_file=dict(argstr="%s", mandatory=True, position=-2,), out_filename=dict( - argstr='%s', + argstr="%s", extensions=None, - name_source='in_file', - name_template='%s_tensor.mif', - output_name='tensor', + name_source="in_file", + name_template="%s_tensor.mif", + output_name="tensor", position=-1, ), - quiet=dict( - argstr='-quiet', - position=1, - ), + quiet=dict(argstr="-quiet", position=1,), ) inputs = DWI2Tensor.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value + + def test_DWI2Tensor_outputs(): - output_map = dict(tensor=dict(extensions=None, ), ) + output_map = dict(tensor=dict(extensions=None,),) outputs = DWI2Tensor.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_DiffusionTensorStreamlineTrack.py b/nipype/interfaces/mrtrix/tests/test_auto_DiffusionTensorStreamlineTrack.py index 35b5ec1063..66122fcfdb 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_DiffusionTensorStreamlineTrack.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_DiffusionTensorStreamlineTrack.py @@ -4,126 +4,84 @@ def test_DiffusionTensorStreamlineTrack_inputs(): input_map = dict( - args=dict(argstr='%s', ), - cutoff_value=dict( - argstr='-cutoff %s', - units='NA', - ), - desired_number_of_tracks=dict(argstr='-number %d', ), - do_not_precompute=dict(argstr='-noprecomputed', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + cutoff_value=dict(argstr="-cutoff %s", units="NA",), + desired_number_of_tracks=dict(argstr="-number %d",), + do_not_precompute=dict(argstr="-noprecomputed",), + environ=dict(nohash=True, usedefault=True,), exclude_file=dict( - argstr='-exclude %s', - extensions=None, - xor=['exclude_file', 'exclude_spec'], + argstr="-exclude %s", extensions=None, xor=["exclude_file", "exclude_spec"], ), exclude_spec=dict( - argstr='-exclude %s', + argstr="-exclude %s", position=2, - sep=',', - units='mm', - xor=['exclude_file', 'exclude_spec'], + sep=",", + units="mm", + xor=["exclude_file", "exclude_spec"], ), gradient_encoding_file=dict( - argstr='-grad %s', - extensions=None, - mandatory=True, - position=-2, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, + argstr="-grad %s", extensions=None, mandatory=True, position=-2, ), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), include_file=dict( - argstr='-include %s', - extensions=None, - xor=['include_file', 'include_spec'], + argstr="-include %s", extensions=None, xor=["include_file", "include_spec"], ), include_spec=dict( - argstr='-include %s', + argstr="-include %s", position=2, - sep=',', - units='mm', - xor=['include_file', 'include_spec'], - ), - initial_cutoff_value=dict( - argstr='-initcutoff %s', - units='NA', - ), - initial_direction=dict( - argstr='-initdirection %s', - units='voxels', - ), - inputmodel=dict( - argstr='%s', - position=-3, - usedefault=True, + sep=",", + units="mm", + xor=["include_file", "include_spec"], ), + initial_cutoff_value=dict(argstr="-initcutoff %s", units="NA",), + initial_direction=dict(argstr="-initdirection %s", units="voxels",), + inputmodel=dict(argstr="%s", position=-3, usedefault=True,), mask_file=dict( - argstr='-mask %s', - extensions=None, - xor=['mask_file', 'mask_spec'], + argstr="-mask %s", extensions=None, xor=["mask_file", "mask_spec"], ), mask_spec=dict( - argstr='-mask %s', + argstr="-mask %s", position=2, - sep=',', - units='mm', - xor=['mask_file', 'mask_spec'], - ), - maximum_number_of_tracks=dict(argstr='-maxnum %d', ), - maximum_tract_length=dict( - argstr='-length %s', - units='mm', - ), - minimum_radius_of_curvature=dict( - argstr='-curvature %s', - units='mm', - ), - minimum_tract_length=dict( - argstr='-minlength %s', - units='mm', - ), - no_mask_interpolation=dict(argstr='-nomaskinterp', ), + sep=",", + units="mm", + xor=["mask_file", "mask_spec"], + ), + maximum_number_of_tracks=dict(argstr="-maxnum %d",), + 
maximum_tract_length=dict(argstr="-length %s", units="mm",), + minimum_radius_of_curvature=dict(argstr="-curvature %s", units="mm",), + minimum_tract_length=dict(argstr="-minlength %s", units="mm",), + no_mask_interpolation=dict(argstr="-nomaskinterp",), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, - name_source=['in_file'], - name_template='%s_tracked.tck', - output_name='tracked', + name_source=["in_file"], + name_template="%s_tracked.tck", + output_name="tracked", position=-1, ), seed_file=dict( - argstr='-seed %s', - extensions=None, - xor=['seed_file', 'seed_spec'], + argstr="-seed %s", extensions=None, xor=["seed_file", "seed_spec"], ), seed_spec=dict( - argstr='-seed %s', + argstr="-seed %s", position=2, - sep=',', - units='mm', - xor=['seed_file', 'seed_spec'], - ), - step_size=dict( - argstr='-step %s', - units='mm', + sep=",", + units="mm", + xor=["seed_file", "seed_spec"], ), - stop=dict(argstr='-stop', ), - unidirectional=dict(argstr='-unidirectional', ), + step_size=dict(argstr="-step %s", units="mm",), + stop=dict(argstr="-stop",), + unidirectional=dict(argstr="-unidirectional",), ) inputs = DiffusionTensorStreamlineTrack.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DiffusionTensorStreamlineTrack_outputs(): - output_map = dict(tracked=dict(extensions=None, ), ) + output_map = dict(tracked=dict(extensions=None,),) outputs = DiffusionTensorStreamlineTrack.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_Directions2Amplitude.py b/nipype/interfaces/mrtrix/tests/test_auto_Directions2Amplitude.py index a68cf10cd3..66c75db7dc 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_Directions2Amplitude.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_Directions2Amplitude.py @@ -4,46 +4,34 @@ def test_Directions2Amplitude_inputs(): input_map = dict( - args=dict(argstr='%s', ), - display_debug=dict(argstr='-debug', ), - display_info=dict(argstr='-info', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - num_peaks=dict(argstr='-num %s', ), + args=dict(argstr="%s",), + display_debug=dict(argstr="-debug",), + display_info=dict(argstr="-info",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + num_peaks=dict(argstr="-num %s",), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, hash_files=False, keep_extension=False, - name_source=['in_file'], - name_template='%s_amplitudes.mif', + name_source=["in_file"], + name_template="%s_amplitudes.mif", position=-1, ), - peak_directions=dict( - argstr='-direction %s', - sep=' ', - ), - peaks_image=dict( - argstr='-peaks %s', - extensions=None, - ), - quiet_display=dict(argstr='-quiet', ), + peak_directions=dict(argstr="-direction %s", sep=" ",), + peaks_image=dict(argstr="-peaks %s", extensions=None,), + quiet_display=dict(argstr="-quiet",), ) inputs = Directions2Amplitude.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Directions2Amplitude_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Directions2Amplitude.output_spec() for key, metadata in 
list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_Erode.py b/nipype/interfaces/mrtrix/tests/test_auto_Erode.py index 10f6d464de..cb038461ad 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_Erode.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_Erode.py @@ -4,44 +4,24 @@ def test_Erode_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debug=dict( - argstr='-debug', - position=1, - ), - dilate=dict( - argstr='-dilate', - position=1, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - number_of_passes=dict(argstr='-npass %s', ), - out_filename=dict( - argstr='%s', - extensions=None, - genfile=True, - position=-1, - ), - quiet=dict( - argstr='-quiet', - position=1, - ), + args=dict(argstr="%s",), + debug=dict(argstr="-debug", position=1,), + dilate=dict(argstr="-dilate", position=1,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + number_of_passes=dict(argstr="-npass %s",), + out_filename=dict(argstr="%s", extensions=None, genfile=True, position=-1,), + quiet=dict(argstr="-quiet", position=1,), ) inputs = Erode.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Erode_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Erode.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_EstimateResponseForSH.py b/nipype/interfaces/mrtrix/tests/test_auto_EstimateResponseForSH.py index 5eb68f522a..4040fe9479 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_EstimateResponseForSH.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_EstimateResponseForSH.py @@ -4,47 +4,28 @@ def test_EstimateResponseForSH_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debug=dict(argstr='-debug', ), + args=dict(argstr="%s",), + debug=dict(argstr="-debug",), encoding_file=dict( - argstr='-grad %s', - extensions=None, - mandatory=True, - position=1, + argstr="-grad %s", extensions=None, mandatory=True, position=1, ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), - mask_image=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - maximum_harmonic_order=dict(argstr='-lmax %s', ), - normalise=dict(argstr='-normalise', ), - out_filename=dict( - argstr='%s', - extensions=None, - genfile=True, - position=-1, - ), - quiet=dict(argstr='-quiet', ), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), + mask_image=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + maximum_harmonic_order=dict(argstr="-lmax %s",), + normalise=dict(argstr="-normalise",), + out_filename=dict(argstr="%s", extensions=None, genfile=True, position=-1,), + quiet=dict(argstr="-quiet",), ) inputs = EstimateResponseForSH.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EstimateResponseForSH_outputs(): - output_map = dict(response=dict(extensions=None, ), ) + output_map = dict(response=dict(extensions=None,),) outputs = EstimateResponseForSH.output_spec() 
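Every file in this batch of auto-generated tests follows the same pattern: a literal dict of expected trait metadata is compared, field by field, against the interface's input_spec()/output_spec(), so any drift between the spec definitions and the generated tests fails loudly. A minimal runnable sketch of that pattern, written out for one interface; it assumes nipype is importable, and the Erode metadata used here ("-npass %s", "-quiet") is taken from the hunks above:

    # Illustrative sketch only, not part of the patch.
    from nipype.interfaces.mrtrix import Erode

    expected = {
        "number_of_passes": dict(argstr="-npass %s"),
        "quiet": dict(argstr="-quiet", position=1),
    }
    inputs = Erode.input_spec()
    for key, metadata in expected.items():
        for metakey, value in metadata.items():
            # each trait carries its CLI rendering metadata (argstr,
            # position, ...); the generated tests pin every field so a
            # spec change is caught the next time the suite runs
            assert getattr(inputs.traits()[key], metakey) == value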
for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_FSL2MRTrix.py b/nipype/interfaces/mrtrix/tests/test_auto_FSL2MRTrix.py index d5b8bb8f76..4772abcbc4 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_FSL2MRTrix.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_FSL2MRTrix.py @@ -4,29 +4,22 @@ def test_FSL2MRTrix_inputs(): input_map = dict( - bval_file=dict( - extensions=None, - mandatory=True, - ), - bvec_file=dict( - extensions=None, - mandatory=True, - ), - invert_x=dict(usedefault=True, ), - invert_y=dict(usedefault=True, ), - invert_z=dict(usedefault=True, ), - out_encoding_file=dict( - extensions=None, - genfile=True, - ), + bval_file=dict(extensions=None, mandatory=True,), + bvec_file=dict(extensions=None, mandatory=True,), + invert_x=dict(usedefault=True,), + invert_y=dict(usedefault=True,), + invert_z=dict(usedefault=True,), + out_encoding_file=dict(extensions=None, genfile=True,), ) inputs = FSL2MRTrix.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FSL2MRTrix_outputs(): - output_map = dict(encoding_file=dict(extensions=None, ), ) + output_map = dict(encoding_file=dict(extensions=None,),) outputs = FSL2MRTrix.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_FilterTracks.py b/nipype/interfaces/mrtrix/tests/test_auto_FilterTracks.py index bda0a2059b..0ce949eb41 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_FilterTracks.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_FilterTracks.py @@ -4,71 +4,52 @@ def test_FilterTracks_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debug=dict( - argstr='-debug', - position=1, - ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + debug=dict(argstr="-debug", position=1,), + environ=dict(nohash=True, usedefault=True,), exclude_file=dict( - argstr='-exclude %s', - extensions=None, - xor=['exclude_file', 'exclude_spec'], + argstr="-exclude %s", extensions=None, xor=["exclude_file", "exclude_spec"], ), exclude_spec=dict( - argstr='-exclude %s', + argstr="-exclude %s", position=2, - sep=',', - units='mm', - xor=['exclude_file', 'exclude_spec'], - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, + sep=",", + units="mm", + xor=["exclude_file", "exclude_spec"], ), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), include_file=dict( - argstr='-include %s', - extensions=None, - xor=['include_file', 'include_spec'], + argstr="-include %s", extensions=None, xor=["include_file", "include_spec"], ), include_spec=dict( - argstr='-include %s', + argstr="-include %s", position=2, - sep=',', - units='mm', - xor=['include_file', 'include_spec'], - ), - invert=dict(argstr='-invert', ), - minimum_tract_length=dict( - argstr='-minlength %s', - units='mm', + sep=",", + units="mm", + xor=["include_file", "include_spec"], ), - no_mask_interpolation=dict(argstr='-nomaskinterp', ), + invert=dict(argstr="-invert",), + minimum_tract_length=dict(argstr="-minlength %s", units="mm",), + no_mask_interpolation=dict(argstr="-nomaskinterp",), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, hash_files=False, - name_source=['in_file'], - name_template='%s_filt', + name_source=["in_file"], + name_template="%s_filt", position=-1, ), - quiet=dict( - argstr='-quiet', - position=1, - ), + quiet=dict(argstr="-quiet", 
position=1,), ) inputs = FilterTracks.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FilterTracks_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = FilterTracks.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_FindShPeaks.py b/nipype/interfaces/mrtrix/tests/test_auto_FindShPeaks.py index 010b912752..c8e6a9cb3d 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_FindShPeaks.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_FindShPeaks.py @@ -4,53 +4,38 @@ def test_FindShPeaks_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), directions_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - display_debug=dict(argstr='-debug', ), - display_info=dict(argstr='-info', ), - environ=dict( - nohash=True, - usedefault=True, + argstr="%s", extensions=None, mandatory=True, position=-2, ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), - num_peaks=dict(argstr='-num %s', ), + display_debug=dict(argstr="-debug",), + display_info=dict(argstr="-info",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), + num_peaks=dict(argstr="-num %s",), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, hash_files=False, keep_extension=False, - name_source=['in_file'], - name_template='%s_peak_dirs.mif', + name_source=["in_file"], + name_template="%s_peak_dirs.mif", position=-1, ), - peak_directions=dict( - argstr='-direction %s', - sep=' ', - ), - peak_threshold=dict(argstr='-threshold %s', ), - peaks_image=dict( - argstr='-peaks %s', - extensions=None, - ), - quiet_display=dict(argstr='-quiet', ), + peak_directions=dict(argstr="-direction %s", sep=" ",), + peak_threshold=dict(argstr="-threshold %s",), + peaks_image=dict(argstr="-peaks %s", extensions=None,), + quiet_display=dict(argstr="-quiet",), ) inputs = FindShPeaks.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FindShPeaks_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = FindShPeaks.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_GenerateDirections.py b/nipype/interfaces/mrtrix/tests/test_auto_GenerateDirections.py index ae8f2a3cfa..f980f9386b 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_GenerateDirections.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_GenerateDirections.py @@ -4,37 +4,32 @@ def test_GenerateDirections_inputs(): input_map = dict( - args=dict(argstr='%s', ), - display_debug=dict(argstr='-debug', ), - display_info=dict(argstr='-info', ), - environ=dict( - nohash=True, - usedefault=True, - ), - niter=dict(argstr='-niter %s', ), - num_dirs=dict( - argstr='%s', - mandatory=True, - position=-2, - ), + args=dict(argstr="%s",), + display_debug=dict(argstr="-debug",), + display_info=dict(argstr="-info",), + environ=dict(nohash=True, usedefault=True,), + niter=dict(argstr="-niter %s",), + num_dirs=dict(argstr="%s", mandatory=True, position=-2,), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, 
hash_files=False, - name_source=['num_dirs'], - name_template='directions_%d.txt', + name_source=["num_dirs"], + name_template="directions_%d.txt", position=-1, ), - power=dict(argstr='-power %s', ), - quiet_display=dict(argstr='-quiet', ), + power=dict(argstr="-power %s",), + quiet_display=dict(argstr="-quiet",), ) inputs = GenerateDirections.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GenerateDirections_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = GenerateDirections.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_GenerateWhiteMatterMask.py b/nipype/interfaces/mrtrix/tests/test_auto_GenerateWhiteMatterMask.py index f558927eab..dc58ac51d2 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_GenerateWhiteMatterMask.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_GenerateWhiteMatterMask.py @@ -4,35 +4,16 @@ def test_GenerateWhiteMatterMask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - binary_mask=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), + args=dict(argstr="%s",), + binary_mask=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), encoding_file=dict( - argstr='-grad %s', - extensions=None, - mandatory=True, - position=1, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, + argstr="-grad %s", extensions=None, mandatory=True, position=1, ), - noise_level_margin=dict(argstr='-margin %s', ), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), + noise_level_margin=dict(argstr="-margin %s",), out_WMProb_filename=dict( - argstr='%s', - extensions=None, - genfile=True, - position=-1, + argstr="%s", extensions=None, genfile=True, position=-1, ), ) inputs = GenerateWhiteMatterMask.input_spec() @@ -40,8 +21,10 @@ def test_GenerateWhiteMatterMask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GenerateWhiteMatterMask_outputs(): - output_map = dict(WMprobabilitymap=dict(extensions=None, ), ) + output_map = dict(WMprobabilitymap=dict(extensions=None,),) outputs = GenerateWhiteMatterMask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_MRConvert.py b/nipype/interfaces/mrtrix/tests/test_auto_MRConvert.py index d2b9a02030..792beb6aa0 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_MRConvert.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_MRConvert.py @@ -4,75 +4,30 @@ def test_MRConvert_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - extension=dict( - position=2, - usedefault=True, - ), - extract_at_axis=dict( - argstr='-coord %s', - position=1, - ), - extract_at_coordinate=dict( - argstr='%s', - position=2, - sep=',', - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - layout=dict( - argstr='-output %s', - position=2, - ), - offset_bias=dict( - argstr='-scale %d', - position=3, - units='mm', - ), - out_filename=dict( - argstr='%s', - extensions=None, - genfile=True, - position=-1, - ), - output_datatype=dict( - 
argstr='-output %s', - position=2, - ), - prs=dict( - argstr='-prs', - position=3, - ), - replace_NaN_with_zero=dict( - argstr='-zero', - position=3, - ), - resample=dict( - argstr='-scale %d', - position=3, - units='mm', - ), - voxel_dims=dict( - argstr='-vox %s', - position=3, - sep=',', - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + extension=dict(position=2, usedefault=True,), + extract_at_axis=dict(argstr="-coord %s", position=1,), + extract_at_coordinate=dict(argstr="%s", position=2, sep=",",), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + layout=dict(argstr="-output %s", position=2,), + offset_bias=dict(argstr="-scale %d", position=3, units="mm",), + out_filename=dict(argstr="%s", extensions=None, genfile=True, position=-1,), + output_datatype=dict(argstr="-output %s", position=2,), + prs=dict(argstr="-prs", position=3,), + replace_NaN_with_zero=dict(argstr="-zero", position=3,), + resample=dict(argstr="-scale %d", position=3, units="mm",), + voxel_dims=dict(argstr="-vox %s", position=3, sep=",",), ) inputs = MRConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRConvert_outputs(): - output_map = dict(converted=dict(extensions=None, ), ) + output_map = dict(converted=dict(extensions=None,),) outputs = MRConvert.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_MRMultiply.py b/nipype/interfaces/mrtrix/tests/test_auto_MRMultiply.py index fa9526c037..5525ef1130 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_MRMultiply.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_MRMultiply.py @@ -4,38 +4,22 @@ def test_MRMultiply_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debug=dict( - argstr='-debug', - position=1, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_files=dict( - argstr='%s', - mandatory=True, - position=-2, - ), - out_filename=dict( - argstr='%s', - extensions=None, - genfile=True, - position=-1, - ), - quiet=dict( - argstr='-quiet', - position=1, - ), + args=dict(argstr="%s",), + debug=dict(argstr="-debug", position=1,), + environ=dict(nohash=True, usedefault=True,), + in_files=dict(argstr="%s", mandatory=True, position=-2,), + out_filename=dict(argstr="%s", extensions=None, genfile=True, position=-1,), + quiet=dict(argstr="-quiet", position=1,), ) inputs = MRMultiply.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRMultiply_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = MRMultiply.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_MRTransform.py b/nipype/interfaces/mrtrix/tests/test_auto_MRTransform.py index d41758e65e..60e0f452ac 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_MRTransform.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_MRTransform.py @@ -4,65 +4,28 @@ def test_MRTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debug=dict( - argstr='-debug', - position=1, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - flip_x=dict( - argstr='-flipx', - position=1, - ), - in_files=dict( - argstr='%s', - mandatory=True, - position=-2, - ), - invert=dict( - argstr='-inverse', - 
position=1, - ), - out_filename=dict( - argstr='%s', - extensions=None, - genfile=True, - position=-1, - ), - quiet=dict( - argstr='-quiet', - position=1, - ), - reference_image=dict( - argstr='-reference %s', - extensions=None, - position=1, - ), - replace_transform=dict( - argstr='-replace', - position=1, - ), - template_image=dict( - argstr='-template %s', - extensions=None, - position=1, - ), - transformation_file=dict( - argstr='-transform %s', - extensions=None, - position=1, - ), + args=dict(argstr="%s",), + debug=dict(argstr="-debug", position=1,), + environ=dict(nohash=True, usedefault=True,), + flip_x=dict(argstr="-flipx", position=1,), + in_files=dict(argstr="%s", mandatory=True, position=-2,), + invert=dict(argstr="-inverse", position=1,), + out_filename=dict(argstr="%s", extensions=None, genfile=True, position=-1,), + quiet=dict(argstr="-quiet", position=1,), + reference_image=dict(argstr="-reference %s", extensions=None, position=1,), + replace_transform=dict(argstr="-replace", position=1,), + template_image=dict(argstr="-template %s", extensions=None, position=1,), + transformation_file=dict(argstr="-transform %s", extensions=None, position=1,), ) inputs = MRTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRTransform_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = MRTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_MRTrix2TrackVis.py b/nipype/interfaces/mrtrix/tests/test_auto_MRTrix2TrackVis.py index 130436fae6..05f80b4646 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_MRTrix2TrackVis.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_MRTrix2TrackVis.py @@ -4,26 +4,21 @@ def test_MRTrix2TrackVis_inputs(): input_map = dict( - image_file=dict(extensions=None, ), - in_file=dict( - extensions=None, - mandatory=True, - ), - matrix_file=dict(extensions=None, ), - out_filename=dict( - extensions=None, - genfile=True, - usedefault=True, - ), - registration_image_file=dict(extensions=None, ), + image_file=dict(extensions=None,), + in_file=dict(extensions=None, mandatory=True,), + matrix_file=dict(extensions=None,), + out_filename=dict(extensions=None, genfile=True, usedefault=True,), + registration_image_file=dict(extensions=None,), ) inputs = MRTrix2TrackVis.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRTrix2TrackVis_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = MRTrix2TrackVis.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_MRTrixInfo.py b/nipype/interfaces/mrtrix/tests/test_auto_MRTrixInfo.py index 1ca965b012..5e3fd2882e 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_MRTrixInfo.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_MRTrixInfo.py @@ -4,23 +4,17 @@ def test_MRTrixInfo_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, 
position=-2,), ) inputs = MRTrixInfo.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRTrixInfo_outputs(): output_map = dict() outputs = MRTrixInfo.output_spec() diff --git a/nipype/interfaces/mrtrix/tests/test_auto_MRTrixViewer.py b/nipype/interfaces/mrtrix/tests/test_auto_MRTrixViewer.py index dd8b6066c3..711191bd16 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_MRTrixViewer.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_MRTrixViewer.py @@ -4,30 +4,19 @@ def test_MRTrixViewer_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debug=dict( - argstr='-debug', - position=1, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_files=dict( - argstr='%s', - mandatory=True, - position=-2, - ), - quiet=dict( - argstr='-quiet', - position=1, - ), + args=dict(argstr="%s",), + debug=dict(argstr="-debug", position=1,), + environ=dict(nohash=True, usedefault=True,), + in_files=dict(argstr="%s", mandatory=True, position=-2,), + quiet=dict(argstr="-quiet", position=1,), ) inputs = MRTrixViewer.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRTrixViewer_outputs(): output_map = dict() outputs = MRTrixViewer.output_spec() diff --git a/nipype/interfaces/mrtrix/tests/test_auto_MedianFilter3D.py b/nipype/interfaces/mrtrix/tests/test_auto_MedianFilter3D.py index 68e255e9d3..0b3f38dcbc 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_MedianFilter3D.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_MedianFilter3D.py @@ -4,39 +4,22 @@ def test_MedianFilter3D_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debug=dict( - argstr='-debug', - position=1, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - out_filename=dict( - argstr='%s', - extensions=None, - genfile=True, - position=-1, - ), - quiet=dict( - argstr='-quiet', - position=1, - ), + args=dict(argstr="%s",), + debug=dict(argstr="-debug", position=1,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + out_filename=dict(argstr="%s", extensions=None, genfile=True, position=-1,), + quiet=dict(argstr="-quiet", position=1,), ) inputs = MedianFilter3D.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MedianFilter3D_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = MedianFilter3D.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_ProbabilisticSphericallyDeconvolutedStreamlineTrack.py b/nipype/interfaces/mrtrix/tests/test_auto_ProbabilisticSphericallyDeconvolutedStreamlineTrack.py index 0dd42aab78..e640da1306 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_ProbabilisticSphericallyDeconvolutedStreamlineTrack.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_ProbabilisticSphericallyDeconvolutedStreamlineTrack.py @@ -4,121 +4,82 @@ def test_ProbabilisticSphericallyDeconvolutedStreamlineTrack_inputs(): input_map = dict( - args=dict(argstr='%s', ), - cutoff_value=dict( - argstr='-cutoff %s', - units='NA', - ), - 
desired_number_of_tracks=dict(argstr='-number %d', ), - do_not_precompute=dict(argstr='-noprecomputed', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + cutoff_value=dict(argstr="-cutoff %s", units="NA",), + desired_number_of_tracks=dict(argstr="-number %d",), + do_not_precompute=dict(argstr="-noprecomputed",), + environ=dict(nohash=True, usedefault=True,), exclude_file=dict( - argstr='-exclude %s', - extensions=None, - xor=['exclude_file', 'exclude_spec'], + argstr="-exclude %s", extensions=None, xor=["exclude_file", "exclude_spec"], ), exclude_spec=dict( - argstr='-exclude %s', + argstr="-exclude %s", position=2, - sep=',', - units='mm', - xor=['exclude_file', 'exclude_spec'], - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, + sep=",", + units="mm", + xor=["exclude_file", "exclude_spec"], ), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), include_file=dict( - argstr='-include %s', - extensions=None, - xor=['include_file', 'include_spec'], + argstr="-include %s", extensions=None, xor=["include_file", "include_spec"], ), include_spec=dict( - argstr='-include %s', + argstr="-include %s", position=2, - sep=',', - units='mm', - xor=['include_file', 'include_spec'], - ), - initial_cutoff_value=dict( - argstr='-initcutoff %s', - units='NA', - ), - initial_direction=dict( - argstr='-initdirection %s', - units='voxels', - ), - inputmodel=dict( - argstr='%s', - position=-3, - usedefault=True, + sep=",", + units="mm", + xor=["include_file", "include_spec"], ), + initial_cutoff_value=dict(argstr="-initcutoff %s", units="NA",), + initial_direction=dict(argstr="-initdirection %s", units="voxels",), + inputmodel=dict(argstr="%s", position=-3, usedefault=True,), mask_file=dict( - argstr='-mask %s', - extensions=None, - xor=['mask_file', 'mask_spec'], + argstr="-mask %s", extensions=None, xor=["mask_file", "mask_spec"], ), mask_spec=dict( - argstr='-mask %s', + argstr="-mask %s", position=2, - sep=',', - units='mm', - xor=['mask_file', 'mask_spec'], - ), - maximum_number_of_tracks=dict(argstr='-maxnum %d', ), - maximum_number_of_trials=dict(argstr='-trials %s', ), - maximum_tract_length=dict( - argstr='-length %s', - units='mm', - ), - minimum_radius_of_curvature=dict( - argstr='-curvature %s', - units='mm', - ), - minimum_tract_length=dict( - argstr='-minlength %s', - units='mm', - ), - no_mask_interpolation=dict(argstr='-nomaskinterp', ), + sep=",", + units="mm", + xor=["mask_file", "mask_spec"], + ), + maximum_number_of_tracks=dict(argstr="-maxnum %d",), + maximum_number_of_trials=dict(argstr="-trials %s",), + maximum_tract_length=dict(argstr="-length %s", units="mm",), + minimum_radius_of_curvature=dict(argstr="-curvature %s", units="mm",), + minimum_tract_length=dict(argstr="-minlength %s", units="mm",), + no_mask_interpolation=dict(argstr="-nomaskinterp",), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, - name_source=['in_file'], - name_template='%s_tracked.tck', - output_name='tracked', + name_source=["in_file"], + name_template="%s_tracked.tck", + output_name="tracked", position=-1, ), seed_file=dict( - argstr='-seed %s', - extensions=None, - xor=['seed_file', 'seed_spec'], + argstr="-seed %s", extensions=None, xor=["seed_file", "seed_spec"], ), seed_spec=dict( - argstr='-seed %s', + argstr="-seed %s", position=2, - sep=',', - units='mm', - xor=['seed_file', 'seed_spec'], - ), - step_size=dict( - argstr='-step %s', - units='mm', + sep=",", + units="mm", + xor=["seed_file", 
"seed_spec"], ), - stop=dict(argstr='-stop', ), - unidirectional=dict(argstr='-unidirectional', ), + step_size=dict(argstr="-step %s", units="mm",), + stop=dict(argstr="-stop",), + unidirectional=dict(argstr="-unidirectional",), ) inputs = ProbabilisticSphericallyDeconvolutedStreamlineTrack.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ProbabilisticSphericallyDeconvolutedStreamlineTrack_outputs(): - output_map = dict(tracked=dict(extensions=None, ), ) + output_map = dict(tracked=dict(extensions=None,),) outputs = ProbabilisticSphericallyDeconvolutedStreamlineTrack.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_SphericallyDeconvolutedStreamlineTrack.py b/nipype/interfaces/mrtrix/tests/test_auto_SphericallyDeconvolutedStreamlineTrack.py index 3a1c806453..bc32741331 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_SphericallyDeconvolutedStreamlineTrack.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_SphericallyDeconvolutedStreamlineTrack.py @@ -4,120 +4,81 @@ def test_SphericallyDeconvolutedStreamlineTrack_inputs(): input_map = dict( - args=dict(argstr='%s', ), - cutoff_value=dict( - argstr='-cutoff %s', - units='NA', - ), - desired_number_of_tracks=dict(argstr='-number %d', ), - do_not_precompute=dict(argstr='-noprecomputed', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + cutoff_value=dict(argstr="-cutoff %s", units="NA",), + desired_number_of_tracks=dict(argstr="-number %d",), + do_not_precompute=dict(argstr="-noprecomputed",), + environ=dict(nohash=True, usedefault=True,), exclude_file=dict( - argstr='-exclude %s', - extensions=None, - xor=['exclude_file', 'exclude_spec'], + argstr="-exclude %s", extensions=None, xor=["exclude_file", "exclude_spec"], ), exclude_spec=dict( - argstr='-exclude %s', + argstr="-exclude %s", position=2, - sep=',', - units='mm', - xor=['exclude_file', 'exclude_spec'], - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, + sep=",", + units="mm", + xor=["exclude_file", "exclude_spec"], ), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), include_file=dict( - argstr='-include %s', - extensions=None, - xor=['include_file', 'include_spec'], + argstr="-include %s", extensions=None, xor=["include_file", "include_spec"], ), include_spec=dict( - argstr='-include %s', + argstr="-include %s", position=2, - sep=',', - units='mm', - xor=['include_file', 'include_spec'], - ), - initial_cutoff_value=dict( - argstr='-initcutoff %s', - units='NA', - ), - initial_direction=dict( - argstr='-initdirection %s', - units='voxels', - ), - inputmodel=dict( - argstr='%s', - position=-3, - usedefault=True, + sep=",", + units="mm", + xor=["include_file", "include_spec"], ), + initial_cutoff_value=dict(argstr="-initcutoff %s", units="NA",), + initial_direction=dict(argstr="-initdirection %s", units="voxels",), + inputmodel=dict(argstr="%s", position=-3, usedefault=True,), mask_file=dict( - argstr='-mask %s', - extensions=None, - xor=['mask_file', 'mask_spec'], + argstr="-mask %s", extensions=None, xor=["mask_file", "mask_spec"], ), mask_spec=dict( - argstr='-mask %s', + argstr="-mask %s", position=2, - sep=',', - units='mm', - xor=['mask_file', 'mask_spec'], - ), - maximum_number_of_tracks=dict(argstr='-maxnum %d', ), - maximum_tract_length=dict( - argstr='-length %s', - units='mm', - ), - 
minimum_radius_of_curvature=dict( - argstr='-curvature %s', - units='mm', - ), - minimum_tract_length=dict( - argstr='-minlength %s', - units='mm', - ), - no_mask_interpolation=dict(argstr='-nomaskinterp', ), + sep=",", + units="mm", + xor=["mask_file", "mask_spec"], + ), + maximum_number_of_tracks=dict(argstr="-maxnum %d",), + maximum_tract_length=dict(argstr="-length %s", units="mm",), + minimum_radius_of_curvature=dict(argstr="-curvature %s", units="mm",), + minimum_tract_length=dict(argstr="-minlength %s", units="mm",), + no_mask_interpolation=dict(argstr="-nomaskinterp",), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, - name_source=['in_file'], - name_template='%s_tracked.tck', - output_name='tracked', + name_source=["in_file"], + name_template="%s_tracked.tck", + output_name="tracked", position=-1, ), seed_file=dict( - argstr='-seed %s', - extensions=None, - xor=['seed_file', 'seed_spec'], + argstr="-seed %s", extensions=None, xor=["seed_file", "seed_spec"], ), seed_spec=dict( - argstr='-seed %s', + argstr="-seed %s", position=2, - sep=',', - units='mm', - xor=['seed_file', 'seed_spec'], - ), - step_size=dict( - argstr='-step %s', - units='mm', + sep=",", + units="mm", + xor=["seed_file", "seed_spec"], ), - stop=dict(argstr='-stop', ), - unidirectional=dict(argstr='-unidirectional', ), + step_size=dict(argstr="-step %s", units="mm",), + stop=dict(argstr="-stop",), + unidirectional=dict(argstr="-unidirectional",), ) inputs = SphericallyDeconvolutedStreamlineTrack.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SphericallyDeconvolutedStreamlineTrack_outputs(): - output_map = dict(tracked=dict(extensions=None, ), ) + output_map = dict(tracked=dict(extensions=None,),) outputs = SphericallyDeconvolutedStreamlineTrack.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_StreamlineTrack.py b/nipype/interfaces/mrtrix/tests/test_auto_StreamlineTrack.py index 5515305a1f..bf58f3fcd0 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_StreamlineTrack.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_StreamlineTrack.py @@ -4,120 +4,81 @@ def test_StreamlineTrack_inputs(): input_map = dict( - args=dict(argstr='%s', ), - cutoff_value=dict( - argstr='-cutoff %s', - units='NA', - ), - desired_number_of_tracks=dict(argstr='-number %d', ), - do_not_precompute=dict(argstr='-noprecomputed', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + cutoff_value=dict(argstr="-cutoff %s", units="NA",), + desired_number_of_tracks=dict(argstr="-number %d",), + do_not_precompute=dict(argstr="-noprecomputed",), + environ=dict(nohash=True, usedefault=True,), exclude_file=dict( - argstr='-exclude %s', - extensions=None, - xor=['exclude_file', 'exclude_spec'], + argstr="-exclude %s", extensions=None, xor=["exclude_file", "exclude_spec"], ), exclude_spec=dict( - argstr='-exclude %s', + argstr="-exclude %s", position=2, - sep=',', - units='mm', - xor=['exclude_file', 'exclude_spec'], - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, + sep=",", + units="mm", + xor=["exclude_file", "exclude_spec"], ), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), include_file=dict( - argstr='-include %s', - extensions=None, - xor=['include_file', 'include_spec'], + argstr="-include %s", extensions=None, xor=["include_file", 
"include_spec"], ), include_spec=dict( - argstr='-include %s', + argstr="-include %s", position=2, - sep=',', - units='mm', - xor=['include_file', 'include_spec'], - ), - initial_cutoff_value=dict( - argstr='-initcutoff %s', - units='NA', - ), - initial_direction=dict( - argstr='-initdirection %s', - units='voxels', - ), - inputmodel=dict( - argstr='%s', - position=-3, - usedefault=True, + sep=",", + units="mm", + xor=["include_file", "include_spec"], ), + initial_cutoff_value=dict(argstr="-initcutoff %s", units="NA",), + initial_direction=dict(argstr="-initdirection %s", units="voxels",), + inputmodel=dict(argstr="%s", position=-3, usedefault=True,), mask_file=dict( - argstr='-mask %s', - extensions=None, - xor=['mask_file', 'mask_spec'], + argstr="-mask %s", extensions=None, xor=["mask_file", "mask_spec"], ), mask_spec=dict( - argstr='-mask %s', + argstr="-mask %s", position=2, - sep=',', - units='mm', - xor=['mask_file', 'mask_spec'], - ), - maximum_number_of_tracks=dict(argstr='-maxnum %d', ), - maximum_tract_length=dict( - argstr='-length %s', - units='mm', - ), - minimum_radius_of_curvature=dict( - argstr='-curvature %s', - units='mm', - ), - minimum_tract_length=dict( - argstr='-minlength %s', - units='mm', - ), - no_mask_interpolation=dict(argstr='-nomaskinterp', ), + sep=",", + units="mm", + xor=["mask_file", "mask_spec"], + ), + maximum_number_of_tracks=dict(argstr="-maxnum %d",), + maximum_tract_length=dict(argstr="-length %s", units="mm",), + minimum_radius_of_curvature=dict(argstr="-curvature %s", units="mm",), + minimum_tract_length=dict(argstr="-minlength %s", units="mm",), + no_mask_interpolation=dict(argstr="-nomaskinterp",), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, - name_source=['in_file'], - name_template='%s_tracked.tck', - output_name='tracked', + name_source=["in_file"], + name_template="%s_tracked.tck", + output_name="tracked", position=-1, ), seed_file=dict( - argstr='-seed %s', - extensions=None, - xor=['seed_file', 'seed_spec'], + argstr="-seed %s", extensions=None, xor=["seed_file", "seed_spec"], ), seed_spec=dict( - argstr='-seed %s', + argstr="-seed %s", position=2, - sep=',', - units='mm', - xor=['seed_file', 'seed_spec'], - ), - step_size=dict( - argstr='-step %s', - units='mm', + sep=",", + units="mm", + xor=["seed_file", "seed_spec"], ), - stop=dict(argstr='-stop', ), - unidirectional=dict(argstr='-unidirectional', ), + step_size=dict(argstr="-step %s", units="mm",), + stop=dict(argstr="-stop",), + unidirectional=dict(argstr="-unidirectional",), ) inputs = StreamlineTrack.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_StreamlineTrack_outputs(): - output_map = dict(tracked=dict(extensions=None, ), ) + output_map = dict(tracked=dict(extensions=None,),) outputs = StreamlineTrack.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_Tensor2ApparentDiffusion.py b/nipype/interfaces/mrtrix/tests/test_auto_Tensor2ApparentDiffusion.py index 088760ae85..da5225cc42 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_Tensor2ApparentDiffusion.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_Tensor2ApparentDiffusion.py @@ -4,39 +4,22 @@ def test_Tensor2ApparentDiffusion_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debug=dict( - argstr='-debug', - position=1, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - 
extensions=None, - mandatory=True, - position=-2, - ), - out_filename=dict( - argstr='%s', - extensions=None, - genfile=True, - position=-1, - ), - quiet=dict( - argstr='-quiet', - position=1, - ), + args=dict(argstr="%s",), + debug=dict(argstr="-debug", position=1,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + out_filename=dict(argstr="%s", extensions=None, genfile=True, position=-1,), + quiet=dict(argstr="-quiet", position=1,), ) inputs = Tensor2ApparentDiffusion.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Tensor2ApparentDiffusion_outputs(): - output_map = dict(ADC=dict(extensions=None, ), ) + output_map = dict(ADC=dict(extensions=None,),) outputs = Tensor2ApparentDiffusion.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_Tensor2FractionalAnisotropy.py b/nipype/interfaces/mrtrix/tests/test_auto_Tensor2FractionalAnisotropy.py index 376755ef8f..8f9937b550 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_Tensor2FractionalAnisotropy.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_Tensor2FractionalAnisotropy.py @@ -4,39 +4,22 @@ def test_Tensor2FractionalAnisotropy_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debug=dict( - argstr='-debug', - position=1, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - out_filename=dict( - argstr='%s', - extensions=None, - genfile=True, - position=-1, - ), - quiet=dict( - argstr='-quiet', - position=1, - ), + args=dict(argstr="%s",), + debug=dict(argstr="-debug", position=1,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + out_filename=dict(argstr="%s", extensions=None, genfile=True, position=-1,), + quiet=dict(argstr="-quiet", position=1,), ) inputs = Tensor2FractionalAnisotropy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Tensor2FractionalAnisotropy_outputs(): - output_map = dict(FA=dict(extensions=None, ), ) + output_map = dict(FA=dict(extensions=None,),) outputs = Tensor2FractionalAnisotropy.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_Tensor2Vector.py b/nipype/interfaces/mrtrix/tests/test_auto_Tensor2Vector.py index 93b1a2cf7f..dcc58860a4 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_Tensor2Vector.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_Tensor2Vector.py @@ -4,39 +4,22 @@ def test_Tensor2Vector_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debug=dict( - argstr='-debug', - position=1, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - out_filename=dict( - argstr='%s', - extensions=None, - genfile=True, - position=-1, - ), - quiet=dict( - argstr='-quiet', - position=1, - ), + args=dict(argstr="%s",), + debug=dict(argstr="-debug", position=1,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + out_filename=dict(argstr="%s", extensions=None, genfile=True, position=-1,), + quiet=dict(argstr="-quiet", 
position=1,), ) inputs = Tensor2Vector.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Tensor2Vector_outputs(): - output_map = dict(vector=dict(extensions=None, ), ) + output_map = dict(vector=dict(extensions=None,),) outputs = Tensor2Vector.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_Threshold.py b/nipype/interfaces/mrtrix/tests/test_auto_Threshold.py index 1edaf3cc24..b1e9a27016 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_Threshold.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_Threshold.py @@ -4,49 +4,26 @@ def test_Threshold_inputs(): input_map = dict( - absolute_threshold_value=dict(argstr='-abs %s', ), - args=dict(argstr='%s', ), - debug=dict( - argstr='-debug', - position=1, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - invert=dict( - argstr='-invert', - position=1, - ), - out_filename=dict( - argstr='%s', - extensions=None, - genfile=True, - position=-1, - ), - percentage_threshold_value=dict(argstr='-percent %s', ), - quiet=dict( - argstr='-quiet', - position=1, - ), - replace_zeros_with_NaN=dict( - argstr='-nan', - position=1, - ), + absolute_threshold_value=dict(argstr="-abs %s",), + args=dict(argstr="%s",), + debug=dict(argstr="-debug", position=1,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + invert=dict(argstr="-invert", position=1,), + out_filename=dict(argstr="%s", extensions=None, genfile=True, position=-1,), + percentage_threshold_value=dict(argstr="-percent %s",), + quiet=dict(argstr="-quiet", position=1,), + replace_zeros_with_NaN=dict(argstr="-nan", position=1,), ) inputs = Threshold.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Threshold_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Threshold.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tests/test_auto_Tracks2Prob.py b/nipype/interfaces/mrtrix/tests/test_auto_Tracks2Prob.py index 55b320809e..b8bc425de7 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_Tracks2Prob.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_Tracks2Prob.py @@ -4,58 +4,26 @@ def test_Tracks2Prob_inputs(): input_map = dict( - args=dict(argstr='%s', ), - colour=dict( - argstr='-colour', - position=3, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fraction=dict( - argstr='-fraction', - position=3, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - out_filename=dict( - argstr='%s', - extensions=None, - genfile=True, - position=-1, - ), - output_datatype=dict( - argstr='-datatype %s', - position=2, - ), - resample=dict( - argstr='-resample %d', - position=3, - units='mm', - ), - template_file=dict( - argstr='-template %s', - extensions=None, - position=1, - ), - voxel_dims=dict( - argstr='-vox %s', - position=2, - sep=',', - ), + args=dict(argstr="%s",), + colour=dict(argstr="-colour", position=3,), + environ=dict(nohash=True, usedefault=True,), + fraction=dict(argstr="-fraction", position=3,), + in_file=dict(argstr="%s", extensions=None, 
mandatory=True, position=-2,), + out_filename=dict(argstr="%s", extensions=None, genfile=True, position=-1,), + output_datatype=dict(argstr="-datatype %s", position=2,), + resample=dict(argstr="-resample %d", position=3, units="mm",), + template_file=dict(argstr="-template %s", extensions=None, position=1,), + voxel_dims=dict(argstr="-vox %s", position=2, sep=",",), ) inputs = Tracks2Prob.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Tracks2Prob_outputs(): - output_map = dict(tract_image=dict(extensions=None, ), ) + output_map = dict(tract_image=dict(extensions=None,),) outputs = Tracks2Prob.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix/tracking.py b/nipype/interfaces/mrtrix/tracking.py index f115841482..c922c4fba4 100644 --- a/nipype/interfaces/mrtrix/tracking.py +++ b/nipype/interfaces/mrtrix/tracking.py @@ -6,84 +6,90 @@ import os.path as op from ...utils.filemanip import split_filename -from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec, - File, isdefined) +from ..base import ( + CommandLineInputSpec, + CommandLine, + traits, + TraitedSpec, + File, + isdefined, +) class FilterTracksInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='input tracks to be filtered') - include_xor = ['include_file', 'include_spec'] + desc="input tracks to be filtered", + ) + include_xor = ["include_file", "include_spec"] include_file = File( - exists=True, - argstr='-include %s', - desc='inclusion file', - xor=include_xor) + exists=True, argstr="-include %s", desc="inclusion file", xor=include_xor + ) include_spec = traits.List( traits.Float, - desc='inclusion specification in mm and radius (x y z r)', + desc="inclusion specification in mm and radius (x y z r)", position=2, - argstr='-include %s', + argstr="-include %s", minlen=4, maxlen=4, - sep=',', - units='mm', - xor=include_xor) + sep=",", + units="mm", + xor=include_xor, + ) - exclude_xor = ['exclude_file', 'exclude_spec'] + exclude_xor = ["exclude_file", "exclude_spec"] exclude_file = File( - exists=True, - argstr='-exclude %s', - desc='exclusion file', - xor=exclude_xor) + exists=True, argstr="-exclude %s", desc="exclusion file", xor=exclude_xor + ) exclude_spec = traits.List( traits.Float, - desc='exclusion specification in mm and radius (x y z r)', + desc="exclusion specification in mm and radius (x y z r)", position=2, - argstr='-exclude %s', + argstr="-exclude %s", minlen=4, maxlen=4, - sep=',', - units='mm', - xor=exclude_xor) + sep=",", + units="mm", + xor=exclude_xor, + ) minimum_tract_length = traits.Float( - argstr='-minlength %s', - units='mm', - desc= - "Sets the minimum length of any track in millimeters (default is 10 mm)." + argstr="-minlength %s", + units="mm", + desc="Sets the minimum length of any track in millimeters (default is 10 mm).", ) out_file = File( - argstr='%s', + argstr="%s", position=-1, - desc='Output filtered track filename', - name_source=['in_file'], + desc="Output filtered track filename", + name_source=["in_file"], hash_files=False, - name_template='%s_filt') + name_template="%s_filt", + ) no_mask_interpolation = traits.Bool( - argstr='-nomaskinterp', - desc="Turns off trilinear interpolation of mask images.") + argstr="-nomaskinterp", desc="Turns off trilinear interpolation of mask images." 
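The tracking.py hunks reformat specs built on two nipype conventions worth keeping in mind while reading them: paired *_file/*_spec inputs tied together by an xor list (setting both is rejected), and default output names derived via name_source/name_template. A hedged sketch of driving FilterTracks, assuming nipype is importable and that the hypothetical tracks.tck exists on disk (in_file is declared with exists=True, so assignment validates the path); rendering .cmdline does not require the MRtrix binary:

    # Illustrative sketch only, not part of the patch.
    import nipype.interfaces.mrtrix as mrt

    filt = mrt.FilterTracks(in_file="tracks.tck")
    # include_spec and include_file share an xor group, so only one may be
    # set; sep="," joins the four floats into a single -include token
    filt.inputs.include_spec = [4.0, -8.0, 2.2, 10.0]  # x, y, z, r (mm)
    # out_file defaults through name_source=["in_file"] and
    # name_template="%s_filt", so no output name needs to be given
    print(filt.cmdline)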
+ ) invert = traits.Bool( - argstr='-invert', + argstr="-invert", desc="invert the matching process, so that tracks that would" - "otherwise have been included are now excluded and vice-versa.") + " otherwise have been included are now excluded and vice versa.", + ) quiet = traits.Bool( - argstr='-quiet', + argstr="-quiet", position=1, - desc="Do not display information messages or progress status.") + desc="Do not display information messages or progress status.", + ) - debug = traits.Bool( - argstr='-debug', position=1, desc="Display debugging messages.") + debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class FilterTracksOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output filtered tracks') + out_file = File(exists=True, desc="the output filtered tracks") class FilterTracks(CommandLine): @@ -100,45 +106,39 @@ class FilterTracks(CommandLine): >>> filt.run() # doctest: +SKIP """ - _cmd = 'filter_tracks' + _cmd = "filter_tracks" input_spec = FilterTracksInputSpec output_spec = FilterTracksOutputSpec class Tracks2ProbInputSpec(CommandLineInputSpec): in_file = File( - exists=True, - argstr='%s', - mandatory=True, - position=-2, - desc='tract file') + exists=True, argstr="%s", mandatory=True, position=-2, desc="tract file" + ) template_file = File( exists=True, - argstr='-template %s', + argstr="-template %s", position=1, - desc= - 'an image file to be used as a template for the output (the output image wil have the same transform and field of view)' + desc="an image file to be used as a template for the output (the output image will have the same transform and field of view)", ) voxel_dims = traits.List( traits.Float, - argstr='-vox %s', - sep=',', + argstr="-vox %s", + sep=",", position=2, minlen=3, maxlen=3, - desc= - 'Three comma-separated numbers giving the size of each voxel in mm.') + desc="Three comma-separated numbers giving the size of each voxel in mm.", + ) colour = traits.Bool( - argstr='-colour', + argstr="-colour", position=3, - desc= - "add colour to the output image according to the direction of the tracks." + desc="add colour to the output image according to the direction of the tracks.", ) fraction = traits.Bool( - argstr='-fraction', + argstr="-fraction", position=3, - desc= - "produce an image of the fraction of fibres through each voxel (as a proportion of the total number in the file), rather than the count." + desc="produce an image of the fraction of fibres through each voxel (as a proportion of the total number in the file), rather than the count.", ) output_datatype = traits.Enum( "Bit", @@ -150,25 +150,21 @@ class Tracks2ProbInputSpec(CommandLineInputSpec): "UInt32", "float32", "float64", - argstr='-datatype %s', + argstr="-datatype %s", position=2, - desc= - '"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"' + desc='"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"', ) # , usedefault=True) resample = traits.Float( - argstr='-resample %d', + argstr="-resample %d", position=3, - units='mm', - desc= - 'resample the tracks at regular intervals using Hermite interpolation. 
If omitted, the program will select an appropriate interpolation factor automatically.' + units="mm", + desc="resample the tracks at regular intervals using Hermite interpolation. If omitted, the program will select an appropriate interpolation factor automatically.", ) - out_filename = File( - genfile=True, argstr='%s', position=-1, desc='output data file') + out_filename = File(genfile=True, argstr="%s", position=-1, desc="output data file") class Tracks2ProbOutputSpec(TraitedSpec): - tract_image = File( - exists=True, desc='Output tract count or track density image') + tract_image = File(exists=True, desc="Output tract count or track density image") class Tracks2Prob(CommandLine): @@ -188,199 +184,197 @@ class Tracks2Prob(CommandLine): >>> tdi.run() # doctest: +SKIP """ - _cmd = 'tracks2prob' + _cmd = "tracks2prob" input_spec = Tracks2ProbInputSpec output_spec = Tracks2ProbOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['tract_image'] = self.inputs.out_filename - if not isdefined(outputs['tract_image']): - outputs['tract_image'] = op.abspath(self._gen_outfilename()) + outputs["tract_image"] = self.inputs.out_filename + if not isdefined(outputs["tract_image"]): + outputs["tract_image"] = op.abspath(self._gen_outfilename()) else: - outputs['tract_image'] = os.path.abspath(outputs['tract_image']) + outputs["tract_image"] = os.path.abspath(outputs["tract_image"]) return outputs def _gen_filename(self, name): - if name == 'out_filename': + if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_TDI.mif' + return name + "_TDI.mif" class StreamlineTrackInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='the image containing the source data.' - 'The type of data required depends on the type of tracking as set in the preceeding argument. For DT methods, ' - 'the base DWI are needed. For SD methods, the SH harmonic coefficients of the FOD are needed.' + desc="the image containing the source data." + " The type of data required depends on the type of tracking as set in the preceding argument. For DT methods, " + "the base DWI are needed. 
For SD methods, the SH harmonic coefficients of the FOD are needed.", ) - seed_xor = ['seed_file', 'seed_spec'] - seed_file = File( - exists=True, argstr='-seed %s', desc='seed file', xor=seed_xor) + seed_xor = ["seed_file", "seed_spec"] + seed_file = File(exists=True, argstr="-seed %s", desc="seed file", xor=seed_xor) seed_spec = traits.List( traits.Float, - desc='seed specification in mm and radius (x y z r)', + desc="seed specification in mm and radius (x y z r)", position=2, - argstr='-seed %s', + argstr="-seed %s", minlen=4, maxlen=4, - sep=',', - units='mm', - xor=seed_xor) + sep=",", + units="mm", + xor=seed_xor, + ) - include_xor = ['include_file', 'include_spec'] + include_xor = ["include_file", "include_spec"] include_file = File( - exists=True, - argstr='-include %s', - desc='inclusion file', - xor=include_xor) + exists=True, argstr="-include %s", desc="inclusion file", xor=include_xor + ) include_spec = traits.List( traits.Float, - desc='inclusion specification in mm and radius (x y z r)', + desc="inclusion specification in mm and radius (x y z r)", position=2, - argstr='-include %s', + argstr="-include %s", minlen=4, maxlen=4, - sep=',', - units='mm', - xor=include_xor) + sep=",", + units="mm", + xor=include_xor, + ) - exclude_xor = ['exclude_file', 'exclude_spec'] + exclude_xor = ["exclude_file", "exclude_spec"] exclude_file = File( - exists=True, - argstr='-exclude %s', - desc='exclusion file', - xor=exclude_xor) + exists=True, argstr="-exclude %s", desc="exclusion file", xor=exclude_xor + ) exclude_spec = traits.List( traits.Float, - desc='exclusion specification in mm and radius (x y z r)', + desc="exclusion specification in mm and radius (x y z r)", position=2, - argstr='-exclude %s', + argstr="-exclude %s", minlen=4, maxlen=4, - sep=',', - units='mm', - xor=exclude_xor) + sep=",", + units="mm", + xor=exclude_xor, + ) - mask_xor = ['mask_file', 'mask_spec'] + mask_xor = ["mask_file", "mask_spec"] mask_file = File( exists=True, - argstr='-mask %s', - desc='mask file. Only tracks within mask.', - xor=mask_xor) + argstr="-mask %s", + desc="mask file. Only tracks within mask.", + xor=mask_xor, + ) mask_spec = traits.List( traits.Float, - desc= - 'Mask specification in mm and radius (x y z r). Tracks will be terminated when they leave the ROI.', + desc="Mask specification in mm and radius (x y z r). Tracks will be terminated when they leave the ROI.", position=2, - argstr='-mask %s', + argstr="-mask %s", minlen=4, maxlen=4, - sep=',', - units='mm', - xor=mask_xor) + sep=",", + units="mm", + xor=mask_xor, + ) inputmodel = traits.Enum( - 'DT_STREAM', - 'SD_PROB', - 'SD_STREAM', - argstr='%s', - desc='input model type', + "DT_STREAM", + "SD_PROB", + "SD_STREAM", + argstr="%s", + desc="input model type", usedefault=True, - position=-3) + position=-3, + ) stop = traits.Bool( - argstr='-stop', - desc="stop track as soon as it enters any of the include regions.") + argstr="-stop", + desc="stop track as soon as it enters any of the include regions.", + ) do_not_precompute = traits.Bool( - argstr='-noprecomputed', - desc= - "Turns off precomputation of the legendre polynomial values. Warning: this will slow down the algorithm by a factor of approximately 4." + argstr="-noprecomputed", + desc="Turns off precomputation of the legendre polynomial values. Warning: this will slow down the algorithm by a factor of approximately 4.", ) unidirectional = traits.Bool( - argstr='-unidirectional', - desc= - "Track from the seed point in one direction only (default is to track in both directions)." 
+ argstr="-unidirectional", + desc="Track from the seed point in one direction only (default is to track in both directions).", ) no_mask_interpolation = traits.Bool( - argstr='-nomaskinterp', - desc="Turns off trilinear interpolation of mask images.") + argstr="-nomaskinterp", desc="Turns off trilinear interpolation of mask images." + ) step_size = traits.Float( - argstr='-step %s', - units='mm', - desc="Set the step size of the algorithm in mm (default is 0.2).") + argstr="-step %s", + units="mm", + desc="Set the step size of the algorithm in mm (default is 0.2).", + ) minimum_radius_of_curvature = traits.Float( - argstr='-curvature %s', - units='mm', - desc= - "Set the minimum radius of curvature (default is 2 mm for DT_STREAM, 0 for SD_STREAM, 1 mm for SD_PROB and DT_PROB)" + argstr="-curvature %s", + units="mm", + desc="Set the minimum radius of curvature (default is 2 mm for DT_STREAM, 0 for SD_STREAM, 1 mm for SD_PROB and DT_PROB)", ) desired_number_of_tracks = traits.Int( - argstr='-number %d', - desc='Sets the desired number of tracks.' - 'The program will continue to generate tracks until this number of tracks have been selected and written to the output file' - '(default is 100 for *_STREAM methods, 1000 for *_PROB methods).') + argstr="-number %d", + desc="Sets the desired number of tracks." + "The program will continue to generate tracks until this number of tracks have been selected and written to the output file" + "(default is 100 for *_STREAM methods, 1000 for *_PROB methods).", + ) maximum_number_of_tracks = traits.Int( - argstr='-maxnum %d', - desc='Sets the maximum number of tracks to generate.' + argstr="-maxnum %d", + desc="Sets the maximum number of tracks to generate." "The program will not generate more tracks than this number, even if the desired number of tracks hasn't yet been reached" - '(default is 100 x number).') + "(default is 100 x number).", + ) minimum_tract_length = traits.Float( - argstr='-minlength %s', - units='mm', - desc= - "Sets the minimum length of any track in millimeters (default is 10 mm)." + argstr="-minlength %s", + units="mm", + desc="Sets the minimum length of any track in millimeters (default is 10 mm).", ) maximum_tract_length = traits.Float( - argstr='-length %s', - units='mm', - desc= - "Sets the maximum length of any track in millimeters (default is 200 mm)." + argstr="-length %s", + units="mm", + desc="Sets the maximum length of any track in millimeters (default is 200 mm).", ) cutoff_value = traits.Float( - argstr='-cutoff %s', - units='NA', - desc= - "Set the FA or FOD amplitude cutoff for terminating tracks (default is 0.1)." + argstr="-cutoff %s", + units="NA", + desc="Set the FA or FOD amplitude cutoff for terminating tracks (default is 0.1).", ) initial_cutoff_value = traits.Float( - argstr='-initcutoff %s', - units='NA', - desc= - "Sets the minimum FA or FOD amplitude for initiating tracks (default is twice the normal cutoff)." 
+ argstr="-initcutoff %s", + units="NA", + desc="Sets the minimum FA or FOD amplitude for initiating tracks (default is twice the normal cutoff).", ) initial_direction = traits.List( traits.Int, - desc='Specify the initial tracking direction as a vector', - argstr='-initdirection %s', + desc="Specify the initial tracking direction as a vector", + argstr="-initdirection %s", minlen=2, maxlen=2, - units='voxels') + units="voxels", + ) out_file = File( - argstr='%s', + argstr="%s", position=-1, - name_source=['in_file'], - name_template='%s_tracked.tck', - output_name='tracked', - desc='output data file') + name_source=["in_file"], + name_template="%s_tracked.tck", + output_name="tracked", + desc="output data file", + ) class StreamlineTrackOutputSpec(TraitedSpec): - tracked = File( - exists=True, desc='output file containing reconstructed tracts') + tracked = File(exists=True, desc="output file containing reconstructed tracts") class StreamlineTrack(CommandLine): @@ -403,7 +397,8 @@ class StreamlineTrack(CommandLine): 'streamtrack -mask mask.nii -seed seed_mask.nii SD_PROB data.Bfloat data_tracked.tck' >>> strack.run() # doctest: +SKIP """ - _cmd = 'streamtrack' + + _cmd = "streamtrack" input_spec = StreamlineTrackInputSpec output_spec = StreamlineTrackOutputSpec @@ -411,11 +406,10 @@ class StreamlineTrack(CommandLine): class DiffusionTensorStreamlineTrackInputSpec(StreamlineTrackInputSpec): gradient_encoding_file = File( exists=True, - argstr='-grad %s', + argstr="-grad %s", mandatory=True, position=-2, - desc= - 'Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix' + desc="Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix", ) @@ -439,16 +433,15 @@ class DiffusionTensorStreamlineTrack(StreamlineTrack): def __init__(self, command=None, **inputs): inputs["inputmodel"] = "DT_STREAM" - return super(DiffusionTensorStreamlineTrack, self).__init__( - command, **inputs) + return super(DiffusionTensorStreamlineTrack, self).__init__(command, **inputs) class ProbabilisticSphericallyDeconvolutedStreamlineTrackInputSpec( - StreamlineTrackInputSpec): + StreamlineTrackInputSpec +): maximum_number_of_trials = traits.Int( - argstr='-trials %s', - desc= - "Set the maximum number of sampling trials at each point (only used for probabilistic tracking)." 
+ argstr="-trials %s", + desc="Set the maximum number of sampling trials at each point (only used for probabilistic tracking).", ) @@ -469,12 +462,14 @@ class ProbabilisticSphericallyDeconvolutedStreamlineTrack(StreamlineTrack): >>> sdprobtrack.inputs.seed_file = 'seed_mask.nii' >>> sdprobtrack.run() # doctest: +SKIP """ + input_spec = ProbabilisticSphericallyDeconvolutedStreamlineTrackInputSpec def __init__(self, command=None, **inputs): inputs["inputmodel"] = "SD_PROB" - return super(ProbabilisticSphericallyDeconvolutedStreamlineTrack, - self).__init__(command, **inputs) + return super( + ProbabilisticSphericallyDeconvolutedStreamlineTrack, self + ).__init__(command, **inputs) class SphericallyDeconvolutedStreamlineTrack(StreamlineTrack): @@ -494,9 +489,11 @@ class SphericallyDeconvolutedStreamlineTrack(StreamlineTrack): >>> sdtrack.inputs.seed_file = 'seed_mask.nii' >>> sdtrack.run() # doctest: +SKIP """ + input_spec = StreamlineTrackInputSpec def __init__(self, command=None, **inputs): inputs["inputmodel"] = "SD_STREAM" return super(SphericallyDeconvolutedStreamlineTrack, self).__init__( - command, **inputs) + command, **inputs + ) diff --git a/nipype/interfaces/mrtrix3/__init__.py b/nipype/interfaces/mrtrix3/__init__.py index 9fb4311730..0dbe3bb872 100644 --- a/nipype/interfaces/mrtrix3/__init__.py +++ b/nipype/interfaces/mrtrix3/__init__.py @@ -2,11 +2,26 @@ # vi: set ft=python sts=4 ts=4 sw=4 et: # -*- coding: utf-8 -*- -from .utils import (Mesh2PVE, Generate5tt, BrainMask, TensorMetrics, - ComputeTDI, TCK2VTK, MRMath, MRConvert, MRResize, - DWIExtract) -from .preprocess import (ResponseSD, ACTPrepareFSL, ReplaceFSwithFIRST, - DWIDenoise, MRDeGibbs, DWIBiasCorrect) +from .utils import ( + Mesh2PVE, + Generate5tt, + BrainMask, + TensorMetrics, + ComputeTDI, + TCK2VTK, + MRMath, + MRConvert, + MRResize, + DWIExtract, +) +from .preprocess import ( + ResponseSD, + ACTPrepareFSL, + ReplaceFSwithFIRST, + DWIDenoise, + MRDeGibbs, + DWIBiasCorrect, +) from .tracking import Tractography from .reconst import FitTensor, EstimateFOD from .connectivity import LabelConfig, LabelConvert, BuildConnectome diff --git a/nipype/interfaces/mrtrix3/base.py b/nipype/interfaces/mrtrix3/base.py index 53a9a2284e..15b208b4cd 100644 --- a/nipype/interfaces/mrtrix3/base.py +++ b/nipype/interfaces/mrtrix3/base.py @@ -4,27 +4,35 @@ from ... 
import logging, LooseVersion from ...utils.filemanip import which -from ..base import (CommandLineInputSpec, CommandLine, traits, File, isdefined, PackageInfo) -iflogger = logging.getLogger('nipype.interface') +from ..base import ( + CommandLineInputSpec, + CommandLine, + traits, + File, + isdefined, + PackageInfo, +) + +iflogger = logging.getLogger("nipype.interface") class Info(PackageInfo): - version_cmd = 'mrconvert --version' + version_cmd = "mrconvert --version" @staticmethod def parse_version(raw_info): # info is like: "== mrconvert 0.3.15-githash" for line in raw_info.splitlines(): - if line.startswith('== mrconvert '): + if line.startswith("== mrconvert "): v_string = line.split()[2] break else: return None # -githash may or may not be appended - v_string = v_string.split('-')[0] + v_string = v_string.split("-")[0] - return '.'.join(v_string.split('.')[:3]) + return ".".join(v_string.split(".")[:3]) @classmethod def looseversion(cls): @@ -32,56 +40,61 @@ def looseversion(cls): If no version found, use LooseVersion('0.0.0') """ - return LooseVersion(cls.version() or '0.0.0') + return LooseVersion(cls.version() or "0.0.0") class MRTrix3BaseInputSpec(CommandLineInputSpec): nthreads = traits.Int( - argstr='-nthreads %d', - desc='number of threads. if zero, the number' - ' of available cpus will be used', - nohash=True) + argstr="-nthreads %d", + desc="number of threads. if zero, the number" " of available cpus will be used", + nohash=True, + ) # DW gradient table import options grad_file = File( exists=True, - argstr='-grad %s', - desc='dw gradient scheme (MRTrix format)', - xor=['grad_fsl']) + argstr="-grad %s", + desc="dw gradient scheme (MRTrix format)", + xor=["grad_fsl"], + ) grad_fsl = traits.Tuple( File(exists=True), File(exists=True), - argstr='-fslgrad %s %s', - desc='(bvecs, bvals) dw gradient scheme (FSL format)', - xor=['grad_file']) + argstr="-fslgrad %s %s", + desc="(bvecs, bvals) dw gradient scheme (FSL format)", + xor=["grad_file"], + ) bval_scale = traits.Enum( - 'yes', - 'no', - argstr='-bvalue_scaling %s', - desc='specifies whether the b - values should be scaled by the square' - ' of the corresponding DW gradient norm, as often required for ' - 'multishell or DSI DW acquisition schemes. The default action ' - 'can also be set in the MRtrix config file, under the ' - 'BValueScaling entry. Valid choices are yes / no, true / ' - 'false, 0 / 1 (default: true).') + "yes", + "no", + argstr="-bvalue_scaling %s", + desc="specifies whether the b - values should be scaled by the square" + " of the corresponding DW gradient norm, as often required for " + "multishell or DSI DW acquisition schemes. The default action " + "can also be set in the MRtrix config file, under the " + "BValueScaling entry. 
Valid choices are yes / no, true / "
+        "false, 0 / 1 (default: true).",
+    )
     in_bvec = File(
-        exists=True, argstr='-fslgrad %s %s', desc='bvecs file in FSL format')
-    in_bval = File(exists=True, desc='bvals file in FSL format')
+        exists=True, argstr="-fslgrad %s %s", desc="bvecs file in FSL format"
+    )
+    in_bval = File(exists=True, desc="bvals file in FSL format")


 class MRTrix3Base(CommandLine):
     def _format_arg(self, name, trait_spec, value):
-        if name == 'nthreads' and value == 0:
+        if name == "nthreads" and value == 0:
             value = 1
             try:
                 from multiprocessing import cpu_count
+
                 value = cpu_count()
             except:
-                iflogger.warning('Number of threads could not be computed')
+                iflogger.warning("Number of threads could not be computed")
                 pass
             return trait_spec.argstr % value

-        if name == 'in_bvec':
+        if name == "in_bvec":
             return trait_spec.argstr % (value, self.inputs.in_bval)

         return super(MRTrix3Base, self)._format_arg(name, trait_spec, value)

@@ -91,17 +104,17 @@ def _parse_inputs(self, skip=None):
             skip = []

         try:
-            if (isdefined(self.inputs.grad_file)
-                    or isdefined(self.inputs.grad_fsl)):
-                skip += ['in_bvec', 'in_bval']
+            if isdefined(self.inputs.grad_file) or isdefined(self.inputs.grad_fsl):
+                skip += ["in_bvec", "in_bval"]

             is_bvec = isdefined(self.inputs.in_bvec)
             is_bval = isdefined(self.inputs.in_bval)
             if is_bvec or is_bval:
                 if not is_bvec or not is_bval:
-                    raise RuntimeError('If using bvecs and bvals inputs, both'
-                                       'should be defined')
-                skip += ['in_bval']
+                    raise RuntimeError(
+                        "If using bvecs and bvals inputs, both " "should be defined"
+                    )
+                skip += ["in_bval"]
         except AttributeError:
             pass
diff --git a/nipype/interfaces/mrtrix3/connectivity.py b/nipype/interfaces/mrtrix3/connectivity.py
index 63277d7a38..308eccd45f 100644
--- a/nipype/interfaces/mrtrix3/connectivity.py
+++ b/nipype/interfaces/mrtrix3/connectivity.py
@@ -5,92 +5,95 @@
 import os
 import os.path as op

-from ..base import (CommandLineInputSpec, traits, TraitedSpec, File, isdefined)
+from ..base import CommandLineInputSpec, traits, TraitedSpec, File, isdefined
 from .base import MRTrix3Base


 class BuildConnectomeInputSpec(CommandLineInputSpec):
     in_file = File(
-        exists=True,
-        argstr='%s',
-        mandatory=True,
-        position=-3,
-        desc='input tractography')
-    in_parc = File(
-        exists=True, argstr='%s', position=-2, desc='parcellation file')
+        exists=True, argstr="%s", mandatory=True, position=-3, desc="input tractography"
+    )
+    in_parc = File(exists=True, argstr="%s", position=-2, desc="parcellation file")
     out_file = File(
-        'connectome.csv',
-        argstr='%s',
+        "connectome.csv",
+        argstr="%s",
         mandatory=True,
         position=-1,
         usedefault=True,
-        desc='output file after processing')
+        desc="output file after processing",
+    )
     nthreads = traits.Int(
-        argstr='-nthreads %d',
-        desc='number of threads. if zero, the number'
-        ' of available cpus will be used',
-        nohash=True)
+        argstr="-nthreads %d",
+        desc="number of threads. if zero, the number" " of available cpus will be used",
+        nohash=True,
+    )
     vox_lookup = traits.Bool(
-        argstr='-assignment_voxel_lookup',
-        desc='use a simple voxel lookup value at each streamline endpoint')
+        argstr="-assignment_voxel_lookup",
+        desc="use a simple voxel lookup value at each streamline endpoint",
+    )
     search_radius = traits.Float(
-        argstr='-assignment_radial_search %f',
-        desc='perform a radial search from each streamline endpoint to locate '
-        'the nearest node. Argument is the maximum radius in mm; if no node is'
-        ' found within this radius, the streamline endpoint is not assigned to'
-        ' any node.')
+        argstr="-assignment_radial_search %f",
+        desc="perform a radial search from each streamline endpoint to locate "
+        "the nearest node. Argument is the maximum radius in mm; if no node is"
+        " found within this radius, the streamline endpoint is not assigned to"
+        " any node.",
+    )
     search_reverse = traits.Float(
-        argstr='-assignment_reverse_search %f',
-        desc='traverse from each streamline endpoint inwards along the '
-        'streamline, in search of the last node traversed by the streamline. '
-        'Argument is the maximum traversal length in mm (set to 0 to allow '
-        'search to continue to the streamline midpoint).')
+        argstr="-assignment_reverse_search %f",
+        desc="traverse from each streamline endpoint inwards along the "
+        "streamline, in search of the last node traversed by the streamline. "
+        "Argument is the maximum traversal length in mm (set to 0 to allow "
+        "search to continue to the streamline midpoint).",
+    )
     search_forward = traits.Float(
-        argstr='-assignment_forward_search %f',
-        desc='project the streamline forwards from the endpoint in search of a'
-        'parcellation node voxel. Argument is the maximum traversal length in '
-        'mm.')
+        argstr="-assignment_forward_search %f",
+        desc="project the streamline forwards from the endpoint in search of a "
+        "parcellation node voxel. Argument is the maximum traversal length in "
+        "mm.",
+    )
     metric = traits.Enum(
-        'count',
-        'meanlength',
-        'invlength',
-        'invnodevolume',
-        'mean_scalar',
-        'invlength_invnodevolume',
-        argstr='-metric %s',
-        desc='specify the edge'
-        ' weight metric')
+        "count",
+        "meanlength",
+        "invlength",
+        "invnodevolume",
+        "mean_scalar",
+        "invlength_invnodevolume",
+        argstr="-metric %s",
+        desc="specify the edge" " weight metric",
+    )
     in_scalar = File(
         exists=True,
-        argstr='-image %s',
-        desc='provide the associated image '
-        'for the mean_scalar metric')
+        argstr="-image %s",
+        desc="provide the associated image " "for the mean_scalar metric",
+    )
     in_weights = File(
         exists=True,
-        argstr='-tck_weights_in %s',
-        desc='specify a text scalar '
-        'file containing the streamline weights')
+        argstr="-tck_weights_in %s",
+        desc="specify a text scalar " "file containing the streamline weights",
+    )
     keep_unassigned = traits.Bool(
-        argstr='-keep_unassigned',
-        desc='By default, the program discards the'
-        ' information regarding those streamlines that are not successfully '
-        'assigned to a node pair. Set this option to keep these values (will '
-        'be the first row/column in the output matrix)')
+        argstr="-keep_unassigned",
+        desc="By default, the program discards the"
+        " information regarding those streamlines that are not successfully "
+        "assigned to a node pair. Set this option to keep these values (will "
+        "be the first row/column in the output matrix)",
+    )
     zero_diagonal = traits.Bool(
-        argstr='-zero_diagonal',
-        desc='set all diagonal entries in the matrix '
-        'to zero (these represent streamlines that connect to the same node at'
-        ' both ends)')
+        argstr="-zero_diagonal",
+        desc="set all diagonal entries in the matrix "
+        "to zero (these represent streamlines that connect to the same node at"
+        " both ends)",
+    )


 class BuildConnectomeOutputSpec(TraitedSpec):
-    out_file = File(exists=True, desc='the output response file')
+    out_file = File(exists=True, desc="the output connectome file")


 class BuildConnectome(MRTrix3Base):
@@ -110,69 +113,74 @@ class BuildConnectome(MRTrix3Base):
     >>> mat.run() # doctest: +SKIP
     """
-    _cmd = 'tck2connectome'
+    _cmd = "tck2connectome"
     input_spec = BuildConnectomeInputSpec
     output_spec = BuildConnectomeOutputSpec

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['out_file'] = op.abspath(self.inputs.out_file)
+        outputs["out_file"] = op.abspath(self.inputs.out_file)
         return outputs


 class LabelConfigInputSpec(CommandLineInputSpec):
     in_file = File(
         exists=True,
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
         position=-3,
-        desc='input anatomical image')
+        desc="input anatomical image",
+    )
     in_config = File(
-        exists=True,
-        argstr='%s',
-        position=-2,
-        desc='connectome configuration file')
+        exists=True, argstr="%s", position=-2, desc="connectome configuration file"
+    )
     out_file = File(
-        'parcellation.mif',
-        argstr='%s',
+        "parcellation.mif",
+        argstr="%s",
         mandatory=True,
         position=-1,
         usedefault=True,
-        desc='output file after processing')
+        desc="output file after processing",
+    )
     lut_basic = File(
-        argstr='-lut_basic %s',
-        desc='get information from '
-        'a basic lookup table consisting of index / name pairs')
+        argstr="-lut_basic %s",
+        desc="get information from "
+        "a basic lookup table consisting of index / name pairs",
+    )
     lut_fs = File(
-        argstr='-lut_freesurfer %s',
-        desc='get information from '
+        argstr="-lut_freesurfer %s",
+        desc="get information from "
         'a FreeSurfer lookup table(typically "FreeSurferColorLUT'
-        '.txt")')
+        '.txt")',
+    )
     lut_aal = File(
-        argstr='-lut_aal %s',
-        desc='get information from the AAL '
-        'lookup table (typically "ROI_MNI_V4.txt")')
+        argstr="-lut_aal %s",
+        desc="get information from the AAL "
+        'lookup table (typically "ROI_MNI_V4.txt")',
+    )
     lut_itksnap = File(
-        argstr='-lut_itksnap %s',
-        desc='get information from an'
-        ' ITK - SNAP lookup table(this includes the IIT atlas '
-        'file "LUT_GM.txt")')
+        argstr="-lut_itksnap %s",
+        desc="get information from an"
+        " ITK-SNAP lookup table (this includes the IIT atlas "
+        'file "LUT_GM.txt")',
+    )
     spine = File(
-        argstr='-spine %s',
-        desc='provide a manually-defined '
-        'segmentation of the base of the spine where the streamlines'
-        ' terminate, so that this can become a node in the connection'
-        ' matrix.')
+        argstr="-spine %s",
+        desc="provide a manually-defined "
+        "segmentation of the base of the spine where the streamlines"
+        " terminate, so that this can become a node in the connection"
+        " matrix.",
+    )
     nthreads = traits.Int(
-        argstr='-nthreads %d',
-        desc='number of threads. if zero, the number'
-        ' of available cpus will be used',
-        nohash=True)
+        argstr="-nthreads %d",
+        desc="number of threads. if zero, the number" " of available cpus will be used",
+        nohash=True,
+    )


 class LabelConfigOutputSpec(TraitedSpec):
-    out_file = File(exists=True, desc='the output response file')
+    out_file = File(exists=True, desc="the output parcellation file")


 class LabelConfig(MRTrix3Base):
@@ -191,7 +199,7 @@ class LabelConfig(MRTrix3Base):
     >>> labels.run() # doctest: +SKIP
     """
-    _cmd = 'labelconfig'
+    _cmd = "labelconfig"
     input_spec = LabelConfigInputSpec
     output_spec = LabelConfigOutputSpec

@@ -201,65 +209,69 @@ def _parse_inputs(self, skip=None):

         if not isdefined(self.inputs.in_config):
             from distutils.spawn import find_executable
+
             path = find_executable(self._cmd)
             if path is None:
-                path = os.getenv(MRTRIX3_HOME, '/opt/mrtrix3')
+                path = os.getenv("MRTRIX3_HOME", "/opt/mrtrix3")
             else:
                 path = op.dirname(op.dirname(path))

             self.inputs.in_config = op.join(
-                path, 'src/dwi/tractography/connectomics/'
-                'example_configs/fs_default.txt')
+                path,
+                "src/dwi/tractography/connectomics/" "example_configs/fs_default.txt",
+            )

         return super(LabelConfig, self)._parse_inputs(skip=skip)

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['out_file'] = op.abspath(self.inputs.out_file)
+        outputs["out_file"] = op.abspath(self.inputs.out_file)
         return outputs


 class LabelConvertInputSpec(CommandLineInputSpec):
     in_file = File(
         exists=True,
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
         position=-4,
-        desc='input anatomical image')
+        desc="input anatomical image",
+    )
     in_lut = File(
         exists=True,
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
         position=-3,
-        desc='get information from '
-        'a basic lookup table consisting of index / name pairs')
+        desc="get information from "
+        "a basic lookup table consisting of index / name pairs",
+    )
     in_config = File(
-        exists=True,
-        argstr='%s',
-        position=-2,
-        desc='connectome configuration file')
+        exists=True, argstr="%s", position=-2, desc="connectome configuration file"
+    )
     out_file = File(
-        'parcellation.mif',
-        argstr='%s',
+        "parcellation.mif",
+        argstr="%s",
         mandatory=True,
         position=-1,
         usedefault=True,
-        desc='output file after processing')
+        desc="output file after processing",
+    )
     spine = File(
-        argstr='-spine %s',
-        desc='provide a manually-defined '
-        'segmentation of the base of the spine where the streamlines'
-        ' terminate, so that this can become a node in the connection'
-        ' matrix.')
+        argstr="-spine %s",
+        desc="provide a manually-defined "
+        "segmentation of the base of the spine where the streamlines"
+        " terminate, so that this can become a node in the connection"
+        " matrix.",
+    )
     num_threads = traits.Int(
-        argstr='-nthreads %d',
-        desc='number of threads. if zero, the number'
-        ' of available cpus will be used',
-        nohash=True)
+        argstr="-nthreads %d",
+        desc="number of threads. if zero, the number" " of available cpus will be used",
+        nohash=True,
+    )


 class LabelConvertOutputSpec(TraitedSpec):
-    out_file = File(exists=True, desc='the output response file')
+    out_file = File(exists=True, desc="the output parcellation file")


 class LabelConvert(MRTrix3Base):
@@ -279,7 +291,7 @@ class LabelConvert(MRTrix3Base):
     >>> labels.run() # doctest: +SKIP
     """
-    _cmd = 'labelconvert'
+    _cmd = "labelconvert"
     input_spec = LabelConvertInputSpec
     output_spec = LabelConvertOutputSpec

@@ -289,19 +301,21 @@ def _parse_inputs(self, skip=None):

         if not isdefined(self.inputs.in_config):
             from nipype.utils.filemanip import which
+
             path = which(self._cmd)
             if path is None:
-                path = os.getenv(MRTRIX3_HOME, '/opt/mrtrix3')
+                path = os.getenv("MRTRIX3_HOME", "/opt/mrtrix3")
             else:
                 path = op.dirname(op.dirname(path))

             self.inputs.in_config = op.join(
-                path, 'src/dwi/tractography/connectomics/'
-                'example_configs/fs_default.txt')
+                path,
+                "src/dwi/tractography/connectomics/" "example_configs/fs_default.txt",
+            )

         return super(LabelConvert, self)._parse_inputs(skip=skip)

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['out_file'] = op.abspath(self.inputs.out_file)
+        outputs["out_file"] = op.abspath(self.inputs.out_file)
         return outputs
diff --git a/nipype/interfaces/mrtrix3/preprocess.py b/nipype/interfaces/mrtrix3/preprocess.py
index eeeb9e820a..9384ef43c7 100644
--- a/nipype/interfaces/mrtrix3/preprocess.py
+++ b/nipype/interfaces/mrtrix3/preprocess.py
@@ -4,45 +4,50 @@

 import os.path as op

-from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec,
-                    File, isdefined, Undefined, InputMultiObject)
+from ..base import (
+    CommandLineInputSpec,
+    CommandLine,
+    traits,
+    TraitedSpec,
+    File,
+    isdefined,
+    Undefined,
+    InputMultiObject,
+)
 from .base import MRTrix3BaseInputSpec, MRTrix3Base


 class DWIDenoiseInputSpec(MRTrix3BaseInputSpec):
     in_file = File(
-        exists=True,
-        argstr='%s',
-        position=-2,
-        mandatory=True,
-        desc='input DWI image')
-    mask = File(
-        exists=True,
-        argstr='-mask %s',
-        position=1,
-        desc='mask image')
+        exists=True, argstr="%s", position=-2, mandatory=True, desc="input DWI image"
+    )
+    mask = File(exists=True, argstr="-mask %s", position=1, desc="mask image")
     extent = traits.Tuple(
         (traits.Int, traits.Int, traits.Int),
-        argstr='-extent %d,%d,%d',
-        desc='set the window size of the denoising filter. (default = 5,5,5)')
+        argstr="-extent %d,%d,%d",
+        desc="set the window size of the denoising filter. (default = 5,5,5)",
+    )
     noise = File(
-        argstr='-noise %s',
-        name_template='%s_noise',
-        name_source='in_file',
+        argstr="-noise %s",
+        name_template="%s_noise",
+        name_source="in_file",
         keep_extension=True,
-        desc='the output noise map')
+        desc="the output noise map",
+    )
     out_file = File(
-        argstr='%s',
+        argstr="%s",
         position=-1,
-        name_template='%s_denoised',
-        name_source='in_file',
+        name_template="%s_denoised",
+        name_source="in_file",
         keep_extension=True,
-        desc='the output denoised DWI image')
+        desc="the output denoised DWI image",
+    )


 class DWIDenoiseOutputSpec(TraitedSpec):
-    noise = File(desc='the output noise map', exists=True)
-    out_file = File(desc='the output denoised DWI image', exists=True)
+    noise = File(desc="the output noise map", exists=True)
+    out_file = File(desc="the output denoised DWI image", exists=True)
+

 class DWIDenoise(MRTrix3Base):
     """
@@ -76,54 +81,58 @@ class DWIDenoise(MRTrix3Base):
     >>> denoise.run() # doctest: +SKIP
     """
-    _cmd = 'dwidenoise'
+    _cmd = "dwidenoise"
     input_spec = DWIDenoiseInputSpec
     output_spec = DWIDenoiseOutputSpec


 class MRDeGibbsInputSpec(MRTrix3BaseInputSpec):
     in_file = File(
-        exists=True,
-        argstr='%s',
-        position=-2,
-        mandatory=True,
-        desc='input DWI image')
+        exists=True, argstr="%s", position=-2, mandatory=True, desc="input DWI image"
+    )
     axes = traits.ListInt(
         default_value=[0, 1],
         usedefault=True,
-        sep=',',
+        sep=",",
         minlen=2,
         maxlen=2,
-        argstr='-axes %s',
-        desc='indicate the plane in which the data was acquired (axial = 0,1; '
-        'coronal = 0,2; sagittal = 1,2')
+        argstr="-axes %s",
+        desc="indicate the plane in which the data was acquired (axial = 0,1; "
+        "coronal = 0,2; sagittal = 1,2)",
+    )
     nshifts = traits.Int(
         default_value=20,
         usedefault=True,
-        argstr='-nshifts %d',
-        desc='discretization of subpixel spacing (default = 20)')
+        argstr="-nshifts %d",
+        desc="discretization of subpixel spacing (default = 20)",
+    )
     minW = traits.Int(
         default_value=1,
         usedefault=True,
-        argstr='-minW %d',
-        desc='left border of window used for total variation (TV) computation '
-        '(default = 1)')
+        argstr="-minW %d",
+        desc="left border of window used for total variation (TV) computation "
+        "(default = 1)",
+    )
     maxW = traits.Int(
         default_value=3,
         usedefault=True,
-        argstr='-maxW %d',
-        desc='right border of window used for total variation (TV) computation '
-        '(default = 3)')
+        argstr="-maxW %d",
+        desc="right border of window used for total variation (TV) computation "
+        "(default = 3)",
+    )
     out_file = File(
-        name_template='%s_unr',
-        name_source='in_file',
+        name_template="%s_unr",
+        name_source="in_file",
         keep_extension=True,
-        argstr='%s',
+        argstr="%s",
         position=-1,
-        desc='the output unringed DWI image')
+        desc="the output unringed DWI image",
+    )
+

 class MRDeGibbsOutputSpec(TraitedSpec):
-    out_file = File(desc='the output unringed DWI image', exists=True)
+    out_file = File(desc="the output unringed DWI image", exists=True)
+

 class MRDeGibbs(MRTrix3Base):
     """
@@ -161,46 +170,44 @@ class MRDeGibbs(MRTrix3Base):
     >>> unring.run() # doctest: +SKIP
     """
-    _cmd = 'mrdegibbs'
+    _cmd = "mrdegibbs"
     input_spec = MRDeGibbsInputSpec
     output_spec = MRDeGibbsOutputSpec


 class DWIBiasCorrectInputSpec(MRTrix3BaseInputSpec):
     in_file = File(
-        exists=True,
-        argstr='%s',
-        position=-2,
-        mandatory=True,
-        desc='input DWI image')
-    in_mask = File(
-        argstr='-mask %s',
-        desc='input mask image for bias field estimation')
+        exists=True, argstr="%s", position=-2, mandatory=True, desc="input DWI image"
+    )
+    in_mask = File(argstr="-mask %s", desc="input 
mask image for bias field estimation") use_ants = traits.Bool( - argstr='-ants', + argstr="-ants", mandatory=True, - desc='use ANTS N4 to estimate the inhomogeneity field', - xor=['use_fsl']) + desc="use ANTS N4 to estimate the inhomogeneity field", + xor=["use_fsl"], + ) use_fsl = traits.Bool( - argstr='-fsl', + argstr="-fsl", mandatory=True, - desc='use FSL FAST to estimate the inhomogeneity field', - xor=['use_ants']) - bias = File( - argstr='-bias %s', - desc='bias field') + desc="use FSL FAST to estimate the inhomogeneity field", + xor=["use_ants"], + ) + bias = File(argstr="-bias %s", desc="bias field") out_file = File( - name_template='%s_biascorr', - name_source='in_file', + name_template="%s_biascorr", + name_source="in_file", keep_extension=True, - argstr='%s', + argstr="%s", position=-1, - desc='the output bias corrected DWI image', - genfile=True) + desc="the output bias corrected DWI image", + genfile=True, + ) + class DWIBiasCorrectOutputSpec(TraitedSpec): - bias = File(desc='the output bias field', exists=True) - out_file = File(desc='the output bias corrected DWI image', exists=True) + bias = File(desc="the output bias field", exists=True) + out_file = File(desc="the output bias corrected DWI image", exists=True) + class DWIBiasCorrect(MRTrix3Base): """ @@ -221,52 +228,51 @@ class DWIBiasCorrect(MRTrix3Base): >>> bias_correct.run() # doctest: +SKIP """ - _cmd = 'dwibiascorrect' + _cmd = "dwibiascorrect" input_spec = DWIBiasCorrectInputSpec output_spec = DWIBiasCorrectOutputSpec class ResponseSDInputSpec(MRTrix3BaseInputSpec): algorithm = traits.Enum( - 'msmt_5tt', - 'dhollander', - 'tournier', - 'tax', - argstr='%s', + "msmt_5tt", + "dhollander", + "tournier", + "tax", + argstr="%s", position=1, mandatory=True, - desc='response estimation algorithm (multi-tissue)') + desc="response estimation algorithm (multi-tissue)", + ) in_file = File( - exists=True, - argstr='%s', - position=-5, - mandatory=True, - desc='input DWI image') - mtt_file = File(argstr='%s', position=-4, desc='input 5tt image') + exists=True, argstr="%s", position=-5, mandatory=True, desc="input DWI image" + ) + mtt_file = File(argstr="%s", position=-4, desc="input 5tt image") wm_file = File( - 'wm.txt', - argstr='%s', + "wm.txt", + argstr="%s", position=-3, usedefault=True, - desc='output WM response text file') - gm_file = File( - argstr='%s', position=-2, desc='output GM response text file') - csf_file = File( - argstr='%s', position=-1, desc='output CSF response text file') - in_mask = File( - exists=True, argstr='-mask %s', desc='provide initial mask image') + desc="output WM response text file", + ) + gm_file = File(argstr="%s", position=-2, desc="output GM response text file") + csf_file = File(argstr="%s", position=-1, desc="output CSF response text file") + in_mask = File(exists=True, argstr="-mask %s", desc="provide initial mask image") max_sh = InputMultiObject( traits.Int, - argstr='-lmax %s', - sep=',', - desc=('maximum harmonic degree of response function - single value for ' - 'single-shell response, list for multi-shell response')) + argstr="-lmax %s", + sep=",", + desc=( + "maximum harmonic degree of response function - single value for " + "single-shell response, list for multi-shell response" + ), + ) class ResponseSDOutputSpec(TraitedSpec): - wm_file = File(argstr='%s', desc='output WM response text file') - gm_file = File(argstr='%s', desc='output GM response text file') - csf_file = File(argstr='%s', desc='output CSF response text file') + wm_file = File(argstr="%s", desc="output WM 
response text file") + gm_file = File(argstr="%s", desc="output GM response text file") + csf_file = File(argstr="%s", desc="output CSF response text file") class ResponseSD(MRTrix3Base): @@ -291,39 +297,41 @@ class ResponseSD(MRTrix3Base): 'dwi2response tournier -fslgrad bvecs bvals -lmax 6,8,10 dwi.mif wm.txt' """ - _cmd = 'dwi2response' + _cmd = "dwi2response" input_spec = ResponseSDInputSpec output_spec = ResponseSDOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['wm_file'] = op.abspath(self.inputs.wm_file) + outputs["wm_file"] = op.abspath(self.inputs.wm_file) if self.inputs.gm_file != Undefined: - outputs['gm_file'] = op.abspath(self.inputs.gm_file) + outputs["gm_file"] = op.abspath(self.inputs.gm_file) if self.inputs.csf_file != Undefined: - outputs['csf_file'] = op.abspath(self.inputs.csf_file) + outputs["csf_file"] = op.abspath(self.inputs.csf_file) return outputs class ACTPrepareFSLInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='input anatomical image') + desc="input anatomical image", + ) out_file = File( - 'act_5tt.mif', - argstr='%s', + "act_5tt.mif", + argstr="%s", mandatory=True, position=-1, usedefault=True, - desc='output file after processing') + desc="output file after processing", + ) class ACTPrepareFSLOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output response file') + out_file = File(exists=True, desc="the output response file") class ACTPrepareFSL(CommandLine): @@ -342,46 +350,43 @@ class ACTPrepareFSL(CommandLine): >>> prep.run() # doctest: +SKIP """ - _cmd = 'act_anat_prepare_fsl' + _cmd = "act_anat_prepare_fsl" input_spec = ACTPrepareFSLInputSpec output_spec = ACTPrepareFSLOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class ReplaceFSwithFIRSTInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-4, - desc='input anatomical image') + desc="input anatomical image", + ) in_t1w = File( - exists=True, - argstr='%s', - mandatory=True, - position=-3, - desc='input T1 image') + exists=True, argstr="%s", mandatory=True, position=-3, desc="input T1 image" + ) in_config = File( - exists=True, - argstr='%s', - position=-2, - desc='connectome configuration file') + exists=True, argstr="%s", position=-2, desc="connectome configuration file" + ) out_file = File( - 'aparc+first.mif', - argstr='%s', + "aparc+first.mif", + argstr="%s", mandatory=True, position=-1, usedefault=True, - desc='output file after processing') + desc="output file after processing", + ) class ReplaceFSwithFIRSTOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output response file') + out_file = File(exists=True, desc="the output response file") class ReplaceFSwithFIRST(CommandLine): @@ -403,11 +408,11 @@ class ReplaceFSwithFIRST(CommandLine): >>> prep.run() # doctest: +SKIP """ - _cmd = 'fs_parc_replace_sgm_first' + _cmd = "fs_parc_replace_sgm_first" input_spec = ReplaceFSwithFIRSTInputSpec output_spec = ReplaceFSwithFIRSTOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs diff --git a/nipype/interfaces/mrtrix3/reconst.py b/nipype/interfaces/mrtrix3/reconst.py index be89022267..2f2854ed8c 100644 --- 
a/nipype/interfaces/mrtrix3/reconst.py +++ b/nipype/interfaces/mrtrix3/reconst.py @@ -11,41 +11,49 @@ class FitTensorInputSpec(MRTrix3BaseInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='input diffusion weighted images') + desc="input diffusion weighted images", + ) out_file = File( - 'dti.mif', - argstr='%s', + "dti.mif", + argstr="%s", mandatory=True, position=-1, usedefault=True, - desc='the output diffusion tensor image') + desc="the output diffusion tensor image", + ) # General options in_mask = File( exists=True, - argstr='-mask %s', - desc=('only perform computation within the specified ' - 'binary brain mask image')) + argstr="-mask %s", + desc=( + "only perform computation within the specified " "binary brain mask image" + ), + ) method = traits.Enum( - 'nonlinear', - 'loglinear', - 'sech', - 'rician', - argstr='-method %s', - desc=('select method used to perform the fitting')) + "nonlinear", + "loglinear", + "sech", + "rician", + argstr="-method %s", + desc=("select method used to perform the fitting"), + ) reg_term = traits.Float( - argstr='-regularisation %f', - max_ver='0.3.13', - desc=('specify the strength of the regularisation term on the ' - 'magnitude of the tensor elements (default = 5000). This ' - 'only applies to the non-linear methods')) + argstr="-regularisation %f", + max_ver="0.3.13", + desc=( + "specify the strength of the regularisation term on the " + "magnitude of the tensor elements (default = 5000). This " + "only applies to the non-linear methods" + ), + ) class FitTensorOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output DTI file') + out_file = File(exists=True, desc="the output DTI file") class FitTensor(MRTrix3Base): @@ -66,73 +74,82 @@ class FitTensor(MRTrix3Base): >>> tsr.run() # doctest: +SKIP """ - _cmd = 'dwi2tensor' + _cmd = "dwi2tensor" input_spec = FitTensorInputSpec output_spec = FitTensorOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class EstimateFODInputSpec(MRTrix3BaseInputSpec): algorithm = traits.Enum( - 'csd', - 'msmt_csd', - argstr='%s', + "csd", + "msmt_csd", + argstr="%s", position=-8, mandatory=True, - desc='FOD algorithm') + desc="FOD algorithm", + ) in_file = File( - exists=True, - argstr='%s', - position=-7, - mandatory=True, - desc='input DWI image') + exists=True, argstr="%s", position=-7, mandatory=True, desc="input DWI image" + ) wm_txt = File( - argstr='%s', position=-6, mandatory=True, desc='WM response text file') + argstr="%s", position=-6, mandatory=True, desc="WM response text file" + ) wm_odf = File( - 'wm.mif', - argstr='%s', + "wm.mif", + argstr="%s", position=-5, usedefault=True, mandatory=True, - desc='output WM ODF') - gm_txt = File(argstr='%s', position=-4, desc='GM response text file') - gm_odf = File('gm.mif', usedefault=True, argstr='%s', - position=-3, desc='output GM ODF') - csf_txt = File(argstr='%s', position=-2, desc='CSF response text file') - csf_odf = File('csf.mif', usedefault=True, argstr='%s', - position=-1, desc='output CSF ODF') - mask_file = File(exists=True, argstr='-mask %s', desc='mask image') + desc="output WM ODF", + ) + gm_txt = File(argstr="%s", position=-4, desc="GM response text file") + gm_odf = File( + "gm.mif", usedefault=True, argstr="%s", position=-3, desc="output GM ODF" + ) + csf_txt = File(argstr="%s", position=-2, desc="CSF response text file") + 
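+    # The response/ODF inputs pair up positionally: wm_txt/wm_odf are mandatory,
+    # while the gm_* and csf_* pairs are only meaningful for the multi-tissue
+    # "msmt_csd" algorithm; _list_outputs() reports gm_odf/csf_odf only when
+    # they are defined.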
csf_odf = File( + "csf.mif", usedefault=True, argstr="%s", position=-1, desc="output CSF ODF" + ) + mask_file = File(exists=True, argstr="-mask %s", desc="mask image") # DW Shell selection options shell = traits.List( traits.Float, - sep=',', - argstr='-shell %s', - desc='specify one or more dw gradient shells') + sep=",", + argstr="-shell %s", + desc="specify one or more dw gradient shells", + ) max_sh = InputMultiObject( traits.Int, value=[8], usedefault=True, - argstr='-lmax %s', - sep=',', - desc=('maximum harmonic degree of response function - single value for single-shell response, list for multi-shell response')) + argstr="-lmax %s", + sep=",", + desc=( + "maximum harmonic degree of response function - single value for single-shell response, list for multi-shell response" + ), + ) in_dirs = File( exists=True, - argstr='-directions %s', - desc=('specify the directions over which to apply the non-negativity ' - 'constraint (by default, the built-in 300 direction set is ' - 'used). These should be supplied as a text file containing the ' - '[ az el ] pairs for the directions.')) + argstr="-directions %s", + desc=( + "specify the directions over which to apply the non-negativity " + "constraint (by default, the built-in 300 direction set is " + "used). These should be supplied as a text file containing the " + "[ az el ] pairs for the directions." + ), + ) class EstimateFODOutputSpec(TraitedSpec): - wm_odf = File(argstr='%s', desc='output WM ODF') - gm_odf = File(argstr='%s', desc='output GM ODF') - csf_odf = File(argstr='%s', desc='output CSF ODF') + wm_odf = File(argstr="%s", desc="output WM ODF") + gm_odf = File(argstr="%s", desc="output GM ODF") + csf_odf = File(argstr="%s", desc="output CSF ODF") class EstimateFOD(MRTrix3Base): @@ -153,15 +170,15 @@ class EstimateFOD(MRTrix3Base): >>> fod.run() # doctest: +SKIP """ - _cmd = 'dwi2fod' + _cmd = "dwi2fod" input_spec = EstimateFODInputSpec output_spec = EstimateFODOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['wm_odf'] = op.abspath(self.inputs.wm_odf) + outputs["wm_odf"] = op.abspath(self.inputs.wm_odf) if self.inputs.gm_odf != Undefined: - outputs['gm_odf'] = op.abspath(self.inputs.gm_odf) + outputs["gm_odf"] = op.abspath(self.inputs.gm_odf) if self.inputs.csf_odf != Undefined: - outputs['csf_odf'] = op.abspath(self.inputs.csf_odf) + outputs["csf_odf"] = op.abspath(self.inputs.csf_odf) return outputs diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_ACTPrepareFSL.py b/nipype/interfaces/mrtrix3/tests/test_auto_ACTPrepareFSL.py index 137ddec81f..8064175d65 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_ACTPrepareFSL.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_ACTPrepareFSL.py @@ -4,23 +4,11 @@ def test_ACTPrepareFSL_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), out_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - usedefault=True, + argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, ), ) inputs = ACTPrepareFSL.input_spec() @@ -28,8 +16,10 @@ def test_ACTPrepareFSL_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def 
test_ACTPrepareFSL_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ACTPrepareFSL.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_BrainMask.py b/nipype/interfaces/mrtrix3/tests/test_auto_BrainMask.py index 9d22520268..c7ce6cc9af 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_BrainMask.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_BrainMask.py @@ -4,42 +4,17 @@ def test_BrainMask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - grad_file=dict( - argstr='-grad %s', - extensions=None, - xor=['grad_fsl'], - ), - grad_fsl=dict( - argstr='-fslgrad %s %s', - xor=['grad_file'], - ), - in_bval=dict(extensions=None, ), - in_bvec=dict( - argstr='-fslgrad %s %s', - extensions=None, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - nthreads=dict( - argstr='-nthreads %d', - nohash=True, - ), + args=dict(argstr="%s",), + bval_scale=dict(argstr="-bvalue_scaling %s",), + environ=dict(nohash=True, usedefault=True,), + grad_file=dict(argstr="-grad %s", extensions=None, xor=["grad_fsl"],), + grad_fsl=dict(argstr="-fslgrad %s %s", xor=["grad_file"],), + in_bval=dict(extensions=None,), + in_bvec=dict(argstr="-fslgrad %s %s", extensions=None,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + nthreads=dict(argstr="-nthreads %d", nohash=True,), out_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - usedefault=True, + argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, ), ) inputs = BrainMask.input_spec() @@ -47,8 +22,10 @@ def test_BrainMask_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BrainMask_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = BrainMask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_BuildConnectome.py b/nipype/interfaces/mrtrix3/tests/test_auto_BuildConnectome.py index 2e4c70cbe4..95aae6fc03 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_BuildConnectome.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_BuildConnectome.py @@ -4,56 +4,33 @@ def test_BuildConnectome_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), - in_parc=dict( - argstr='%s', - extensions=None, - position=-2, - ), - in_scalar=dict( - argstr='-image %s', - extensions=None, - ), - in_weights=dict( - argstr='-tck_weights_in %s', - extensions=None, - ), - keep_unassigned=dict(argstr='-keep_unassigned', ), - metric=dict(argstr='-metric %s', ), - nthreads=dict( - argstr='-nthreads %d', - nohash=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), + in_parc=dict(argstr="%s", extensions=None, position=-2,), + in_scalar=dict(argstr="-image %s", extensions=None,), + in_weights=dict(argstr="-tck_weights_in %s", extensions=None,), + keep_unassigned=dict(argstr="-keep_unassigned",), + metric=dict(argstr="-metric 
%s",), + nthreads=dict(argstr="-nthreads %d", nohash=True,), out_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - usedefault=True, + argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, ), - search_forward=dict(argstr='-assignment_forward_search %f', ), - search_radius=dict(argstr='-assignment_radial_search %f', ), - search_reverse=dict(argstr='-assignment_reverse_search %f', ), - vox_lookup=dict(argstr='-assignment_voxel_lookup', ), - zero_diagonal=dict(argstr='-zero_diagonal', ), + search_forward=dict(argstr="-assignment_forward_search %f",), + search_radius=dict(argstr="-assignment_radial_search %f",), + search_reverse=dict(argstr="-assignment_reverse_search %f",), + vox_lookup=dict(argstr="-assignment_voxel_lookup",), + zero_diagonal=dict(argstr="-zero_diagonal",), ) inputs = BuildConnectome.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BuildConnectome_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = BuildConnectome.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_ComputeTDI.py b/nipype/interfaces/mrtrix3/tests/test_auto_ComputeTDI.py index 81fcf0ad7d..f6a8734cef 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_ComputeTDI.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_ComputeTDI.py @@ -4,66 +4,37 @@ def test_ComputeTDI_inputs(): input_map = dict( - args=dict(argstr='%s', ), - contrast=dict(argstr='-constrast %s', ), - data_type=dict(argstr='-datatype %s', ), - dixel=dict( - argstr='-dixel %s', - extensions=None, - ), - ends_only=dict(argstr='-ends_only', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fwhm_tck=dict(argstr='-fwhm_tck %f', ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - in_map=dict( - argstr='-image %s', - extensions=None, - ), - map_zero=dict(argstr='-map_zero', ), - max_tod=dict(argstr='-tod %d', ), - nthreads=dict( - argstr='-nthreads %d', - nohash=True, - ), - out_file=dict( - argstr='%s', - extensions=None, - position=-1, - usedefault=True, - ), - precise=dict(argstr='-precise', ), - reference=dict( - argstr='-template %s', - extensions=None, - ), - stat_tck=dict(argstr='-stat_tck %s', ), - stat_vox=dict(argstr='-stat_vox %s', ), - tck_weights=dict( - argstr='-tck_weights_in %s', - extensions=None, - ), - upsample=dict(argstr='-upsample %d', ), - use_dec=dict(argstr='-dec', ), - vox_size=dict( - argstr='-vox %s', - sep=',', - ), + args=dict(argstr="%s",), + contrast=dict(argstr="-constrast %s",), + data_type=dict(argstr="-datatype %s",), + dixel=dict(argstr="-dixel %s", extensions=None,), + ends_only=dict(argstr="-ends_only",), + environ=dict(nohash=True, usedefault=True,), + fwhm_tck=dict(argstr="-fwhm_tck %f",), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + in_map=dict(argstr="-image %s", extensions=None,), + map_zero=dict(argstr="-map_zero",), + max_tod=dict(argstr="-tod %d",), + nthreads=dict(argstr="-nthreads %d", nohash=True,), + out_file=dict(argstr="%s", extensions=None, position=-1, usedefault=True,), + precise=dict(argstr="-precise",), + reference=dict(argstr="-template %s", extensions=None,), + stat_tck=dict(argstr="-stat_tck %s",), + stat_vox=dict(argstr="-stat_vox %s",), + tck_weights=dict(argstr="-tck_weights_in %s", 
extensions=None,), + upsample=dict(argstr="-upsample %d",), + use_dec=dict(argstr="-dec",), + vox_size=dict(argstr="-vox %s", sep=",",), ) inputs = ComputeTDI.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ComputeTDI_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ComputeTDI.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_DWIBiasCorrect.py b/nipype/interfaces/mrtrix3/tests/test_auto_DWIBiasCorrect.py index d5accaa3ee..210b39b141 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_DWIBiasCorrect.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_DWIBiasCorrect.py @@ -4,74 +4,38 @@ def test_DWIBiasCorrect_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bias=dict( - argstr='-bias %s', - extensions=None, - ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - grad_file=dict( - argstr='-grad %s', - extensions=None, - xor=['grad_fsl'], - ), - grad_fsl=dict( - argstr='-fslgrad %s %s', - xor=['grad_file'], - ), - in_bval=dict(extensions=None, ), - in_bvec=dict( - argstr='-fslgrad %s %s', - extensions=None, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - in_mask=dict( - argstr='-mask %s', - extensions=None, - ), - nthreads=dict( - argstr='-nthreads %d', - nohash=True, - ), + args=dict(argstr="%s",), + bias=dict(argstr="-bias %s", extensions=None,), + bval_scale=dict(argstr="-bvalue_scaling %s",), + environ=dict(nohash=True, usedefault=True,), + grad_file=dict(argstr="-grad %s", extensions=None, xor=["grad_fsl"],), + grad_fsl=dict(argstr="-fslgrad %s %s", xor=["grad_file"],), + in_bval=dict(extensions=None,), + in_bvec=dict(argstr="-fslgrad %s %s", extensions=None,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + in_mask=dict(argstr="-mask %s", extensions=None,), + nthreads=dict(argstr="-nthreads %d", nohash=True,), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, genfile=True, keep_extension=True, - name_source='in_file', - name_template='%s_biascorr', + name_source="in_file", + name_template="%s_biascorr", position=-1, ), - use_ants=dict( - argstr='-ants', - mandatory=True, - xor=['use_fsl'], - ), - use_fsl=dict( - argstr='-fsl', - mandatory=True, - xor=['use_ants'], - ), + use_ants=dict(argstr="-ants", mandatory=True, xor=["use_fsl"],), + use_fsl=dict(argstr="-fsl", mandatory=True, xor=["use_ants"],), ) inputs = DWIBiasCorrect.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DWIBiasCorrect_outputs(): - output_map = dict( - bias=dict(extensions=None, ), - out_file=dict(extensions=None, ), - ) + output_map = dict(bias=dict(extensions=None,), out_file=dict(extensions=None,),) outputs = DWIBiasCorrect.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_DWIDenoise.py b/nipype/interfaces/mrtrix3/tests/test_auto_DWIDenoise.py index 248ab20da2..d5050327aa 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_DWIDenoise.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_DWIDenoise.py @@ -4,55 +4,30 @@ def test_DWIDenoise_inputs(): input_map = dict( - args=dict(argstr='%s', ), - 
bval_scale=dict(argstr='-bvalue_scaling %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - extent=dict(argstr='-extent %d,%d,%d', ), - grad_file=dict( - argstr='-grad %s', - extensions=None, - xor=['grad_fsl'], - ), - grad_fsl=dict( - argstr='-fslgrad %s %s', - xor=['grad_file'], - ), - in_bval=dict(extensions=None, ), - in_bvec=dict( - argstr='-fslgrad %s %s', - extensions=None, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - mask=dict( - argstr='-mask %s', - extensions=None, - position=1, - ), + args=dict(argstr="%s",), + bval_scale=dict(argstr="-bvalue_scaling %s",), + environ=dict(nohash=True, usedefault=True,), + extent=dict(argstr="-extent %d,%d,%d",), + grad_file=dict(argstr="-grad %s", extensions=None, xor=["grad_fsl"],), + grad_fsl=dict(argstr="-fslgrad %s %s", xor=["grad_file"],), + in_bval=dict(extensions=None,), + in_bvec=dict(argstr="-fslgrad %s %s", extensions=None,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + mask=dict(argstr="-mask %s", extensions=None, position=1,), noise=dict( - argstr='-noise %s', + argstr="-noise %s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_noise', - ), - nthreads=dict( - argstr='-nthreads %d', - nohash=True, + name_source="in_file", + name_template="%s_noise", ), + nthreads=dict(argstr="-nthreads %d", nohash=True,), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_denoised', + name_source="in_file", + name_template="%s_denoised", position=-1, ), ) @@ -61,11 +36,10 @@ def test_DWIDenoise_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DWIDenoise_outputs(): - output_map = dict( - noise=dict(extensions=None, ), - out_file=dict(extensions=None, ), - ) + output_map = dict(noise=dict(extensions=None,), out_file=dict(extensions=None,),) outputs = DWIDenoise.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_DWIExtract.py b/nipype/interfaces/mrtrix3/tests/test_auto_DWIExtract.py index 67f4992768..18fa49c260 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_DWIExtract.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_DWIExtract.py @@ -4,57 +4,30 @@ def test_DWIExtract_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), - bzero=dict(argstr='-bzero', ), - environ=dict( - nohash=True, - usedefault=True, - ), - grad_file=dict( - argstr='-grad %s', - extensions=None, - xor=['grad_fsl'], - ), - grad_fsl=dict( - argstr='-fslgrad %s %s', - xor=['grad_file'], - ), - in_bval=dict(extensions=None, ), - in_bvec=dict( - argstr='-fslgrad %s %s', - extensions=None, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - nobzero=dict(argstr='-no_bzero', ), - nthreads=dict( - argstr='-nthreads %d', - nohash=True, - ), - out_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - ), - shell=dict( - argstr='-shell %s', - sep=',', - ), - singleshell=dict(argstr='-singleshell', ), + args=dict(argstr="%s",), + bval_scale=dict(argstr="-bvalue_scaling %s",), + bzero=dict(argstr="-bzero",), + environ=dict(nohash=True, usedefault=True,), + grad_file=dict(argstr="-grad %s", extensions=None, xor=["grad_fsl"],), + grad_fsl=dict(argstr="-fslgrad %s %s", 
xor=["grad_file"],), + in_bval=dict(extensions=None,), + in_bvec=dict(argstr="-fslgrad %s %s", extensions=None,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + nobzero=dict(argstr="-no_bzero",), + nthreads=dict(argstr="-nthreads %d", nohash=True,), + out_file=dict(argstr="%s", extensions=None, mandatory=True, position=-1,), + shell=dict(argstr="-shell %s", sep=",",), + singleshell=dict(argstr="-singleshell",), ) inputs = DWIExtract.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DWIExtract_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = DWIExtract.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_EstimateFOD.py b/nipype/interfaces/mrtrix3/tests/test_auto_EstimateFOD.py index 495257ca03..bfadae423f 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_EstimateFOD.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_EstimateFOD.py @@ -4,113 +4,41 @@ def test_EstimateFOD_inputs(): input_map = dict( - algorithm=dict( - argstr='%s', - mandatory=True, - position=-8, - ), - args=dict(argstr='%s', ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), - csf_odf=dict( - argstr='%s', - extensions=None, - position=-1, - usedefault=True, - ), - csf_txt=dict( - argstr='%s', - extensions=None, - position=-2, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - gm_odf=dict( - argstr='%s', - extensions=None, - position=-3, - usedefault=True, - ), - gm_txt=dict( - argstr='%s', - extensions=None, - position=-4, - ), - grad_file=dict( - argstr='-grad %s', - extensions=None, - xor=['grad_fsl'], - ), - grad_fsl=dict( - argstr='-fslgrad %s %s', - xor=['grad_file'], - ), - in_bval=dict(extensions=None, ), - in_bvec=dict( - argstr='-fslgrad %s %s', - extensions=None, - ), - in_dirs=dict( - argstr='-directions %s', - extensions=None, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-7, - ), - mask_file=dict( - argstr='-mask %s', - extensions=None, - ), - max_sh=dict( - argstr='-lmax %s', - sep=',', - usedefault=True, - ), - nthreads=dict( - argstr='-nthreads %d', - nohash=True, - ), - shell=dict( - argstr='-shell %s', - sep=',', - ), + algorithm=dict(argstr="%s", mandatory=True, position=-8,), + args=dict(argstr="%s",), + bval_scale=dict(argstr="-bvalue_scaling %s",), + csf_odf=dict(argstr="%s", extensions=None, position=-1, usedefault=True,), + csf_txt=dict(argstr="%s", extensions=None, position=-2,), + environ=dict(nohash=True, usedefault=True,), + gm_odf=dict(argstr="%s", extensions=None, position=-3, usedefault=True,), + gm_txt=dict(argstr="%s", extensions=None, position=-4,), + grad_file=dict(argstr="-grad %s", extensions=None, xor=["grad_fsl"],), + grad_fsl=dict(argstr="-fslgrad %s %s", xor=["grad_file"],), + in_bval=dict(extensions=None,), + in_bvec=dict(argstr="-fslgrad %s %s", extensions=None,), + in_dirs=dict(argstr="-directions %s", extensions=None,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-7,), + mask_file=dict(argstr="-mask %s", extensions=None,), + max_sh=dict(argstr="-lmax %s", sep=",", usedefault=True,), + nthreads=dict(argstr="-nthreads %d", nohash=True,), + shell=dict(argstr="-shell %s", sep=",",), wm_odf=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-5, - usedefault=True, - ), - wm_txt=dict( - 
argstr='%s', - extensions=None, - mandatory=True, - position=-6, + argstr="%s", extensions=None, mandatory=True, position=-5, usedefault=True, ), + wm_txt=dict(argstr="%s", extensions=None, mandatory=True, position=-6,), ) inputs = EstimateFOD.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EstimateFOD_outputs(): output_map = dict( - csf_odf=dict( - argstr='%s', - extensions=None, - ), - gm_odf=dict( - argstr='%s', - extensions=None, - ), - wm_odf=dict( - argstr='%s', - extensions=None, - ), + csf_odf=dict(argstr="%s", extensions=None,), + gm_odf=dict(argstr="%s", extensions=None,), + wm_odf=dict(argstr="%s", extensions=None,), ) outputs = EstimateFOD.output_spec() diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_FitTensor.py b/nipype/interfaces/mrtrix3/tests/test_auto_FitTensor.py index 9491194e77..d586dbaf59 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_FitTensor.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_FitTensor.py @@ -4,60 +4,31 @@ def test_FitTensor_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - grad_file=dict( - argstr='-grad %s', - extensions=None, - xor=['grad_fsl'], - ), - grad_fsl=dict( - argstr='-fslgrad %s %s', - xor=['grad_file'], - ), - in_bval=dict(extensions=None, ), - in_bvec=dict( - argstr='-fslgrad %s %s', - extensions=None, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - in_mask=dict( - argstr='-mask %s', - extensions=None, - ), - method=dict(argstr='-method %s', ), - nthreads=dict( - argstr='-nthreads %d', - nohash=True, - ), + args=dict(argstr="%s",), + bval_scale=dict(argstr="-bvalue_scaling %s",), + environ=dict(nohash=True, usedefault=True,), + grad_file=dict(argstr="-grad %s", extensions=None, xor=["grad_fsl"],), + grad_fsl=dict(argstr="-fslgrad %s %s", xor=["grad_file"],), + in_bval=dict(extensions=None,), + in_bvec=dict(argstr="-fslgrad %s %s", extensions=None,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + in_mask=dict(argstr="-mask %s", extensions=None,), + method=dict(argstr="-method %s",), + nthreads=dict(argstr="-nthreads %d", nohash=True,), out_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - usedefault=True, - ), - reg_term=dict( - argstr='-regularisation %f', - max_ver='0.3.13', + argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, ), + reg_term=dict(argstr="-regularisation %f", max_ver="0.3.13",), ) inputs = FitTensor.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FitTensor_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = FitTensor.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_Generate5tt.py b/nipype/interfaces/mrtrix3/tests/test_auto_Generate5tt.py index d73ea20a1c..d41fd52a11 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_Generate5tt.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_Generate5tt.py @@ -4,55 +4,27 @@ def test_Generate5tt_inputs(): input_map = dict( - algorithm=dict( - argstr='%s', - mandatory=True, - position=-3, - ), - args=dict(argstr='%s', ), - 
bval_scale=dict(argstr='-bvalue_scaling %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - grad_file=dict( - argstr='-grad %s', - extensions=None, - xor=['grad_fsl'], - ), - grad_fsl=dict( - argstr='-fslgrad %s %s', - xor=['grad_file'], - ), - in_bval=dict(extensions=None, ), - in_bvec=dict( - argstr='-fslgrad %s %s', - extensions=None, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - nthreads=dict( - argstr='-nthreads %d', - nohash=True, - ), - out_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - ), + algorithm=dict(argstr="%s", mandatory=True, position=-3,), + args=dict(argstr="%s",), + bval_scale=dict(argstr="-bvalue_scaling %s",), + environ=dict(nohash=True, usedefault=True,), + grad_file=dict(argstr="-grad %s", extensions=None, xor=["grad_fsl"],), + grad_fsl=dict(argstr="-fslgrad %s %s", xor=["grad_file"],), + in_bval=dict(extensions=None,), + in_bvec=dict(argstr="-fslgrad %s %s", extensions=None,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + nthreads=dict(argstr="-nthreads %d", nohash=True,), + out_file=dict(argstr="%s", extensions=None, mandatory=True, position=-1,), ) inputs = Generate5tt.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Generate5tt_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Generate5tt.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_LabelConfig.py b/nipype/interfaces/mrtrix3/tests/test_auto_LabelConfig.py index 90a0f25314..2c37a6bc93 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_LabelConfig.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_LabelConfig.py @@ -4,61 +4,29 @@ def test_LabelConfig_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_config=dict( - argstr='%s', - extensions=None, - position=-2, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), - lut_aal=dict( - argstr='-lut_aal %s', - extensions=None, - ), - lut_basic=dict( - argstr='-lut_basic %s', - extensions=None, - ), - lut_fs=dict( - argstr='-lut_freesurfer %s', - extensions=None, - ), - lut_itksnap=dict( - argstr='-lut_itksnap %s', - extensions=None, - ), - nthreads=dict( - argstr='-nthreads %d', - nohash=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_config=dict(argstr="%s", extensions=None, position=-2,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), + lut_aal=dict(argstr="-lut_aal %s", extensions=None,), + lut_basic=dict(argstr="-lut_basic %s", extensions=None,), + lut_fs=dict(argstr="-lut_freesurfer %s", extensions=None,), + lut_itksnap=dict(argstr="-lut_itksnap %s", extensions=None,), + nthreads=dict(argstr="-nthreads %d", nohash=True,), out_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - usedefault=True, - ), - spine=dict( - argstr='-spine %s', - extensions=None, + argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, ), + spine=dict(argstr="-spine %s", extensions=None,), ) inputs = LabelConfig.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + 
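All of the test_auto_* modules touched by this patch share one pattern: a literal metadata map is asserted, key by key, against the trait metadata declared on the interface's input_spec (or output_spec). A minimal sketch of that shared check, assuming nipype is importable; the helper name check_spec and the single-entry map are illustrative, not part of the generated tests:

# Illustrative consolidation of the loop each generated test inlines;
# `check_spec` is a hypothetical helper, not a nipype API.
from nipype.interfaces.mrtrix3 import LabelConfig


def check_spec(interface, metadata_map, spec="input_spec"):
    # Instantiate the declared spec class and compare every recorded
    # metadata value against the live trait definition.
    spec_instance = getattr(interface, spec)()
    for trait_name, metadata in metadata_map.items():
        for metakey, value in metadata.items():
            assert getattr(spec_instance.traits()[trait_name], metakey) == value


# One of the assertions test_LabelConfig_inputs makes, expressed via the helper:
check_spec(LabelConfig, {"nthreads": dict(argstr="-nthreads %d", nohash=True)})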
def test_LabelConfig_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = LabelConfig.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_LabelConvert.py b/nipype/interfaces/mrtrix3/tests/test_auto_LabelConvert.py index 6d4a6c8dcb..9db9bb4df5 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_LabelConvert.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_LabelConvert.py @@ -4,51 +4,26 @@ def test_LabelConvert_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_config=dict( - argstr='%s', - extensions=None, - position=-2, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-4, - ), - in_lut=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), - num_threads=dict( - argstr='-nthreads %d', - nohash=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_config=dict(argstr="%s", extensions=None, position=-2,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-4,), + in_lut=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), + num_threads=dict(argstr="-nthreads %d", nohash=True,), out_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - usedefault=True, - ), - spine=dict( - argstr='-spine %s', - extensions=None, + argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, ), + spine=dict(argstr="-spine %s", extensions=None,), ) inputs = LabelConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LabelConvert_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = LabelConvert.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_MRConvert.py b/nipype/interfaces/mrtrix3/tests/test_auto_MRConvert.py index b959c22546..2440113e20 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_MRConvert.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_MRConvert.py @@ -4,67 +4,32 @@ def test_MRConvert_inputs(): input_map = dict( - args=dict(argstr='%s', ), - axes=dict( - argstr='-axes %s', - sep=',', - ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), - coord=dict( - argstr='-coord %s', - sep=' ', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - grad_file=dict( - argstr='-grad %s', - extensions=None, - xor=['grad_fsl'], - ), - grad_fsl=dict( - argstr='-fslgrad %s %s', - xor=['grad_file'], - ), - in_bval=dict(extensions=None, ), - in_bvec=dict( - argstr='-fslgrad %s %s', - extensions=None, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - nthreads=dict( - argstr='-nthreads %d', - nohash=True, - ), + args=dict(argstr="%s",), + axes=dict(argstr="-axes %s", sep=",",), + bval_scale=dict(argstr="-bvalue_scaling %s",), + coord=dict(argstr="-coord %s", sep=" ",), + environ=dict(nohash=True, usedefault=True,), + grad_file=dict(argstr="-grad %s", extensions=None, xor=["grad_fsl"],), + grad_fsl=dict(argstr="-fslgrad %s %s", xor=["grad_file"],), + in_bval=dict(extensions=None,), + in_bvec=dict(argstr="-fslgrad %s %s", extensions=None,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + 
nthreads=dict(argstr="-nthreads %d", nohash=True,), out_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - usedefault=True, - ), - scaling=dict( - argstr='-scaling %s', - sep=',', - ), - vox=dict( - argstr='-vox %s', - sep=',', + argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, ), + scaling=dict(argstr="-scaling %s", sep=",",), + vox=dict(argstr="-vox %s", sep=",",), ) inputs = MRConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRConvert_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = MRConvert.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_MRDeGibbs.py b/nipype/interfaces/mrtrix3/tests/test_auto_MRDeGibbs.py index 010fbb8a25..439e834eb2 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_MRDeGibbs.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_MRDeGibbs.py @@ -4,61 +4,25 @@ def test_MRDeGibbs_inputs(): input_map = dict( - args=dict(argstr='%s', ), - axes=dict( - argstr='-axes %s', - maxlen=2, - minlen=2, - sep=',', - usedefault=True, - ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - grad_file=dict( - argstr='-grad %s', - extensions=None, - xor=['grad_fsl'], - ), - grad_fsl=dict( - argstr='-fslgrad %s %s', - xor=['grad_file'], - ), - in_bval=dict(extensions=None, ), - in_bvec=dict( - argstr='-fslgrad %s %s', - extensions=None, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - maxW=dict( - argstr='-maxW %d', - usedefault=True, - ), - minW=dict( - argstr='-minW %d', - usedefault=True, - ), - nshifts=dict( - argstr='-nshifts %d', - usedefault=True, - ), - nthreads=dict( - argstr='-nthreads %d', - nohash=True, - ), + args=dict(argstr="%s",), + axes=dict(argstr="-axes %s", maxlen=2, minlen=2, sep=",", usedefault=True,), + bval_scale=dict(argstr="-bvalue_scaling %s",), + environ=dict(nohash=True, usedefault=True,), + grad_file=dict(argstr="-grad %s", extensions=None, xor=["grad_fsl"],), + grad_fsl=dict(argstr="-fslgrad %s %s", xor=["grad_file"],), + in_bval=dict(extensions=None,), + in_bvec=dict(argstr="-fslgrad %s %s", extensions=None,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + maxW=dict(argstr="-maxW %d", usedefault=True,), + minW=dict(argstr="-minW %d", usedefault=True,), + nshifts=dict(argstr="-nshifts %d", usedefault=True,), + nthreads=dict(argstr="-nthreads %d", nohash=True,), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, keep_extension=True, - name_source='in_file', - name_template='%s_unr', + name_source="in_file", + name_template="%s_unr", position=-1, ), ) @@ -67,8 +31,10 @@ def test_MRDeGibbs_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRDeGibbs_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = MRDeGibbs.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_MRMath.py b/nipype/interfaces/mrtrix3/tests/test_auto_MRMath.py index d2a1d057b8..33f9c82d22 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_MRMath.py +++ 
b/nipype/interfaces/mrtrix3/tests/test_auto_MRMath.py @@ -4,56 +4,28 @@ def test_MRMath_inputs(): input_map = dict( - args=dict(argstr='%s', ), - axis=dict(argstr='-axis %d', ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - grad_file=dict( - argstr='-grad %s', - extensions=None, - xor=['grad_fsl'], - ), - grad_fsl=dict( - argstr='-fslgrad %s %s', - xor=['grad_file'], - ), - in_bval=dict(extensions=None, ), - in_bvec=dict( - argstr='-fslgrad %s %s', - extensions=None, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), - nthreads=dict( - argstr='-nthreads %d', - nohash=True, - ), - operation=dict( - argstr='%s', - mandatory=True, - position=-2, - ), - out_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - ), + args=dict(argstr="%s",), + axis=dict(argstr="-axis %d",), + bval_scale=dict(argstr="-bvalue_scaling %s",), + environ=dict(nohash=True, usedefault=True,), + grad_file=dict(argstr="-grad %s", extensions=None, xor=["grad_fsl"],), + grad_fsl=dict(argstr="-fslgrad %s %s", xor=["grad_file"],), + in_bval=dict(extensions=None,), + in_bvec=dict(argstr="-fslgrad %s %s", extensions=None,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), + nthreads=dict(argstr="-nthreads %d", nohash=True,), + operation=dict(argstr="%s", mandatory=True, position=-2,), + out_file=dict(argstr="%s", extensions=None, mandatory=True, position=-1,), ) inputs = MRMath.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRMath_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = MRMath.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_MRResize.py b/nipype/interfaces/mrtrix3/tests/test_auto_MRResize.py index 3c904259b5..cf41dfe856 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_MRResize.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_MRResize.py @@ -4,62 +4,34 @@ def test_MRResize_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - grad_file=dict( - argstr='-grad %s', - extensions=None, - xor=['grad_fsl'], - ), - grad_fsl=dict( - argstr='-fslgrad %s %s', - xor=['grad_file'], - ), + args=dict(argstr="%s",), + bval_scale=dict(argstr="-bvalue_scaling %s",), + environ=dict(nohash=True, usedefault=True,), + grad_file=dict(argstr="-grad %s", extensions=None, xor=["grad_fsl"],), + grad_fsl=dict(argstr="-fslgrad %s %s", xor=["grad_file"],), image_size=dict( - argstr='-size %d,%d,%d', - mandatory=True, - xor=['voxel_size', 'scale_factor'], - ), - in_bval=dict(extensions=None, ), - in_bvec=dict( - argstr='-fslgrad %s %s', - extensions=None, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - interpolation=dict( - argstr='-interp %s', - usedefault=True, - ), - nthreads=dict( - argstr='-nthreads %d', - nohash=True, + argstr="-size %d,%d,%d", mandatory=True, xor=["voxel_size", "scale_factor"], ), + in_bval=dict(extensions=None,), + in_bvec=dict(argstr="-fslgrad %s %s", extensions=None,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + interpolation=dict(argstr="-interp %s", usedefault=True,), + nthreads=dict(argstr="-nthreads 
%d", nohash=True,), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, keep_extension=True, - name_source=['in_file'], - name_template='%s_resized', + name_source=["in_file"], + name_template="%s_resized", position=-1, ), scale_factor=dict( - argstr='-scale %g,%g,%g', - mandatory=True, - xor=['image_size', 'voxel_size'], + argstr="-scale %g,%g,%g", mandatory=True, xor=["image_size", "voxel_size"], ), voxel_size=dict( - argstr='-voxel %g,%g,%g', + argstr="-voxel %g,%g,%g", mandatory=True, - xor=['image_size', 'scale_factor'], + xor=["image_size", "scale_factor"], ), ) inputs = MRResize.input_spec() @@ -67,8 +39,10 @@ def test_MRResize_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MRResize_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = MRResize.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_MRTrix3Base.py b/nipype/interfaces/mrtrix3/tests/test_auto_MRTrix3Base.py index ed91143ab8..a5042e58d9 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_MRTrix3Base.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_MRTrix3Base.py @@ -4,11 +4,7 @@ def test_MRTrix3Base_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), environ=dict(nohash=True, usedefault=True,), ) inputs = MRTrix3Base.input_spec() diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_Mesh2PVE.py b/nipype/interfaces/mrtrix3/tests/test_auto_Mesh2PVE.py index 92a5349d04..602d3c0228 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_Mesh2PVE.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_Mesh2PVE.py @@ -4,42 +4,24 @@ def test_Mesh2PVE_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), - in_first=dict( - argstr='-first %s', - extensions=None, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), + in_first=dict(argstr="-first %s", extensions=None,), out_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - usedefault=True, - ), - reference=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, + argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, ), + reference=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), ) inputs = Mesh2PVE.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Mesh2PVE_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Mesh2PVE.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_ReplaceFSwithFIRST.py b/nipype/interfaces/mrtrix3/tests/test_auto_ReplaceFSwithFIRST.py index 4a78432f5e..802f2fd64a 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_ReplaceFSwithFIRST.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_ReplaceFSwithFIRST.py @@ -4,34 +4,13 @@ def test_ReplaceFSwithFIRST_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - 
nohash=True, - usedefault=True, - ), - in_config=dict( - argstr='%s', - extensions=None, - position=-2, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-4, - ), - in_t1w=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-3, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_config=dict(argstr="%s", extensions=None, position=-2,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-4,), + in_t1w=dict(argstr="%s", extensions=None, mandatory=True, position=-3,), out_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - usedefault=True, + argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, ), ) inputs = ReplaceFSwithFIRST.input_spec() @@ -39,8 +18,10 @@ def test_ReplaceFSwithFIRST_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ReplaceFSwithFIRST_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ReplaceFSwithFIRST.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_ResponseSD.py b/nipype/interfaces/mrtrix3/tests/test_auto_ResponseSD.py index fee3dce67e..b35f6529e7 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_ResponseSD.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_ResponseSD.py @@ -4,90 +4,35 @@ def test_ResponseSD_inputs(): input_map = dict( - algorithm=dict( - argstr='%s', - mandatory=True, - position=1, - ), - args=dict(argstr='%s', ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), - csf_file=dict( - argstr='%s', - extensions=None, - position=-1, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - gm_file=dict( - argstr='%s', - extensions=None, - position=-2, - ), - grad_file=dict( - argstr='-grad %s', - extensions=None, - xor=['grad_fsl'], - ), - grad_fsl=dict( - argstr='-fslgrad %s %s', - xor=['grad_file'], - ), - in_bval=dict(extensions=None, ), - in_bvec=dict( - argstr='-fslgrad %s %s', - extensions=None, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-5, - ), - in_mask=dict( - argstr='-mask %s', - extensions=None, - ), - max_sh=dict( - argstr='-lmax %s', - sep=',', - ), - mtt_file=dict( - argstr='%s', - extensions=None, - position=-4, - ), - nthreads=dict( - argstr='-nthreads %d', - nohash=True, - ), - wm_file=dict( - argstr='%s', - extensions=None, - position=-3, - usedefault=True, - ), + algorithm=dict(argstr="%s", mandatory=True, position=1,), + args=dict(argstr="%s",), + bval_scale=dict(argstr="-bvalue_scaling %s",), + csf_file=dict(argstr="%s", extensions=None, position=-1,), + environ=dict(nohash=True, usedefault=True,), + gm_file=dict(argstr="%s", extensions=None, position=-2,), + grad_file=dict(argstr="-grad %s", extensions=None, xor=["grad_fsl"],), + grad_fsl=dict(argstr="-fslgrad %s %s", xor=["grad_file"],), + in_bval=dict(extensions=None,), + in_bvec=dict(argstr="-fslgrad %s %s", extensions=None,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-5,), + in_mask=dict(argstr="-mask %s", extensions=None,), + max_sh=dict(argstr="-lmax %s", sep=",",), + mtt_file=dict(argstr="%s", extensions=None, position=-4,), + nthreads=dict(argstr="-nthreads %d", nohash=True,), + wm_file=dict(argstr="%s", extensions=None, position=-3, usedefault=True,), ) inputs = ResponseSD.input_spec() for 
key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ResponseSD_outputs(): output_map = dict( - csf_file=dict( - argstr='%s', - extensions=None, - ), - gm_file=dict( - argstr='%s', - extensions=None, - ), - wm_file=dict( - argstr='%s', - extensions=None, - ), + csf_file=dict(argstr="%s", extensions=None,), + gm_file=dict(argstr="%s", extensions=None,), + wm_file=dict(argstr="%s", extensions=None,), ) outputs = ResponseSD.output_spec() diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_TCK2VTK.py b/nipype/interfaces/mrtrix3/tests/test_auto_TCK2VTK.py index d841a3fc6f..2c72dee012 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_TCK2VTK.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_TCK2VTK.py @@ -4,43 +4,23 @@ def test_TCK2VTK_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - nthreads=dict( - argstr='-nthreads %d', - nohash=True, - ), - out_file=dict( - argstr='%s', - extensions=None, - position=-1, - usedefault=True, - ), - reference=dict( - argstr='-image %s', - extensions=None, - ), - voxel=dict( - argstr='-image %s', - extensions=None, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + nthreads=dict(argstr="-nthreads %d", nohash=True,), + out_file=dict(argstr="%s", extensions=None, position=-1, usedefault=True,), + reference=dict(argstr="-image %s", extensions=None,), + voxel=dict(argstr="-image %s", extensions=None,), ) inputs = TCK2VTK.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TCK2VTK_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = TCK2VTK.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_TensorMetrics.py b/nipype/interfaces/mrtrix3/tests/test_auto_TensorMetrics.py index 45da9dbf1c..be6736cecb 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_TensorMetrics.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_TensorMetrics.py @@ -4,55 +4,30 @@ def test_TensorMetrics_inputs(): input_map = dict( - args=dict(argstr='%s', ), - component=dict( - argstr='-num %s', - sep=',', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - ), - in_mask=dict( - argstr='-mask %s', - extensions=None, - ), - modulate=dict(argstr='-modulate %s', ), - out_adc=dict( - argstr='-adc %s', - extensions=None, - ), - out_eval=dict( - argstr='-value %s', - extensions=None, - ), - out_evec=dict( - argstr='-vector %s', - extensions=None, - ), - out_fa=dict( - argstr='-fa %s', - extensions=None, - ), + args=dict(argstr="%s",), + component=dict(argstr="-num %s", sep=",", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-1,), + in_mask=dict(argstr="-mask %s", extensions=None,), + modulate=dict(argstr="-modulate %s",), + out_adc=dict(argstr="-adc %s", extensions=None,), + out_eval=dict(argstr="-value %s", extensions=None,), + out_evec=dict(argstr="-vector %s", extensions=None,), + 
out_fa=dict(argstr="-fa %s", extensions=None,), ) inputs = TensorMetrics.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TensorMetrics_outputs(): output_map = dict( - out_adc=dict(extensions=None, ), - out_eval=dict(extensions=None, ), - out_evec=dict(extensions=None, ), - out_fa=dict(extensions=None, ), + out_adc=dict(extensions=None,), + out_eval=dict(extensions=None,), + out_evec=dict(extensions=None,), + out_fa=dict(extensions=None,), ) outputs = TensorMetrics.output_spec() diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_Tractography.py b/nipype/interfaces/mrtrix3/tests/test_auto_Tractography.py index f8239c5341..9c1b51d363 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_Tractography.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_Tractography.py @@ -4,126 +4,71 @@ def test_Tractography_inputs(): input_map = dict( - act_file=dict( - argstr='-act %s', - extensions=None, - ), - algorithm=dict( - argstr='-algorithm %s', - usedefault=True, - ), - angle=dict(argstr='-angle %f', ), - args=dict(argstr='%s', ), - backtrack=dict(argstr='-backtrack', ), - bval_scale=dict(argstr='-bvalue_scaling %s', ), - crop_at_gmwmi=dict(argstr='-crop_at_gmwmi', ), - cutoff=dict(argstr='-cutoff %f', ), - cutoff_init=dict(argstr='-initcutoff %f', ), - downsample=dict(argstr='-downsample %f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - grad_file=dict( - argstr='-grad %s', - extensions=None, - xor=['grad_fsl'], - ), - grad_fsl=dict( - argstr='-fslgrad %s %s', - xor=['grad_file'], - ), - in_bval=dict(extensions=None, ), - in_bvec=dict( - argstr='-fslgrad %s %s', - extensions=None, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-2, - ), - init_dir=dict(argstr='-initdirection %f,%f,%f', ), - max_length=dict(argstr='-maxlength %f', ), - max_seed_attempts=dict(argstr='-max_seed_attempts %d', ), - max_tracks=dict(argstr='-maxnum %d', ), - min_length=dict(argstr='-minlength %f', ), - n_samples=dict( - argstr='-samples %d', - usedefault=True, - ), - n_tracks=dict( - argstr='-number %d', - max_ver='0.4', - ), - n_trials=dict(argstr='-trials %d', ), - noprecompt=dict(argstr='-noprecomputed', ), - nthreads=dict( - argstr='-nthreads %d', - nohash=True, - ), + act_file=dict(argstr="-act %s", extensions=None,), + algorithm=dict(argstr="-algorithm %s", usedefault=True,), + angle=dict(argstr="-angle %f",), + args=dict(argstr="%s",), + backtrack=dict(argstr="-backtrack",), + bval_scale=dict(argstr="-bvalue_scaling %s",), + crop_at_gmwmi=dict(argstr="-crop_at_gmwmi",), + cutoff=dict(argstr="-cutoff %f",), + cutoff_init=dict(argstr="-initcutoff %f",), + downsample=dict(argstr="-downsample %f",), + environ=dict(nohash=True, usedefault=True,), + grad_file=dict(argstr="-grad %s", extensions=None, xor=["grad_fsl"],), + grad_fsl=dict(argstr="-fslgrad %s %s", xor=["grad_file"],), + in_bval=dict(extensions=None,), + in_bvec=dict(argstr="-fslgrad %s %s", extensions=None,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=-2,), + init_dir=dict(argstr="-initdirection %f,%f,%f",), + max_length=dict(argstr="-maxlength %f",), + max_seed_attempts=dict(argstr="-max_seed_attempts %d",), + max_tracks=dict(argstr="-maxnum %d",), + min_length=dict(argstr="-minlength %f",), + n_samples=dict(argstr="-samples %d", usedefault=True,), + n_tracks=dict(argstr="-number %d", max_ver="0.4",), + n_trials=dict(argstr="-trials %d",), + 
noprecompt=dict(argstr="-noprecomputed",), + nthreads=dict(argstr="-nthreads %d", nohash=True,), out_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=-1, - usedefault=True, - ), - out_seeds=dict( - argstr='-output_seeds %s', - extensions=None, - usedefault=True, - ), - power=dict(argstr='-power %d', ), - roi_excl=dict(argstr='-exclude %s', ), - roi_incl=dict(argstr='-include %s', ), - roi_mask=dict(argstr='-mask %s', ), - seed_dynamic=dict( - argstr='-seed_dynamic %s', - extensions=None, - ), + argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, + ), + out_seeds=dict(argstr="-output_seeds %s", extensions=None, usedefault=True,), + power=dict(argstr="-power %d",), + roi_excl=dict(argstr="-exclude %s",), + roi_incl=dict(argstr="-include %s",), + roi_mask=dict(argstr="-mask %s",), + seed_dynamic=dict(argstr="-seed_dynamic %s", extensions=None,), seed_gmwmi=dict( - argstr='-seed_gmwmi %s', - extensions=None, - requires=['act_file'], + argstr="-seed_gmwmi %s", extensions=None, requires=["act_file"], ), seed_grid_voxel=dict( - argstr='-seed_grid_per_voxel %s %d', - xor=['seed_image', 'seed_rnd_voxel'], - ), - seed_image=dict( - argstr='-seed_image %s', - extensions=None, - ), - seed_rejection=dict( - argstr='-seed_rejection %s', - extensions=None, + argstr="-seed_grid_per_voxel %s %d", xor=["seed_image", "seed_rnd_voxel"], ), + seed_image=dict(argstr="-seed_image %s", extensions=None,), + seed_rejection=dict(argstr="-seed_rejection %s", extensions=None,), seed_rnd_voxel=dict( - argstr='-seed_random_per_voxel %s %d', - xor=['seed_image', 'seed_grid_voxel'], - ), - seed_sphere=dict(argstr='-seed_sphere %f,%f,%f,%f', ), - select=dict( - argstr='-select %d', - min_ver='3', - ), - sph_trait=dict(argstr='%f,%f,%f,%f', ), - step_size=dict(argstr='-step %f', ), - stop=dict(argstr='-stop', ), - unidirectional=dict(argstr='-unidirectional', ), - use_rk4=dict(argstr='-rk4', ), + argstr="-seed_random_per_voxel %s %d", + xor=["seed_image", "seed_grid_voxel"], + ), + seed_sphere=dict(argstr="-seed_sphere %f,%f,%f,%f",), + select=dict(argstr="-select %d", min_ver="3",), + sph_trait=dict(argstr="%f,%f,%f,%f",), + step_size=dict(argstr="-step %f",), + stop=dict(argstr="-stop",), + unidirectional=dict(argstr="-unidirectional",), + use_rk4=dict(argstr="-rk4",), ) inputs = Tractography.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Tractography_outputs(): output_map = dict( - out_file=dict(extensions=None, ), - out_seeds=dict(extensions=None, ), + out_file=dict(extensions=None,), out_seeds=dict(extensions=None,), ) outputs = Tractography.output_spec() diff --git a/nipype/interfaces/mrtrix3/tracking.py b/nipype/interfaces/mrtrix3/tracking.py index fb7d8e7375..e71d9cd37a 100644 --- a/nipype/interfaces/mrtrix3/tracking.py +++ b/nipype/interfaces/mrtrix3/tracking.py @@ -10,155 +10,217 @@ class TractographyInputSpec(MRTrix3BaseInputSpec): sph_trait = traits.Tuple( - traits.Float, - traits.Float, - traits.Float, - traits.Float, - argstr='%f,%f,%f,%f') + traits.Float, traits.Float, traits.Float, traits.Float, argstr="%f,%f,%f,%f" + ) in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='input file to be processed') + desc="input file to be processed", + ) out_file = File( - 'tracked.tck', - argstr='%s', + "tracked.tck", + argstr="%s", mandatory=True, position=-1, usedefault=True, - desc='output file 
containing tracks') + desc="output file containing tracks", + ) algorithm = traits.Enum( - 'iFOD2', - 'FACT', - 'iFOD1', - 'Nulldist', - 'SD_Stream', - 'Tensor_Det', - 'Tensor_Prob', + "iFOD2", + "FACT", + "iFOD1", + "Nulldist", + "SD_Stream", + "Tensor_Det", + "Tensor_Prob", usedefault=True, - argstr='-algorithm %s', - desc='tractography algorithm to be used') + argstr="-algorithm %s", + desc="tractography algorithm to be used", + ) # ROIs processing options roi_incl = traits.Either( File(exists=True), sph_trait, - argstr='-include %s', - desc=('specify an inclusion region of interest, streamlines must' - ' traverse ALL inclusion regions to be accepted')) + argstr="-include %s", + desc=( + "specify an inclusion region of interest, streamlines must" + " traverse ALL inclusion regions to be accepted" + ), + ) roi_excl = traits.Either( File(exists=True), sph_trait, - argstr='-exclude %s', - desc=('specify an exclusion region of interest, streamlines that' - ' enter ANY exclude region will be discarded')) + argstr="-exclude %s", + desc=( + "specify an exclusion region of interest, streamlines that" + " enter ANY exclude region will be discarded" + ), + ) roi_mask = traits.Either( File(exists=True), sph_trait, - argstr='-mask %s', - desc=('specify a masking region of interest. If defined,' 'streamlines exiting the mask will be truncated')) + argstr="-mask %s", + desc=( + "specify a masking region of interest. If defined, " + "streamlines exiting the mask will be truncated" + ), + ) # Streamlines tractography options step_size = traits.Float( - argstr='-step %f', - desc=('set the step size of the algorithm in mm (default is 0.1' - ' x voxelsize; for iFOD2: 0.5 x voxelsize)')) + argstr="-step %f", + desc=( + "set the step size of the algorithm in mm (default is 0.1" + " x voxelsize; for iFOD2: 0.5 x voxelsize)" + ), + ) angle = traits.Float( - argstr='-angle %f', - desc=('set the maximum angle between successive steps (default ' - 'is 90deg x stepsize / voxelsize)')) + argstr="-angle %f", + desc=( + "set the maximum angle between successive steps (default " + "is 90deg x stepsize / voxelsize)" + ), + ) n_tracks = traits.Int( - argstr='-number %d', - max_ver='0.4', - desc=('set the desired number of tracks. The program will continue' - ' to generate tracks until this number of tracks have been ' - 'selected and written to the output file')) + argstr="-number %d", + max_ver="0.4", + desc=( + "set the desired number of tracks. The program will continue" + " to generate tracks until this number of tracks have been " + "selected and written to the output file" + ), + ) select = traits.Int( - argstr='-select %d', - min_ver='3', - desc=('set the desired number of tracks. The program will continue' - ' to generate tracks until this number of tracks have been ' - 'selected and written to the output file')) + argstr="-select %d", + min_ver="3", + desc=( + "set the desired number of tracks. The program will continue" + " to generate tracks until this number of tracks have been " + "selected and written to the output file" + ), + ) max_tracks = traits.Int( - argstr='-maxnum %d', - desc=('set the maximum number of tracks to generate. The program ' - 'will not generate more tracks than this number, even if ' - 'the desired number of tracks hasn\'t yet been reached ' - '(default is 100 x number)')) + argstr="-maxnum %d", + desc=( + "set the maximum number of tracks to generate. 
The program " + "will not generate more tracks than this number, even if " + "the desired number of tracks hasn't yet been reached " + "(default is 100 x number)" + ), + ) max_length = traits.Float( - argstr='-maxlength %f', - desc=('set the maximum length of any track in mm (default is ' - '100 x voxelsize)')) + argstr="-maxlength %f", + desc=( + "set the maximum length of any track in mm (default is " "100 x voxelsize)" + ), + ) min_length = traits.Float( - argstr='-minlength %f', - desc=('set the minimum length of any track in mm (default is ' - '5 x voxelsize)')) + argstr="-minlength %f", + desc=( + "set the minimum length of any track in mm (default is " "5 x voxelsize)" + ), + ) cutoff = traits.Float( - argstr='-cutoff %f', - desc=('set the FA or FOD amplitude cutoff for terminating ' - 'tracks (default is 0.1)')) + argstr="-cutoff %f", + desc=( + "set the FA or FOD amplitude cutoff for terminating " + "tracks (default is 0.1)" + ), + ) cutoff_init = traits.Float( - argstr='-initcutoff %f', - desc=('set the minimum FA or FOD amplitude for initiating ' - 'tracks (default is the same as the normal cutoff)')) + argstr="-initcutoff %f", + desc=( + "set the minimum FA or FOD amplitude for initiating " + "tracks (default is the same as the normal cutoff)" + ), + ) n_trials = traits.Int( - argstr='-trials %d', - desc=('set the maximum number of sampling trials at each point' - ' (only used for probabilistic tracking)')) + argstr="-trials %d", + desc=( + "set the maximum number of sampling trials at each point" + " (only used for probabilistic tracking)" + ), + ) unidirectional = traits.Bool( - argstr='-unidirectional', - desc=('track from the seed point in one direction only ' - '(default is to track in both directions)')) + argstr="-unidirectional", + desc=( + "track from the seed point in one direction only " + "(default is to track in both directions)" + ), + ) init_dir = traits.Tuple( traits.Float, traits.Float, traits.Float, - argstr='-initdirection %f,%f,%f', - desc=('specify an initial direction for the tracking (this ' - 'should be supplied as a vector of 3 comma-separated values')) + argstr="-initdirection %f,%f,%f", + desc=( + "specify an initial direction for the tracking (this " + "should be supplied as a vector of 3 comma-separated values)" + ), + ) noprecompt = traits.Bool( - argstr='-noprecomputed', - desc=('do NOT pre-compute legendre polynomial values. Warning: this ' - 'will slow down the algorithm by a factor of approximately 4')) + argstr="-noprecomputed", + desc=( + "do NOT pre-compute legendre polynomial values. 
Warning: this " + "will slow down the algorithm by a factor of approximately 4" + ), + ) power = traits.Int( - argstr='-power %d', - desc=('raise the FOD to the power specified (default is 1/nsamples)')) + argstr="-power %d", + desc=("raise the FOD to the power specified (default is 1/nsamples)"), + ) n_samples = traits.Int( - 4, usedefault=True, - argstr='-samples %d', - desc=('set the number of FOD samples to take per step for the 2nd ' - 'order (iFOD2) method')) + 4, + usedefault=True, + argstr="-samples %d", + desc=( + "set the number of FOD samples to take per step for the 2nd " + "order (iFOD2) method" + ), + ) use_rk4 = traits.Bool( - argstr='-rk4', - desc=('use 4th-order Runge-Kutta integration (slower, but eliminates' - ' curvature overshoot in 1st-order deterministic methods)')) + argstr="-rk4", + desc=( + "use 4th-order Runge-Kutta integration (slower, but eliminates" + " curvature overshoot in 1st-order deterministic methods)" + ), + ) stop = traits.Bool( - argstr='-stop', - desc=('stop propagating a streamline once it has traversed all ' - 'include regions')) + argstr="-stop", + desc=( + "stop propagating a streamline once it has traversed all " "include regions" + ), + ) downsample = traits.Float( - argstr='-downsample %f', - desc='downsample the generated streamlines to reduce output file size') + argstr="-downsample %f", + desc="downsample the generated streamlines to reduce output file size", + ) # Anatomically-Constrained Tractography options act_file = File( exists=True, - argstr='-act %s', - desc=('use the Anatomically-Constrained Tractography framework during' - ' tracking; provided image must be in the 5TT ' - '(five - tissue - type) format')) + argstr="-act %s", + desc=( + "use the Anatomically-Constrained Tractography framework during" + " tracking; provided image must be in the 5TT " + "(five-tissue-type) format" + ), + ) backtrack = traits.Bool(argstr="-backtrack", desc="allow tracks to be truncated") crop_at_gmwmi = traits.Bool( - argstr='-crop_at_gmwmi', - desc=('crop streamline endpoints more ' - 'precisely as they cross the GM-WM interface')) + argstr="-crop_at_gmwmi", + desc=( + "crop streamline endpoints more " + "precisely as they cross the GM-WM interface" + ), + ) # Tractography seeding options seed_sphere = traits.Tuple( @@ -166,64 +228,85 @@ class TractographyInputSpec(MRTrix3BaseInputSpec): traits.Float, traits.Float, traits.Float, - argstr='-seed_sphere %f,%f,%f,%f', - desc='spherical seed') + argstr="-seed_sphere %f,%f,%f,%f", + desc="spherical seed", + ) seed_image = File( exists=True, - argstr='-seed_image %s', - desc='seed streamlines entirely at random within mask') + argstr="-seed_image %s", + desc="seed streamlines entirely at random within mask", + ) seed_rnd_voxel = traits.Tuple( File(exists=True), traits.Int(), - argstr='-seed_random_per_voxel %s %d', - xor=['seed_image', 'seed_grid_voxel'], - desc=('seed a fixed number of streamlines per voxel in a mask ' - 'image; random placement of seeds in each voxel')) + argstr="-seed_random_per_voxel %s %d", + xor=["seed_image", "seed_grid_voxel"], + desc=( + "seed a fixed number of streamlines per voxel in a mask " + "image; random placement of seeds in each voxel" + ), + ) seed_grid_voxel = traits.Tuple( File(exists=True), traits.Int(), - argstr='-seed_grid_per_voxel %s %d', - xor=['seed_image', 'seed_rnd_voxel'], - desc=('seed a fixed number of streamlines per voxel in a mask ' - 'image; place seeds on a 3D 
mesh grid (grid_size argument ' - 'is per axis; so a grid_size of 3 results in 27 seeds per' - ' voxel)')) + argstr="-seed_grid_per_voxel %s %d", + xor=["seed_image", "seed_rnd_voxel"], + desc=( + "seed a fixed number of streamlines per voxel in a mask " + "image; place seeds on a 3D mesh grid (grid_size argument " + "is per axis; so a grid_size of 3 results in 27 seeds per" + " voxel)" + ), + ) seed_rejection = File( exists=True, - argstr='-seed_rejection %s', - desc=('seed from an image using rejection sampling (higher ' - 'values = more probable to seed from')) + argstr="-seed_rejection %s", + desc=( + "seed from an image using rejection sampling (higher " + "values = more probable to seed from)" + ), + ) seed_gmwmi = File( exists=True, - argstr='-seed_gmwmi %s', - requires=['act_file'], - desc=('seed from the grey matter - white matter interface (only ' - 'valid if using ACT framework)')) + argstr="-seed_gmwmi %s", + requires=["act_file"], + desc=( + "seed from the grey matter - white matter interface (only " + "valid if using ACT framework)" + ), + ) seed_dynamic = File( exists=True, - argstr='-seed_dynamic %s', - desc=('determine seed points dynamically using the SIFT model ' - '(must not provide any other seeding mechanism). Note that' - ' while this seeding mechanism improves the distribution of' - ' reconstructed streamlines density, it should NOT be used ' - 'as a substitute for the SIFT method itself.')) + argstr="-seed_dynamic %s", + desc=( + "determine seed points dynamically using the SIFT model " + "(must not provide any other seeding mechanism). Note that" + " while this seeding mechanism improves the distribution of" + " reconstructed streamlines density, it should NOT be used " + "as a substitute for the SIFT method itself." + ), + ) max_seed_attempts = traits.Int( - argstr='-max_seed_attempts %d', - desc=('set the maximum number of times that the tracking ' - 'algorithm should attempt to find an appropriate tracking' - ' direction from a given seed point')) + argstr="-max_seed_attempts %d", + desc=( + "set the maximum number of times that the tracking " + "algorithm should attempt to find an appropriate tracking" + " direction from a given seed point" + ), + ) out_seeds = File( - 'out_seeds.nii.gz', usedefault=True, - argstr='-output_seeds %s', - desc=('output the seed location of all successful streamlines to' - ' a file')) + "out_seeds.nii.gz", + usedefault=True, + argstr="-output_seeds %s", + desc=("output the seed location of all successful streamlines to" " a file"), + ) class TractographyOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output filtered tracks') + out_file = File(exists=True, desc="the output tracks") out_seeds = File( - desc=('output the seed location of all successful' - ' streamlines to a file')) + desc=("output the seed location of all successful" " streamlines to a file") + ) class Tractography(MRTrix3Base): @@ -273,18 +356,18 @@ class Tractography(MRTrix3Base): >>> tk.run() # doctest: +SKIP """ - _cmd = 'tckgen' + _cmd = "tckgen" input_spec = TractographyInputSpec output_spec = TractographyOutputSpec def _format_arg(self, name, trait_spec, value): - if 'roi_' in name and isinstance(value, tuple): - value = ['%f' % v for v in value] - return trait_spec.argstr % ','.join(value) + if "roi_" in name and isinstance(value, tuple): + value = ["%f" % v for v in value] + return trait_spec.argstr % ",".join(value) return super(Tractography, self)._format_arg(name, trait_spec, value) def _list_outputs(self): outputs = 
self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs diff --git a/nipype/interfaces/mrtrix3/utils.py b/nipype/interfaces/mrtrix3/utils.py index 52f5caab64..d13b5d0ce7 100644 --- a/nipype/interfaces/mrtrix3/utils.py +++ b/nipype/interfaces/mrtrix3/utils.py @@ -4,29 +4,38 @@ import os.path as op -from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec, - File, InputMultiPath, isdefined) +from ..base import ( + CommandLineInputSpec, + CommandLine, + traits, + TraitedSpec, + File, + InputMultiPath, + isdefined, +) from .base import MRTrix3BaseInputSpec, MRTrix3Base class BrainMaskInputSpec(MRTrix3BaseInputSpec): in_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='input diffusion weighted images') + desc="input diffusion weighted images", + ) out_file = File( - 'brainmask.mif', - argstr='%s', + "brainmask.mif", + argstr="%s", mandatory=True, position=-1, usedefault=True, - desc='output brain mask') + desc="output brain mask", + ) class BrainMaskOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output response file') + out_file = File(exists=True, desc="the output brain mask") class BrainMask(CommandLine): @@ -45,45 +54,45 @@ class BrainMask(CommandLine): >>> bmsk.run() # doctest: +SKIP """ - _cmd = 'dwi2mask' + _cmd = "dwi2mask" input_spec = BrainMaskInputSpec output_spec = BrainMaskOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class Mesh2PVEInputSpec(CommandLineInputSpec): in_file = File( - exists=True, - argstr='%s', - mandatory=True, - position=-3, - desc='input mesh') + exists=True, argstr="%s", mandatory=True, position=-3, desc="input mesh" + ) reference = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=-2, - desc='input reference image') + desc="input reference image", + ) in_first = File( exists=True, - argstr='-first %s', - desc='indicates that the mesh file is provided by FSL FIRST') + argstr="-first %s", + desc="indicates that the mesh file is provided by FSL FIRST", + ) out_file = File( - 'mesh2volume.nii.gz', - argstr='%s', + "mesh2volume.nii.gz", + argstr="%s", mandatory=True, position=-1, usedefault=True, - desc='output file containing SH coefficients') + desc="output file containing partial volume fractions", + ) class Mesh2PVEOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='the output response file') + out_file = File(exists=True, desc="the output partial volume image") class Mesh2PVE(CommandLine): @@ -104,37 +113,34 @@ class Mesh2PVE(CommandLine): >>> m2p.run() # doctest: +SKIP """ - _cmd = 'mesh2pve' + _cmd = "mesh2pve" input_spec = Mesh2PVEInputSpec output_spec = Mesh2PVEOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class Generate5ttInputSpec(MRTrix3BaseInputSpec): algorithm = traits.Enum( - 'fsl', - 'gif', - 'freesurfer', - argstr='%s', + "fsl", + "gif", + "freesurfer", + argstr="%s", position=-3, mandatory=True, - desc='tissue segmentation algorithm') + desc="tissue segmentation algorithm", + ) in_file = File( - exists=True, - argstr='%s', - mandatory=True, - position=-2, - desc='input image') - out_file = File( - argstr='%s', mandatory=True, position=-1, desc='output 
image') + exists=True, argstr="%s", mandatory=True, position=-2, desc="input image" + ) + out_file = File(argstr="%s", mandatory=True, position=-1, desc="output image") class Generate5ttOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='output image') + out_file = File(exists=True, desc="output image") class Generate5tt(MRTrix3Base): @@ -155,56 +161,56 @@ class Generate5tt(MRTrix3Base): >>> gen5tt.run() # doctest: +SKIP """ - _cmd = '5ttgen' + _cmd = "5ttgen" input_spec = Generate5ttInputSpec output_spec = Generate5ttOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class TensorMetricsInputSpec(CommandLineInputSpec): in_file = File( - exists=True, - argstr='%s', - mandatory=True, - position=-1, - desc='input DTI image') - - out_fa = File(argstr='-fa %s', desc='output FA file') - out_adc = File(argstr='-adc %s', desc='output ADC file') - out_evec = File( - argstr='-vector %s', desc='output selected eigenvector(s) file') - out_eval = File( - argstr='-value %s', desc='output selected eigenvalue(s) file') + exists=True, argstr="%s", mandatory=True, position=-1, desc="input DTI image" + ) + + out_fa = File(argstr="-fa %s", desc="output FA file") + out_adc = File(argstr="-adc %s", desc="output ADC file") + out_evec = File(argstr="-vector %s", desc="output selected eigenvector(s) file") + out_eval = File(argstr="-value %s", desc="output selected eigenvalue(s) file") component = traits.List( [1], usedefault=True, - argstr='-num %s', - sep=',', - desc=('specify the desired eigenvalue/eigenvector(s). Note that ' - 'several eigenvalues can be specified as a number sequence')) + argstr="-num %s", + sep=",", + desc=( + "specify the desired eigenvalue/eigenvector(s). 
Note that " + "several eigenvalues can be specified as a number sequence" + ), + ) in_mask = File( exists=True, - argstr='-mask %s', - desc=('only perform computation within the specified binary' - ' brain mask image')) + argstr="-mask %s", + desc=( + "only perform computation within the specified binary" " brain mask image" + ), + ) modulate = traits.Enum( - 'FA', - 'none', - 'eval', - argstr='-modulate %s', - desc=('how to modulate the magnitude of the' - ' eigenvectors')) + "FA", + "none", + "eval", + argstr="-modulate %s", + desc=("how to modulate the magnitude of the" " eigenvectors"), + ) class TensorMetricsOutputSpec(TraitedSpec): - out_fa = File(desc='output FA file') - out_adc = File(desc='output ADC file') - out_evec = File(desc='output selected eigenvector(s) file') - out_eval = File(desc='output selected eigenvalue(s) file') + out_fa = File(desc="output FA file") + out_adc = File(desc="output ADC file") + out_evec = File(desc="output selected eigenvector(s) file") + out_eval = File(desc="output selected eigenvalue(s) file") class TensorMetrics(CommandLine): @@ -224,7 +230,7 @@ class TensorMetrics(CommandLine): >>> comp.run() # doctest: +SKIP """ - _cmd = 'tensor2metric' + _cmd = "tensor2metric" input_spec = TensorMetricsInputSpec output_spec = TensorMetricsOutputSpec @@ -240,127 +246,130 @@ def _list_outputs(self): class ComputeTDIInputSpec(CommandLineInputSpec): in_file = File( - exists=True, - argstr='%s', - mandatory=True, - position=-2, - desc='input tractography') + exists=True, argstr="%s", mandatory=True, position=-2, desc="input tractography" + ) out_file = File( - 'tdi.mif', - argstr='%s', - usedefault=True, - position=-1, - desc='output TDI file') + "tdi.mif", argstr="%s", usedefault=True, position=-1, desc="output TDI file" + ) reference = File( exists=True, - argstr='-template %s', - desc='a reference' - 'image to be used as template') + argstr="-template %s", + desc="a reference" "image to be used as template", + ) vox_size = traits.List( - traits.Int, argstr='-vox %s', sep=',', desc='voxel dimensions') + traits.Int, argstr="-vox %s", sep=",", desc="voxel dimensions" + ) data_type = traits.Enum( - 'float', - 'unsigned int', - argstr='-datatype %s', - desc='specify output image data type') - use_dec = traits.Bool(argstr='-dec', desc='perform mapping in DEC space') + "float", + "unsigned int", + argstr="-datatype %s", + desc="specify output image data type", + ) + use_dec = traits.Bool(argstr="-dec", desc="perform mapping in DEC space") dixel = File( - argstr='-dixel %s', - desc='map streamlines to' - 'dixels within each voxel. Directions are stored as' - 'azimuth elevation pairs.') + argstr="-dixel %s", + desc="map streamlines to" + "dixels within each voxel. 
Directions are stored as" + "azimuth elevation pairs.", + ) max_tod = traits.Int( - argstr='-tod %d', - desc='generate a Track Orientation ' - 'Distribution (TOD) in each voxel.') + argstr="-tod %d", + desc="generate a Track Orientation " "Distribution (TOD) in each voxel.", + ) contrast = traits.Enum( - 'tdi', - 'length', - 'invlength', - 'scalar_map', - 'scalar_map_conut', - 'fod_amp', - 'curvature', - argstr='-constrast %s', - desc='define the desired ' - 'form of contrast for the output image') + "tdi", + "length", + "invlength", + "scalar_map", + "scalar_map_conut", + "fod_amp", + "curvature", + argstr="-constrast %s", + desc="define the desired " "form of contrast for the output image", + ) in_map = File( exists=True, - argstr='-image %s', - desc='provide the' - 'scalar image map for generating images with ' - '\'scalar_map\' contrasts, or the SHs image for fod_amp') + argstr="-image %s", + desc="provide the" + "scalar image map for generating images with " + "'scalar_map' contrasts, or the SHs image for fod_amp", + ) stat_vox = traits.Enum( - 'sum', - 'min', - 'mean', - 'max', - argstr='-stat_vox %s', - desc='define the statistic for choosing the final' - 'voxel intesities for a given contrast') + "sum", + "min", + "mean", + "max", + argstr="-stat_vox %s", + desc="define the statistic for choosing the final" + "voxel intesities for a given contrast", + ) stat_tck = traits.Enum( - 'mean', - 'sum', - 'min', - 'max', - 'median', - 'mean_nonzero', - 'gaussian', - 'ends_min', - 'ends_mean', - 'ends_max', - 'ends_prod', - argstr='-stat_tck %s', - desc='define the statistic for choosing ' - 'the contribution to be made by each streamline as a function of' - ' the samples taken along their lengths.') + "mean", + "sum", + "min", + "max", + "median", + "mean_nonzero", + "gaussian", + "ends_min", + "ends_mean", + "ends_max", + "ends_prod", + argstr="-stat_tck %s", + desc="define the statistic for choosing " + "the contribution to be made by each streamline as a function of" + " the samples taken along their lengths.", + ) fwhm_tck = traits.Float( - argstr='-fwhm_tck %f', - desc='define the statistic for choosing the' - ' contribution to be made by each streamline as a function of the ' - 'samples taken along their lengths') + argstr="-fwhm_tck %f", + desc="define the statistic for choosing the" + " contribution to be made by each streamline as a function of the " + "samples taken along their lengths", + ) map_zero = traits.Bool( - argstr='-map_zero', - desc='if a streamline has zero contribution based ' - 'on the contrast & statistic, typically it is not mapped; use this ' - 'option to still contribute to the map even if this is the case ' - '(these non-contributing voxels can then influence the mean value in ' - 'each voxel of the map)') + argstr="-map_zero", + desc="if a streamline has zero contribution based " + "on the contrast & statistic, typically it is not mapped; use this " + "option to still contribute to the map even if this is the case " + "(these non-contributing voxels can then influence the mean value in " + "each voxel of the map)", + ) upsample = traits.Int( - argstr='-upsample %d', - desc='upsample the tracks by' - ' some ratio using Hermite interpolation before ' - 'mappping') + argstr="-upsample %d", + desc="upsample the tracks by" + " some ratio using Hermite interpolation before " + "mappping", + ) precise = traits.Bool( - argstr='-precise', - desc='use a more precise streamline mapping ' - 'strategy, that accurately quantifies the length through each voxel ' - '(these 
lengths are then taken into account during TWI calculation)') + argstr="-precise", + desc="use a more precise streamline mapping " + "strategy, that accurately quantifies the length through each voxel " + "(these lengths are then taken into account during TWI calculation)", + ) ends_only = traits.Bool( - argstr='-ends_only', - desc='only map the streamline' - ' endpoints to the image') + argstr="-ends_only", desc="only map the streamline" " endpoints to the image" + ) tck_weights = File( exists=True, - argstr='-tck_weights_in %s', - desc='specify' - ' a text scalar file containing the streamline weights') + argstr="-tck_weights_in %s", + desc="specify" " a text scalar file containing the streamline weights", + ) nthreads = traits.Int( - argstr='-nthreads %d', - desc='number of threads. if zero, the number' - ' of available cpus will be used', - nohash=True) + argstr="-nthreads %d", + desc="number of threads. if zero, the number" " of available cpus will be used", + nohash=True, + ) class ComputeTDIOutputSpec(TraitedSpec): - out_file = File(desc='output TDI file') + out_file = File(desc="output TDI file") class ComputeTDI(MRTrix3Base): @@ -417,51 +426,47 @@ class ComputeTDI(MRTrix3Base): >>> tdi.run() # doctest: +SKIP """ - _cmd = 'tckmap' + _cmd = "tckmap" input_spec = ComputeTDIInputSpec output_spec = ComputeTDIOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class TCK2VTKInputSpec(CommandLineInputSpec): in_file = File( - exists=True, - argstr='%s', - mandatory=True, - position=-2, - desc='input tractography') + exists=True, argstr="%s", mandatory=True, position=-2, desc="input tractography" + ) out_file = File( - 'tracks.vtk', - argstr='%s', - usedefault=True, - position=-1, - desc='output VTK file') + "tracks.vtk", argstr="%s", usedefault=True, position=-1, desc="output VTK file" + ) reference = File( exists=True, - argstr='-image %s', - desc='if specified, the properties of' - ' this image will be used to convert track point positions from real ' - '(scanner) coordinates into image coordinates (in mm).') + argstr="-image %s", + desc="if specified, the properties of" + " this image will be used to convert track point positions from real " + "(scanner) coordinates into image coordinates (in mm).", + ) voxel = File( exists=True, - argstr='-image %s', - desc='if specified, the properties of' - ' this image will be used to convert track point positions from real ' - '(scanner) coordinates into image coordinates.') + argstr="-image %s", + desc="if specified, the properties of" + " this image will be used to convert track point positions from real " + "(scanner) coordinates into image coordinates.", + ) nthreads = traits.Int( - argstr='-nthreads %d', - desc='number of threads. if zero, the number' - ' of available cpus will be used', - nohash=True) + argstr="-nthreads %d", + desc="number of threads. 
if zero, the number" " of available cpus will be used", + nohash=True, + ) class TCK2VTKOutputSpec(TraitedSpec): - out_file = File(desc='output VTK file') + out_file = File(desc="output VTK file") class TCK2VTK(MRTrix3Base): @@ -481,38 +486,36 @@ class TCK2VTK(MRTrix3Base): >>> vtk.run() # doctest: +SKIP """ - _cmd = 'tck2vtk' + _cmd = "tck2vtk" input_spec = TCK2VTKInputSpec output_spec = TCK2VTKOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class DWIExtractInputSpec(MRTrix3BaseInputSpec): in_file = File( - exists=True, - argstr='%s', - mandatory=True, - position=-2, - desc='input image') - out_file = File( - argstr='%s', mandatory=True, position=-1, desc='output image') - bzero = traits.Bool(argstr='-bzero', desc='extract b=0 volumes') - nobzero = traits.Bool(argstr='-no_bzero', desc='extract non b=0 volumes') + exists=True, argstr="%s", mandatory=True, position=-2, desc="input image" + ) + out_file = File(argstr="%s", mandatory=True, position=-1, desc="output image") + bzero = traits.Bool(argstr="-bzero", desc="extract b=0 volumes") + nobzero = traits.Bool(argstr="-no_bzero", desc="extract non b=0 volumes") singleshell = traits.Bool( - argstr='-singleshell', desc='extract volumes with a specific shell') + argstr="-singleshell", desc="extract volumes with a specific shell" + ) shell = traits.List( traits.Float, - sep=',', - argstr='-shell %s', - desc='specify one or more gradient shells') + sep=",", + argstr="-shell %s", + desc="specify one or more gradient shells", + ) class DWIExtractOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='output image') + out_file = File(exists=True, desc="output image") class DWIExtract(MRTrix3Base): @@ -534,54 +537,53 @@ class DWIExtract(MRTrix3Base): >>> dwiextract.run() # doctest: +SKIP """ - _cmd = 'dwiextract' + _cmd = "dwiextract" input_spec = DWIExtractInputSpec output_spec = DWIExtractOutputSpec def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = op.abspath(self.inputs.out_file) + outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class MRConvertInputSpec(MRTrix3BaseInputSpec): in_file = File( - exists=True, - argstr='%s', - mandatory=True, - position=-2, - desc='input image') + exists=True, argstr="%s", mandatory=True, position=-2, desc="input image" + ) out_file = File( - 'dwi.mif', - argstr='%s', + "dwi.mif", + argstr="%s", mandatory=True, position=-1, usedefault=True, - desc='output image') + desc="output image", + ) coord = traits.List( traits.Float, - sep=' ', - argstr='-coord %s', - desc='extract data at the specified coordinates') + sep=" ", + argstr="-coord %s", + desc="extract data at the specified coordinates", + ) vox = traits.List( - traits.Float, - sep=',', - argstr='-vox %s', - desc='change the voxel dimensions') + traits.Float, sep=",", argstr="-vox %s", desc="change the voxel dimensions" + ) axes = traits.List( traits.Int, - sep=',', - argstr='-axes %s', - desc='specify the axes that will be used') + sep=",", + argstr="-axes %s", + desc="specify the axes that will be used", + ) scaling = traits.List( traits.Float, - sep=',', - argstr='-scaling %s', - desc='specify the data scaling parameter') + sep=",", + argstr="-scaling %s", + desc="specify the data scaling parameter", + ) class MRConvertOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='output image') + out_file = File(exists=True, desc="output image") class 
MRConvert(MRTrix3Base):
@@ -601,50 +603,46 @@ class MRConvert(MRTrix3Base):
     >>> mrconvert.run()                             # doctest: +SKIP
     """

-    _cmd = 'mrconvert'
+    _cmd = "mrconvert"
     input_spec = MRConvertInputSpec
     output_spec = MRConvertOutputSpec

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['out_file'] = op.abspath(self.inputs.out_file)
+        outputs["out_file"] = op.abspath(self.inputs.out_file)
         return outputs


 class MRMathInputSpec(MRTrix3BaseInputSpec):
     in_file = File(
-        exists=True,
-        argstr='%s',
-        mandatory=True,
-        position=-3,
-        desc='input image')
-    out_file = File(
-        argstr='%s', mandatory=True, position=-1, desc='output image')
+        exists=True, argstr="%s", mandatory=True, position=-3, desc="input image"
+    )
+    out_file = File(argstr="%s", mandatory=True, position=-1, desc="output image")
     operation = traits.Enum(
-        'mean',
-        'median',
-        'sum',
-        'product',
-        'rms',
-        'norm',
-        'var',
-        'std',
-        'min',
-        'max',
-        'absmax',
-        'magmax',
-        argstr='%s',
+        "mean",
+        "median",
+        "sum",
+        "product",
+        "rms",
+        "norm",
+        "var",
+        "std",
+        "min",
+        "max",
+        "absmax",
+        "magmax",
+        argstr="%s",
         position=-2,
         mandatory=True,
-        desc='operation to computer along a specified axis')
+        desc="operation to compute along a specified axis",
+    )
     axis = traits.Int(
-        0,
-        argstr='-axis %d',
-        desc='specfied axis to perform the operation along')
+        0, argstr="-axis %d", desc="specified axis to perform the operation along"
+    )


 class MRMathOutputSpec(TraitedSpec):
-    out_file = File(exists=True, desc='output image')
+    out_file = File(exists=True, desc="output image")


 class MRMath(MRTrix3Base):
@@ -667,67 +665,63 @@ class MRMath(MRTrix3Base):
     >>> mrmath.run()                                # doctest: +SKIP
     """

-    _cmd = 'mrmath'
+    _cmd = "mrmath"
     input_spec = MRMathInputSpec
     output_spec = MRMathOutputSpec

     def _list_outputs(self):
         outputs = self.output_spec().get()
-        outputs['out_file'] = op.abspath(self.inputs.out_file)
+        outputs["out_file"] = op.abspath(self.inputs.out_file)
         return outputs


 class MRResizeInputSpec(MRTrix3BaseInputSpec):
     in_file = File(
-        exists=True,
-        argstr='%s',
-        position=-2,
-        mandatory=True,
-        desc='input DWI image'
+        exists=True, argstr="%s", position=-2, mandatory=True, desc="input DWI image"
     )
     image_size = traits.Tuple(
         (traits.Int, traits.Int, traits.Int),
-        argstr='-size %d,%d,%d',
+        argstr="-size %d,%d,%d",
         mandatory=True,
-        desc='Number of voxels in each dimension of output image',
-        xor=['voxel_size', 'scale_factor'],
+        desc="Number of voxels in each dimension of output image",
+        xor=["voxel_size", "scale_factor"],
     )
     voxel_size = traits.Tuple(
         (traits.Float, traits.Float, traits.Float),
-        argstr='-voxel %g,%g,%g',
+        argstr="-voxel %g,%g,%g",
         mandatory=True,
-        desc='Desired voxel size in mm for the output image',
-        xor=['image_size', 'scale_factor'],
+        desc="Desired voxel size in mm for the output image",
+        xor=["image_size", "scale_factor"],
     )
     scale_factor = traits.Tuple(
         (traits.Float, traits.Float, traits.Float),
-        argstr='-scale %g,%g,%g',
+        argstr="-scale %g,%g,%g",
         mandatory=True,
-        desc='Scale factors to rescale the image by in each dimension',
-        xor=['image_size', 'voxel_size'],
+        desc="Scale factors to rescale the image by in each dimension",
+        xor=["image_size", "voxel_size"],
     )
     interpolation = traits.Enum(
-        'cubic',
-        'nearest',
-        'linear',
-        'sinc',
-        argstr='-interp %s',
+        "cubic",
+        "nearest",
+        "linear",
+        "sinc",
+        argstr="-interp %s",
         usedefault=True,
-        desc='set the interpolation method to use when resizing (choices: '
-        'nearest, linear, cubic, sinc. 
Default: cubic).', + desc="set the interpolation method to use when resizing (choices: " + "nearest, linear, cubic, sinc. Default: cubic).", ) out_file = File( - argstr='%s', - name_template='%s_resized', - name_source=['in_file'], + argstr="%s", + name_template="%s_resized", + name_source=["in_file"], keep_extension=True, position=-1, - desc='the output resized DWI image', + desc="the output resized DWI image", ) class MRResizeOutputSpec(TraitedSpec): - out_file = File(desc='the output resized DWI image', exists=True) + out_file = File(desc="the output resized DWI image", exists=True) class MRResize(MRTrix3Base): @@ -768,6 +762,6 @@ class MRResize(MRTrix3Base): >>> scale_resize.run() # doctest: +SKIP """ - _cmd = 'mrresize' + _cmd = "mrresize" input_spec = MRResizeInputSpec output_spec = MRResizeOutputSpec diff --git a/nipype/interfaces/niftyfit/asl.py b/nipype/interfaces/niftyfit/asl.py index 0d13880e1e..5c23769e8f 100644 --- a/nipype/interfaces/niftyfit/asl.py +++ b/nipype/interfaces/niftyfit/asl.py @@ -11,79 +11,78 @@ class FitAslInputSpec(CommandLineInputSpec): """ Input Spec for FitAsl. """ - desc = 'Filename of the 4D ASL (control/label) source image (mandatory).' + + desc = "Filename of the 4D ASL (control/label) source image (mandatory)." source_file = File( - position=1, - exists=True, - argstr='-source %s', - mandatory=True, - desc=desc) - pasl = traits.Bool(desc='Fit PASL ASL data [default]', argstr='-pasl') - pcasl = traits.Bool(desc='Fit PCASL ASL data', argstr='-pcasl') + position=1, exists=True, argstr="-source %s", mandatory=True, desc=desc + ) + pasl = traits.Bool(desc="Fit PASL ASL data [default]", argstr="-pasl") + pcasl = traits.Bool(desc="Fit PCASL ASL data", argstr="-pcasl") # *** Output options: - desc = 'Filename of the Cerebral Blood Flow map (in ml/100g/min).' + desc = "Filename of the Cerebral Blood Flow map (in ml/100g/min)." cbf_file = File( - name_source=['source_file'], - name_template='%s_cbf.nii.gz', - argstr='-cbf %s', - desc=desc) + name_source=["source_file"], + name_template="%s_cbf.nii.gz", + argstr="-cbf %s", + desc=desc, + ) error_file = File( - name_source=['source_file'], - name_template='%s_error.nii.gz', - argstr='-error %s', - desc='Filename of the CBF error map.') + name_source=["source_file"], + name_template="%s_error.nii.gz", + argstr="-error %s", + desc="Filename of the CBF error map.", + ) syn_file = File( - name_source=['source_file'], - name_template='%s_syn.nii.gz', - argstr='-syn %s', - desc='Filename of the synthetic ASL data.') + name_source=["source_file"], + name_template="%s_syn.nii.gz", + argstr="-syn %s", + desc="Filename of the synthetic ASL data.", + ) # *** Input options (see also fit_qt1 for generic T1 fitting): - desc = 'Filename of the estimated input T1 map (in ms).' - t1map = File(exists=True, argstr='-t1map %s', desc=desc) - desc = 'Filename of the estimated input M0 map.' - m0map = File(exists=True, argstr='-m0map %s', desc=desc) - desc = 'Filename of the estimated input M0 map error.' - m0mape = File(exists=True, argstr='-m0mape %s', desc=desc) - desc = 'Filename of a [1,2,5]s Inversion Recovery volume (T1/M0 fitting \ -carried out internally).' - - ir_volume = File(exists=True, argstr='-IRvolume %s', desc=desc) - desc = 'Output of [1,2,5]s Inversion Recovery fitting.' - ir_output = File(exists=True, argstr='-IRoutput %s', desc=desc) + desc = "Filename of the estimated input T1 map (in ms)." + t1map = File(exists=True, argstr="-t1map %s", desc=desc) + desc = "Filename of the estimated input M0 map." 
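Note: the cbf_file/error_file/syn_file outputs above rely on nipype's name_source/name_template mechanism: when the user leaves them unset, a filename is derived from source_file by substituting its stem into the template. A rough standalone sketch of that substitution (apply_name_template is a hypothetical helper, not NiftyFit's actual code):

    import os.path as op

    def apply_name_template(source_file, template):
        # mimic name_source/name_template: strip the extension(s) from the
        # source image and substitute the stem into the template
        base = op.basename(source_file)
        for ext in (".nii.gz", ".nii", ".mif"):
            if base.endswith(ext):
                base = base[: -len(ext)]
                break
        return template % base

    print(apply_name_template("asl_4d.nii.gz", "%s_cbf.nii.gz"))  # asl_4d_cbf.nii.gz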
+    m0map = File(exists=True, argstr="-m0map %s", desc=desc)
+    desc = "Filename of the estimated input M0 map error."
+    m0mape = File(exists=True, argstr="-m0mape %s", desc=desc)
+    desc = "Filename of a [1,2,5]s Inversion Recovery volume (T1/M0 fitting \
+carried out internally)."
+
+    ir_volume = File(exists=True, argstr="-IRvolume %s", desc=desc)
+    desc = "Output of [1,2,5]s Inversion Recovery fitting."
+    ir_output = File(exists=True, argstr="-IRoutput %s", desc=desc)

     # *** Experimental options (Choose those suitable for the model!):
     mask = File(
-        position=2,
-        exists=True,
-        desc='Filename of image mask.',
-        argstr='-mask %s')
+        position=2, exists=True, desc="Filename of image mask.", argstr="-mask %s"
+    )
     t1_art_cmp = traits.Float(
-        desc='T1 of arterial component [1650ms].', argstr='-T1a %f')
-    desc = 'Single plasma/tissue partition coefficient [0.9ml/g].'
-    plasma_coeff = traits.Float(desc=desc, argstr='-L %f')
-    desc = 'Labelling efficiency [0.99 (pasl), 0.85 (pcasl)], ensure any \
-background suppression pulses are included in -eff'
+        desc="T1 of arterial component [1650ms].", argstr="-T1a %f"
+    )
+    desc = "Single plasma/tissue partition coefficient [0.9ml/g]."
+    plasma_coeff = traits.Float(desc=desc, argstr="-L %f")
+    desc = "Labelling efficiency [0.99 (pasl), 0.85 (pcasl)], ensure any \
+background suppression pulses are included in -eff"

-    eff = traits.Float(desc=desc, argstr='-eff %f')
-    desc = 'Outlier rejection for multi CL volumes (enter z-score threshold \
-(e.g. 2.5)) [off].'
+    eff = traits.Float(desc=desc, argstr="-eff %f")
+    desc = "Outlier rejection for multi CL volumes (enter z-score threshold \
+(e.g. 2.5)) [off]."

-    out = traits.Float(desc=desc, argstr='-out %f')
+    out = traits.Float(desc=desc, argstr="-out %f")

     # *** PCASL options (Choose those suitable for the model!):
-    pld = traits.Float(desc='Post Labelling Delay [2000ms].', argstr='-PLD %f')
-    ldd = traits.Float(desc='Labelling Duration [1800ms].', argstr='-LDD %f')
-    desc = 'Difference in labelling delay per slice [0.0 ms/slice.'
-    dpld = traits.Float(desc=desc, argstr='-dPLD %f')
+    pld = traits.Float(desc="Post Labelling Delay [2000ms].", argstr="-PLD %f")
+    ldd = traits.Float(desc="Labelling Duration [1800ms].", argstr="-LDD %f")
+    desc = "Difference in labelling delay per slice [0.0 ms/slice]."
+    dpld = traits.Float(desc=desc, argstr="-dPLD %f")

     # *** PASL options (Choose those suitable for the model!):
-    t_inv1 = traits.Float(
-        desc='Saturation pulse time [800ms].', argstr='-Tinv1 %f')
-    t_inv2 = traits.Float(desc='Inversion time [2000ms].', argstr='-Tinv2 %f')
-    desc = 'Difference in inversion time per slice [0ms/slice].'
-    dt_inv2 = traits.Float(desc=desc, argstr='-dTinv2 %f')
+    t_inv1 = traits.Float(desc="Saturation pulse time [800ms].", argstr="-Tinv1 %f")
+    t_inv2 = traits.Float(desc="Inversion time [2000ms].", argstr="-Tinv2 %f")
+    desc = "Difference in inversion time per slice [0ms/slice]."
+    dt_inv2 = traits.Float(desc=desc, argstr="-dTinv2 %f")

     # *** Other experimental assumptions:

@@ -91,50 +90,52 @@ class FitAslInputSpec(CommandLineInputSpec):
     # desc = 'Slope and intercept for Arterial Transit Time.'
# ATT = traits.Float(desc=desc, argstr='-ATT %f')

-    gm_t1 = traits.Float(desc='T1 of GM [1150ms].', argstr='-gmT1 %f')
+    gm_t1 = traits.Float(desc="T1 of GM [1150ms].", argstr="-gmT1 %f")
     gm_plasma = traits.Float(
-        desc='Plasma/GM water partition [0.95ml/g].', argstr='-gmL %f')
-    gm_ttt = traits.Float(desc='Time to GM [ATT+0ms].', argstr='-gmTTT %f')
-    wm_t1 = traits.Float(desc='T1 of WM [800ms].', argstr='-wmT1 %f')
+        desc="Plasma/GM water partition [0.95ml/g].", argstr="-gmL %f"
+    )
+    gm_ttt = traits.Float(desc="Time to GM [ATT+0ms].", argstr="-gmTTT %f")
+    wm_t1 = traits.Float(desc="T1 of WM [800ms].", argstr="-wmT1 %f")
     wm_plasma = traits.Float(
-        desc='Plasma/WM water partition [0.82ml/g].', argstr='-wmL %f')
-    wm_ttt = traits.Float(desc='Time to WM [ATT+0ms].', argstr='-wmTTT %f')
+        desc="Plasma/WM water partition [0.82ml/g].", argstr="-wmL %f"
+    )
+    wm_ttt = traits.Float(desc="Time to WM [ATT+0ms].", argstr="-wmTTT %f")

     # *** Segmentation options:
-    desc = 'Filename of the 4D segmentation (in ASL space) for L/T1 \
-estimation and PV correction {WM,GM,CSF}.'
+    desc = "Filename of the 4D segmentation (in ASL space) for L/T1 \
+estimation and PV correction {WM,GM,CSF}."

-    seg = File(exists=True, argstr='-seg %s', desc=desc)
-    desc = 'Use sigmoid to estimate L from T1: L(T1|gmL,wmL) [Off].'
-    sig = traits.Bool(desc=desc, argstr='-sig')
-    desc = 'Simple PV correction (CBF=vg*CBFg + vw*CBFw, with CBFw=f*CBFg) \
-[0.25].'
+    seg = File(exists=True, argstr="-seg %s", desc=desc)
+    desc = "Use sigmoid to estimate L from T1: L(T1|gmL,wmL) [Off]."
+    sig = traits.Bool(desc=desc, argstr="-sig")
+    desc = "Simple PV correction (CBF=vg*CBFg + vw*CBFw, with CBFw=f*CBFg) \
+[0.25]."

-    pv0 = traits.Int(desc=desc, argstr='-pv0 %d')
-    pv2 = traits.Int(desc='In plane PV kernel size [3x3].', argstr='-pv2 %d')
+    pv0 = traits.Int(desc=desc, argstr="-pv0 %d")
+    pv2 = traits.Int(desc="In plane PV kernel size [3x3].", argstr="-pv2 %d")
     pv3 = traits.Tuple(
         traits.Int,
         traits.Int,
         traits.Int,
-        desc='3D kernel size [3x3x1].',
-        argstr='-pv3 %d %d %d')
-    desc = 'Multiply CBF by this value (e.g. if CL are mislabelled use -1.0).'
-    mul = traits.Float(desc=desc, argstr='-mul %f')
-    mulgm = traits.Bool(
-        desc='Multiply CBF by segmentation [Off].', argstr='-sig')
-    desc = 'Set PV threshold for switching off LSQR [O.05].'
-    pv_threshold = traits.Bool(desc=desc, argstr='-pvthreshold')
-    segstyle = traits.Bool(
-        desc='Set CBF as [gm,wm] not [wm,gm].', argstr='-segstyle')
+        desc="3D kernel size [3x3x1].",
+        argstr="-pv3 %d %d %d",
+    )
+    desc = "Multiply CBF by this value (e.g. if CL are mislabelled use -1.0)."
+    mul = traits.Float(desc=desc, argstr="-mul %f")
+    mulgm = traits.Bool(desc="Multiply CBF by segmentation [Off].", argstr="-sig")
+    desc = "Set PV threshold for switching off LSQR [0.05]."
+    pv_threshold = traits.Bool(desc=desc, argstr="-pvthreshold")
+    segstyle = traits.Bool(desc="Set CBF as [gm,wm] not [wm,gm].", argstr="-segstyle")


 class FitAslOutputSpec(TraitedSpec):
     """ Output Spec for FitAsl. """
-    desc = 'Filename of the Cerebral Blood Flow map (in ml/100g/min).'
+
+    desc = "Filename of the Cerebral Blood Flow map (in ml/100g/min)."
     cbf_file = File(exists=True, desc=desc)
-    desc = 'Filename of the CBF error map.'
+    desc = "Filename of the CBF error map."
     error_file = File(exists=True, desc=desc)
-    desc = 'Filename of the synthetic ASL data.'
+    desc = "Filename of the synthetic ASL data."
syn_file = File(exists=True, desc=desc) @@ -158,7 +159,8 @@ class FitAsl(NiftyFitCommand): -syn asl_syn.nii.gz' """ - _cmd = get_custom_path('fit_asl', env_dir='NIFTYFITDIR') + + _cmd = get_custom_path("fit_asl", env_dir="NIFTYFITDIR") input_spec = FitAslInputSpec output_spec = FitAslOutputSpec - _suffix = '_fit_asl' + _suffix = "_fit_asl" diff --git a/nipype/interfaces/niftyfit/base.py b/nipype/interfaces/niftyfit/base.py index 87c1d63825..7af72cd30d 100644 --- a/nipype/interfaces/niftyfit/base.py +++ b/nipype/interfaces/niftyfit/base.py @@ -27,16 +27,17 @@ class NiftyFitCommand(CommandLine): """ Base support interface for NiftyFit commands. """ - _suffix = '_nf' + + _suffix = "_nf" def __init__(self, **inputs): """ Init method calling super. No version to be checked.""" super(NiftyFitCommand, self).__init__(**inputs) def _gen_fname(self, basename, out_dir=None, suffix=None, ext=None): - if basename == '': - msg = 'Unable to generate filename for command %s. ' % self.cmd - msg += 'basename is not set!' + if basename == "": + msg = "Unable to generate filename for command %s. " % self.cmd + msg += "basename is not set!" raise ValueError(msg) _, final_bn, final_ext = split_filename(basename) if out_dir is None: @@ -44,5 +45,5 @@ def _gen_fname(self, basename, out_dir=None, suffix=None, ext=None): if ext is not None: final_ext = ext if suffix is not None: - final_bn = ''.join((final_bn, suffix)) + final_bn = "".join((final_bn, suffix)) return os.path.abspath(os.path.join(out_dir, final_bn + final_ext)) diff --git a/nipype/interfaces/niftyfit/dwi.py b/nipype/interfaces/niftyfit/dwi.py index 900a558fa1..1cfc8826d8 100644 --- a/nipype/interfaces/niftyfit/dwi.py +++ b/nipype/interfaces/niftyfit/dwi.py @@ -11,247 +11,293 @@ class FitDwiInputSpec(CommandLineInputSpec): """ Input Spec for FitDwi. """ + # Inputs options source_file = File( position=1, exists=True, - argstr='-source %s', + argstr="-source %s", mandatory=True, - desc='The source image containing the dwi data.') - desc = 'The file containing the bvalues of the source DWI.' + desc="The source image containing the dwi data.", + ) + desc = "The file containing the bvalues of the source DWI." bval_file = File( - position=2, exists=True, argstr='-bval %s', mandatory=True, desc=desc) - desc = 'The file containing the bvectors of the source DWI.' + position=2, exists=True, argstr="-bval %s", mandatory=True, desc=desc + ) + desc = "The file containing the bvectors of the source DWI." bvec_file = File( - position=3, exists=True, argstr='-bvec %s', mandatory=True, desc=desc) + position=3, exists=True, argstr="-bvec %s", mandatory=True, desc=desc + ) te_file = File( - exists=True, - argstr='-TE %s', - desc='Filename of TEs (ms).', - xor=['te_file']) + exists=True, argstr="-TE %s", desc="Filename of TEs (ms).", xor=["te_file"] + ) te_value = File( - exists=True, - argstr='-TE %s', - desc='Value of TEs (ms).', - xor=['te_file']) - mask_file = File( - exists=True, desc='The image mask', argstr='-mask %s') - desc = 'Filename of parameter priors for -ball and -nod.' - prior_file = File(exists=True, argstr='-prior %s', desc=desc) - desc = 'Rotate the output tensors according to the q/s form of the image \ -(resulting tensors will be in mm coordinates, default: 0).' - - rot_sform_flag = traits.Int(desc=desc, argstr='-rotsform %d') + exists=True, argstr="-TE %s", desc="Value of TEs (ms).", xor=["te_file"] + ) + mask_file = File(exists=True, desc="The image mask", argstr="-mask %s") + desc = "Filename of parameter priors for -ball and -nod." 
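The _gen_fname helper above drives NiftyFit's output naming: it splits the basename, inserts a suffix before the extension, and returns an absolute path in the output directory. A minimal sketch of that behaviour ('dwi.nii.gz' is a hypothetical basename; _gen_fname is a protected helper, used here only for illustration):

    from nipype.interfaces.niftyfit.base import NiftyFitCommand

    cmd = NiftyFitCommand(command="fit_dwi")
    # split_filename() keeps double extensions together, so the suffix is
    # inserted before '.nii.gz' and the result is returned as an absolute
    # path: '<cwd>/dwi_fit_dwi.nii.gz'
    print(cmd._gen_fname("dwi.nii.gz", suffix="_fit_dwi"))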
+ prior_file = File(exists=True, argstr="-prior %s", desc=desc) + desc = "Rotate the output tensors according to the q/s form of the image \ +(resulting tensors will be in mm coordinates, default: 0)." + + rot_sform_flag = traits.Int(desc=desc, argstr="-rotsform %d") # generic output options: error_file = File( - name_source=['source_file'], - name_template='%s_error.nii.gz', - desc='Filename of parameter error maps.', - argstr='-error %s') + name_source=["source_file"], + name_template="%s_error.nii.gz", + desc="Filename of parameter error maps.", + argstr="-error %s", + ) res_file = File( - name_source=['source_file'], - name_template='%s_resmap.nii.gz', - desc='Filename of model residual map.', - argstr='-res %s') + name_source=["source_file"], + name_template="%s_resmap.nii.gz", + desc="Filename of model residual map.", + argstr="-res %s", + ) syn_file = File( - name_source=['source_file'], - name_template='%s_syn.nii.gz', - desc='Filename of synthetic image.', - argstr='-syn %s') + name_source=["source_file"], + name_template="%s_syn.nii.gz", + desc="Filename of synthetic image.", + argstr="-syn %s", + ) nodiff_file = File( - name_source=['source_file'], - name_template='%s_no_diff.nii.gz', - desc='Filename of average no diffusion image.', - argstr='-nodiff %s') + name_source=["source_file"], + name_template="%s_no_diff.nii.gz", + desc="Filename of average no diffusion image.", + argstr="-nodiff %s", + ) # Output options, with templated output names based on the source image mcmap_file = File( - name_source=['source_file'], - name_template='%s_mcmap.nii.gz', - desc='Filename of multi-compartment model parameter map ' - '(-ivim,-ball,-nod)', - argstr='-mcmap %s', - requires=['nodv_flag']) + name_source=["source_file"], + name_template="%s_mcmap.nii.gz", + desc="Filename of multi-compartment model parameter map " "(-ivim,-ball,-nod)", + argstr="-mcmap %s", + requires=["nodv_flag"], + ) # Model Specific Output options: mdmap_file = File( - name_source=['source_file'], - name_template='%s_mdmap.nii.gz', - desc='Filename of MD map/ADC', - argstr='-mdmap %s') + name_source=["source_file"], + name_template="%s_mdmap.nii.gz", + desc="Filename of MD map/ADC", + argstr="-mdmap %s", + ) famap_file = File( - name_source=['source_file'], - name_template='%s_famap.nii.gz', - desc='Filename of FA map', - argstr='-famap %s') + name_source=["source_file"], + name_template="%s_famap.nii.gz", + desc="Filename of FA map", + argstr="-famap %s", + ) v1map_file = File( - name_source=['source_file'], - name_template='%s_v1map.nii.gz', - desc='Filename of PDD map [x,y,z]', - argstr='-v1map %s') + name_source=["source_file"], + name_template="%s_v1map.nii.gz", + desc="Filename of PDD map [x,y,z]", + argstr="-v1map %s", + ) rgbmap_file = File( - name_source=['source_file'], - name_template='%s_rgbmap.nii.gz', - desc='Filename of colour-coded FA map', - argstr='-rgbmap %s', - requires=['dti_flag']) + name_source=["source_file"], + name_template="%s_rgbmap.nii.gz", + desc="Filename of colour-coded FA map", + argstr="-rgbmap %s", + requires=["dti_flag"], + ) - desc = 'Use lower triangular (tenmap2) or diagonal, off-diagonal tensor \ -format' + desc = "Use lower triangular (tenmap2) or diagonal, off-diagonal tensor \ +format" - ten_type = traits.Enum( - 'lower-tri', 'diag-off-diag', desc=desc, usedefault=True) + ten_type = traits.Enum("lower-tri", "diag-off-diag", desc=desc, usedefault=True) tenmap_file = File( - name_source=['source_file'], - name_template='%s_tenmap.nii.gz', - desc='Filename of tensor map 
[diag,offdiag].', - argstr='-tenmap %s', - requires=['dti_flag']) + name_source=["source_file"], + name_template="%s_tenmap.nii.gz", + desc="Filename of tensor map [diag,offdiag].", + argstr="-tenmap %s", + requires=["dti_flag"], + ) tenmap2_file = File( - name_source=['source_file'], - name_template='%s_tenmap2.nii.gz', - desc='Filename of tensor map [lower tri]', - argstr='-tenmap2 %s', - requires=['dti_flag']) + name_source=["source_file"], + name_template="%s_tenmap2.nii.gz", + desc="Filename of tensor map [lower tri]", + argstr="-tenmap2 %s", + requires=["dti_flag"], + ) # Methods options - desc = 'Fit single exponential to non-directional data [default with \ -no b-vectors]' + desc = "Fit single exponential to non-directional data [default with \ +no b-vectors]" mono_flag = traits.Bool( desc=desc, - argstr='-mono', + argstr="-mono", position=4, xor=[ - 'ivim_flag', 'dti_flag', 'ball_flag', 'ballv_flag', 'nod_flag', - 'nodv_flag' - ]) + "ivim_flag", + "dti_flag", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", + ], + ) ivim_flag = traits.Bool( - desc='Fit IVIM model to non-directional data.', - argstr='-ivim', + desc="Fit IVIM model to non-directional data.", + argstr="-ivim", position=4, xor=[ - 'mono_flag', 'dti_flag', 'ball_flag', 'ballv_flag', 'nod_flag', - 'nodv_flag' - ]) - desc = 'Fit the tensor model [default with b-vectors].' + "mono_flag", + "dti_flag", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", + ], + ) + desc = "Fit the tensor model [default with b-vectors]." dti_flag = traits.Bool( desc=desc, - argstr='-dti', + argstr="-dti", position=4, xor=[ - 'mono_flag', 'ivim_flag', 'ball_flag', 'ballv_flag', 'nod_flag', - 'nodv_flag' - ]) + "mono_flag", + "ivim_flag", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", + ], + ) ball_flag = traits.Bool( - desc='Fit the ball and stick model.', - argstr='-ball', + desc="Fit the ball and stick model.", + argstr="-ball", position=4, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'ballv_flag', 'nod_flag', - 'nodv_flag' - ]) - desc = 'Fit the ball and stick model with optimised PDD.' + "mono_flag", + "ivim_flag", + "dti_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", + ], + ) + desc = "Fit the ball and stick model with optimised PDD." ballv_flag = traits.Bool( desc=desc, - argstr='-ballv', + argstr="-ballv", position=4, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'ball_flag', 'nod_flag', - 'nodv_flag' - ]) + "mono_flag", + "ivim_flag", + "dti_flag", + "ball_flag", + "nod_flag", + "nodv_flag", + ], + ) nod_flag = traits.Bool( - desc='Fit the NODDI model', - argstr='-nod', + desc="Fit the NODDI model", + argstr="-nod", position=4, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'ball_flag', 'ballv_flag', - 'nodv_flag' - ]) + "mono_flag", + "ivim_flag", + "dti_flag", + "ball_flag", + "ballv_flag", + "nodv_flag", + ], + ) nodv_flag = traits.Bool( - desc='Fit the NODDI model with optimised PDD', - argstr='-nodv', + desc="Fit the NODDI model with optimised PDD", + argstr="-nodv", position=4, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'ball_flag', 'ballv_flag', - 'nod_flag' - ]) + "mono_flag", + "ivim_flag", + "dti_flag", + "ball_flag", + "ballv_flag", + "nod_flag", + ], + ) # Experimental options - desc = 'Maximum number of non-linear LSQR iterations [100x2 passes])' - maxit_val = traits.Int(desc=desc, argstr='-maxit %d', requires=['gn_flag']) - desc = 'LM parameters (initial value, decrease rate) [100,1.2].' 
+ desc = "Maximum number of non-linear LSQR iterations [100x2 passes])" + maxit_val = traits.Int(desc=desc, argstr="-maxit %d", requires=["gn_flag"]) + desc = "LM parameters (initial value, decrease rate) [100,1.2]." lm_vals = traits.Tuple( - traits.Float, - traits.Float, - argstr='-lm %f %f', - requires=['gn_flag'], - desc=desc) - desc = 'Use Gauss-Newton algorithm [Levenberg-Marquardt].' - gn_flag = traits.Bool(desc=desc, argstr='-gn', xor=['wls_flag']) - desc = 'Use Variational Bayes fitting with known prior (currently \ -identity covariance...).' - - vb_flag = traits.Bool(desc=desc, argstr='-vb') + traits.Float, traits.Float, argstr="-lm %f %f", requires=["gn_flag"], desc=desc + ) + desc = "Use Gauss-Newton algorithm [Levenberg-Marquardt]." + gn_flag = traits.Bool(desc=desc, argstr="-gn", xor=["wls_flag"]) + desc = "Use Variational Bayes fitting with known prior (currently \ +identity covariance...)." + + vb_flag = traits.Bool(desc=desc, argstr="-vb") cov_file = File( exists=True, - desc='Filename of ithe nc*nc covariance matrix [I]', - argstr='-cov %s') - wls_flag = traits.Bool(desc=desc, argstr='-wls', xor=['gn_flag']) - desc = 'Use location-weighted least squares for DTI fitting [3x3 Gaussian]' - swls_val = traits.Float(desc=desc, argstr='-swls %f') - slice_no = traits.Int( - desc='Fit to single slice number.', argstr='-slice %d') + desc="Filename of ithe nc*nc covariance matrix [I]", + argstr="-cov %s", + ) + wls_flag = traits.Bool(desc=desc, argstr="-wls", xor=["gn_flag"]) + desc = "Use location-weighted least squares for DTI fitting [3x3 Gaussian]" + swls_val = traits.Float(desc=desc, argstr="-swls %f") + slice_no = traits.Int(desc="Fit to single slice number.", argstr="-slice %d") voxel = traits.Tuple( traits.Int, traits.Int, traits.Int, - desc='Fit to single voxel only.', - argstr='-voxel %d %d %d') + desc="Fit to single voxel only.", + argstr="-voxel %d %d %d", + ) diso_val = traits.Float( - desc='Isotropic diffusivity for -nod [3e-3]', argstr='-diso %f') + desc="Isotropic diffusivity for -nod [3e-3]", argstr="-diso %f" + ) dpr_val = traits.Float( - desc='Parallel diffusivity for -nod [1.7e-3].', argstr='-dpr %f') - wm_t2_val = traits.Float( - desc='White matter T2 value [80ms].', argstr='-wmT2 %f') - csf_t2_val = traits.Float(desc='CSF T2 value [400ms].', argstr='-csfT2 %f') - desc = 'Threshold for perfusion/diffsuion effects [100].' - perf_thr = traits.Float(desc=desc, argstr='-perfthreshold %f') + desc="Parallel diffusivity for -nod [1.7e-3].", argstr="-dpr %f" + ) + wm_t2_val = traits.Float(desc="White matter T2 value [80ms].", argstr="-wmT2 %f") + csf_t2_val = traits.Float(desc="CSF T2 value [400ms].", argstr="-csfT2 %f") + desc = "Threshold for perfusion/diffsuion effects [100]." 
+    perf_thr = traits.Float(desc=desc, argstr="-perfthreshold %f")

     # MCMC options:
     mcout = File(
-        name_source=['source_file'],
-        name_template='%s_mcout.txt',
-        desc='Filename of mc samples (ascii text file)',
-        argstr='-mcout %s')
+        name_source=["source_file"],
+        name_template="%s_mcout.txt",
+        desc="Filename of mc samples (ascii text file)",
+        argstr="-mcout %s",
+    )
     mcsamples = traits.Int(
-        desc='Number of samples to keep [100].', argstr='-mcsamples %d')
+        desc="Number of samples to keep [100].", argstr="-mcsamples %d"
+    )
     mcmaxit = traits.Int(
-        desc='Number of iterations to run [10,000].', argstr='-mcmaxit %d')
+        desc="Number of iterations to run [10,000].", argstr="-mcmaxit %d"
+    )
     acceptance = traits.Float(
-        desc='Fraction of iterations to accept [0.23].',
-        argstr='-accpetance %f')
+        desc="Fraction of iterations to accept [0.23].", argstr="-acceptance %f"
+    )


 class FitDwiOutputSpec(TraitedSpec):
     """ Output Spec for FitDwi. """
-    error_file = File(desc='Filename of parameter error maps')
-    res_file = File(desc='Filename of model residual map')
-    syn_file = File(desc='Filename of synthetic image')
-    nodiff_file = File(desc='Filename of average no diffusion image.')
-    mdmap_file = File(desc='Filename of MD map/ADC')
-    famap_file = File(desc='Filename of FA map')
-    v1map_file = File(desc='Filename of PDD map [x,y,z]')
-    rgbmap_file = File(desc='Filename of colour FA map')
-    tenmap_file = File(desc='Filename of tensor map')
-    tenmap2_file = File(desc='Filename of tensor map [lower tri]')
+    error_file = File(desc="Filename of parameter error maps")
+    res_file = File(desc="Filename of model residual map")
+    syn_file = File(desc="Filename of synthetic image")
+    nodiff_file = File(desc="Filename of average no diffusion image.")
+    mdmap_file = File(desc="Filename of MD map/ADC")
+    famap_file = File(desc="Filename of FA map")
+    v1map_file = File(desc="Filename of PDD map [x,y,z]")
+    rgbmap_file = File(desc="Filename of colour FA map")
+    tenmap_file = File(desc="Filename of tensor map")
+    tenmap2_file = File(desc="Filename of tensor map [lower tri]")

-    mcmap_file = File(desc='Filename of multi-compartment model '
-                      'parameter map (-ivim,-ball,-nod).')
-    mcout = File(desc='Filename of mc samples (ascii text file)')
+    mcmap_file = File(
+        desc="Filename of multi-compartment model " "parameter map (-ivim,-ball,-nod)."
+    )
+    mcout = File(desc="Filename of mc samples (ascii text file)")


 class FitDwi(NiftyFitCommand):
@@ -281,174 +327,231 @@ class FitDwi(NiftyFitCommand):
     -v1map dwi_v1map.nii.gz'
     """

-    _cmd = get_custom_path('fit_dwi', env_dir='NIFTYFITDIR')
+
+    _cmd = get_custom_path("fit_dwi", env_dir="NIFTYFITDIR")
     input_spec = FitDwiInputSpec
     output_spec = FitDwiOutputSpec
-    _suffix = '_fit_dwi'
+    _suffix = "_fit_dwi"

     def _format_arg(self, name, trait_spec, value):
-        if name == 'tenmap_file' and self.inputs.ten_type != 'diag-off-diag':
-            return ''
-        if name == 'tenmap2_file' and self.inputs.ten_type != 'lower-tri':
-            return ''
+        if name == "tenmap_file" and self.inputs.ten_type != "diag-off-diag":
+            return ""
+        if name == "tenmap2_file" and self.inputs.ten_type != "lower-tri":
+            return ""
         return super(FitDwi, self)._format_arg(name, trait_spec, value)


 class DwiToolInputSpec(CommandLineInputSpec):
     """ Input Spec for DwiTool. """
-    desc = 'The source image containing the fitted model.'
+
+    desc = "The source image containing the fitted model."
source_file = File( - position=1, - exists=True, - desc=desc, - argstr='-source %s', - mandatory=True) - desc = 'The file containing the bvalues of the source DWI.' + position=1, exists=True, desc=desc, argstr="-source %s", mandatory=True + ) + desc = "The file containing the bvalues of the source DWI." bval_file = File( - position=2, exists=True, desc=desc, argstr='-bval %s', mandatory=True) - desc = 'The file containing the bvectors of the source DWI.' - bvec_file = File( - position=3, exists=True, desc=desc, argstr='-bvec %s') + position=2, exists=True, desc=desc, argstr="-bval %s", mandatory=True + ) + desc = "The file containing the bvectors of the source DWI." + bvec_file = File(position=3, exists=True, desc=desc, argstr="-bvec %s") b0_file = File( position=4, exists=True, - desc='The B0 image corresponding to the source DWI', - argstr='-b0 %s') - mask_file = File( - position=5, exists=True, desc='The image mask', argstr='-mask %s') + desc="The B0 image corresponding to the source DWI", + argstr="-b0 %s", + ) + mask_file = File(position=5, exists=True, desc="The image mask", argstr="-mask %s") # Output options, with templated output names based on the source image - desc = 'Filename of multi-compartment model parameter map \ -(-ivim,-ball,-nod)' + desc = "Filename of multi-compartment model parameter map \ +(-ivim,-ball,-nod)" mcmap_file = File( - name_source=['source_file'], - name_template='%s_mcmap.nii.gz', + name_source=["source_file"], + name_template="%s_mcmap.nii.gz", desc=desc, - argstr='-mcmap %s') - desc = 'Filename of synthetic image. Requires: bvec_file/b0_file.' + argstr="-mcmap %s", + ) + desc = "Filename of synthetic image. Requires: bvec_file/b0_file." syn_file = File( - name_source=['source_file'], - name_template='%s_syn.nii.gz', + name_source=["source_file"], + name_template="%s_syn.nii.gz", desc=desc, - argstr='-syn %s', - requires=['bvec_file', 'b0_file']) + argstr="-syn %s", + requires=["bvec_file", "b0_file"], + ) mdmap_file = File( - name_source=['source_file'], - name_template='%s_mdmap.nii.gz', - desc='Filename of MD map/ADC', - argstr='-mdmap %s') + name_source=["source_file"], + name_template="%s_mdmap.nii.gz", + desc="Filename of MD map/ADC", + argstr="-mdmap %s", + ) famap_file = File( - name_source=['source_file'], - name_template='%s_famap.nii.gz', - desc='Filename of FA map', - argstr='-famap %s') + name_source=["source_file"], + name_template="%s_famap.nii.gz", + desc="Filename of FA map", + argstr="-famap %s", + ) v1map_file = File( - name_source=['source_file'], - name_template='%s_v1map.nii.gz', - desc='Filename of PDD map [x,y,z]', - argstr='-v1map %s') + name_source=["source_file"], + name_template="%s_v1map.nii.gz", + desc="Filename of PDD map [x,y,z]", + argstr="-v1map %s", + ) rgbmap_file = File( - name_source=['source_file'], - name_template='%s_rgbmap.nii.gz', - desc='Filename of colour FA map.', - argstr='-rgbmap %s') + name_source=["source_file"], + name_template="%s_rgbmap.nii.gz", + desc="Filename of colour FA map.", + argstr="-rgbmap %s", + ) logdti_file = File( - name_source=['source_file'], - name_template='%s_logdti2.nii.gz', - desc='Filename of output logdti map.', - argstr='-logdti2 %s') + name_source=["source_file"], + name_template="%s_logdti2.nii.gz", + desc="Filename of output logdti map.", + argstr="-logdti2 %s", + ) # Methods options - desc = 'Input is a single exponential to non-directional data \ -[default with no b-vectors]' + desc = "Input is a single exponential to non-directional data \ +[default with no b-vectors]" 
mono_flag = traits.Bool( desc=desc, position=6, - argstr='-mono', + argstr="-mono", xor=[ - 'ivim_flag', 'dti_flag', 'dti_flag2', 'ball_flag', 'ballv_flag', - 'nod_flag', 'nodv_flag' - ]) - desc = 'Inputs is an IVIM model to non-directional data.' + "ivim_flag", + "dti_flag", + "dti_flag2", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", + ], + ) + desc = "Inputs is an IVIM model to non-directional data." ivim_flag = traits.Bool( desc=desc, position=6, - argstr='-ivim', + argstr="-ivim", xor=[ - 'mono_flag', 'dti_flag', 'dti_flag2', 'ball_flag', 'ballv_flag', - 'nod_flag', 'nodv_flag' - ]) + "mono_flag", + "dti_flag", + "dti_flag2", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", + ], + ) dti_flag = traits.Bool( - desc='Input is a tensor model diag/off-diag.', + desc="Input is a tensor model diag/off-diag.", position=6, - argstr='-dti', + argstr="-dti", xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag2', 'ball_flag', 'ballv_flag', - 'nod_flag', 'nodv_flag' - ]) + "mono_flag", + "ivim_flag", + "dti_flag2", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", + ], + ) dti_flag2 = traits.Bool( - desc='Input is a tensor model lower triangular', + desc="Input is a tensor model lower triangular", position=6, - argstr='-dti2', + argstr="-dti2", xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'ball_flag', 'ballv_flag', - 'nod_flag', 'nodv_flag' - ]) + "mono_flag", + "ivim_flag", + "dti_flag", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", + ], + ) ball_flag = traits.Bool( - desc='Input is a ball and stick model.', + desc="Input is a ball and stick model.", position=6, - argstr='-ball', + argstr="-ball", xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'dti_flag2', 'ballv_flag', - 'nod_flag', 'nodv_flag' - ]) - desc = 'Input is a ball and stick model with optimised PDD.' + "mono_flag", + "ivim_flag", + "dti_flag", + "dti_flag2", + "ballv_flag", + "nod_flag", + "nodv_flag", + ], + ) + desc = "Input is a ball and stick model with optimised PDD." ballv_flag = traits.Bool( desc=desc, position=6, - argstr='-ballv', + argstr="-ballv", xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'dti_flag2', 'ball_flag', - 'nod_flag', 'nodv_flag' - ]) + "mono_flag", + "ivim_flag", + "dti_flag", + "dti_flag2", + "ball_flag", + "nod_flag", + "nodv_flag", + ], + ) nod_flag = traits.Bool( - desc='Input is a NODDI model', + desc="Input is a NODDI model", position=6, - argstr='-nod', + argstr="-nod", xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'dti_flag2', 'ball_flag', - 'ballv_flag', 'nodv_flag' - ]) + "mono_flag", + "ivim_flag", + "dti_flag", + "dti_flag2", + "ball_flag", + "ballv_flag", + "nodv_flag", + ], + ) nodv_flag = traits.Bool( - desc='Input is a NODDI model with optimised PDD', + desc="Input is a NODDI model with optimised PDD", position=6, - argstr='-nodv', + argstr="-nodv", xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'dti_flag2', 'ball_flag', - 'ballv_flag', 'nod_flag' - ]) + "mono_flag", + "ivim_flag", + "dti_flag", + "dti_flag2", + "ball_flag", + "ballv_flag", + "nod_flag", + ], + ) # Experimental options diso_val = traits.Float( - desc='Isotropic diffusivity for -nod [3e-3]', argstr='-diso %f') + desc="Isotropic diffusivity for -nod [3e-3]", argstr="-diso %f" + ) dpr_val = traits.Float( - desc='Parallel diffusivity for -nod [1.7e-3].', argstr='-dpr %f') + desc="Parallel diffusivity for -nod [1.7e-3].", argstr="-dpr %f" + ) class DwiToolOutputSpec(TraitedSpec): """ Output Spec for DwiTool. 
""" - desc = 'Filename of multi-compartment model parameter map \ -(-ivim,-ball,-nod)' + + desc = "Filename of multi-compartment model parameter map \ +(-ivim,-ball,-nod)" mcmap_file = File(desc=desc) - syn_file = File(desc='Filename of synthetic image') - mdmap_file = File(desc='Filename of MD map/ADC') - famap_file = File(desc='Filename of FA map') - v1map_file = File(desc='Filename of PDD map [x,y,z]') - rgbmap_file = File(desc='Filename of colour FA map') - logdti_file = File(desc='Filename of output logdti map') + syn_file = File(desc="Filename of synthetic image") + mdmap_file = File(desc="Filename of MD map/ADC") + famap_file = File(desc="Filename of FA map") + v1map_file = File(desc="Filename of PDD map [x,y,z]") + rgbmap_file = File(desc="Filename of colour FA map") + logdti_file = File(desc="Filename of output logdti map") class DwiTool(NiftyFitCommand): @@ -480,18 +583,21 @@ class DwiTool(NiftyFitCommand): -syn dwi_syn.nii.gz -v1map dwi_v1map.nii.gz' """ - _cmd = get_custom_path('dwi_tool', env_dir='NIFTYFITDIR') + + _cmd = get_custom_path("dwi_tool", env_dir="NIFTYFITDIR") input_spec = DwiToolInputSpec output_spec = DwiToolOutputSpec - _suffix = '_dwi_tool' + _suffix = "_dwi_tool" def _format_arg(self, name, trait_spec, value): - if name == 'syn_file': - if not isdefined(self.inputs.bvec_file) or \ - not isdefined(self.inputs.b0_file): - return '' - if name in ['logdti_file', 'rgbmap_file']: - if not isdefined(self.inputs.dti_flag) and \ - not isdefined(self.inputs.dti_flag2): - return '' + if name == "syn_file": + if not isdefined(self.inputs.bvec_file) or not isdefined( + self.inputs.b0_file + ): + return "" + if name in ["logdti_file", "rgbmap_file"]: + if not isdefined(self.inputs.dti_flag) and not isdefined( + self.inputs.dti_flag2 + ): + return "" return super(DwiTool, self)._format_arg(name, trait_spec, value) diff --git a/nipype/interfaces/niftyfit/qt1.py b/nipype/interfaces/niftyfit/qt1.py index 9df8034526..9337de2306 100644 --- a/nipype/interfaces/niftyfit/qt1.py +++ b/nipype/interfaces/niftyfit/qt1.py @@ -12,150 +12,149 @@ class FitQt1InputSpec(CommandLineInputSpec): """ Input Spec for FitQt1. """ - desc = 'Filename of the 4D Multi-Echo T1 source image.' + + desc = "Filename of the 4D Multi-Echo T1 source image." source_file = File( - position=1, - exists=True, - desc=desc, - argstr='-source %s', - mandatory=True) + position=1, exists=True, desc=desc, argstr="-source %s", mandatory=True + ) # Output options: t1map_file = File( - name_source=['source_file'], - name_template='%s_t1map.nii.gz', - argstr='-t1map %s', - desc='Filename of the estimated output T1 map (in ms).') + name_source=["source_file"], + name_template="%s_t1map.nii.gz", + argstr="-t1map %s", + desc="Filename of the estimated output T1 map (in ms).", + ) m0map_file = File( - name_source=['source_file'], - name_template='%s_m0map.nii.gz', - argstr='-m0map %s', - desc='Filename of the estimated input M0 map.') - desc = 'Filename of the estimated output multi-parameter map.' + name_source=["source_file"], + name_template="%s_m0map.nii.gz", + argstr="-m0map %s", + desc="Filename of the estimated input M0 map.", + ) + desc = "Filename of the estimated output multi-parameter map." 
mcmap_file = File( - name_source=['source_file'], - name_template='%s_mcmap.nii.gz', - argstr='-mcmap %s', - desc=desc) + name_source=["source_file"], + name_template="%s_mcmap.nii.gz", + argstr="-mcmap %s", + desc=desc, + ) comp_file = File( - name_source=['source_file'], - name_template='%s_comp.nii.gz', - argstr='-comp %s', - desc='Filename of the estimated multi-component T1 map.') - desc = 'Filename of the error map (symmetric matrix, [Diag,OffDiag]).' + name_source=["source_file"], + name_template="%s_comp.nii.gz", + argstr="-comp %s", + desc="Filename of the estimated multi-component T1 map.", + ) + desc = "Filename of the error map (symmetric matrix, [Diag,OffDiag])." error_file = File( - name_source=['source_file'], - name_template='%s_error.nii.gz', - argstr='-error %s', - desc=desc) + name_source=["source_file"], + name_template="%s_error.nii.gz", + argstr="-error %s", + desc=desc, + ) syn_file = File( - name_source=['source_file'], - name_template='%s_syn.nii.gz', - argstr='-syn %s', - desc='Filename of the synthetic ASL data.') + name_source=["source_file"], + name_template="%s_syn.nii.gz", + argstr="-syn %s", + desc="Filename of the synthetic ASL data.", + ) res_file = File( - name_source=['source_file'], - name_template='%s_res.nii.gz', - argstr='-res %s', - desc='Filename of the model fit residuals') + name_source=["source_file"], + name_template="%s_res.nii.gz", + argstr="-res %s", + desc="Filename of the model fit residuals", + ) # Other options: mask = File( - position=2, - exists=True, - desc='Filename of image mask.', - argstr='-mask %s') + position=2, exists=True, desc="Filename of image mask.", argstr="-mask %s" + ) prior = File( - position=3, - exists=True, - desc='Filename of parameter prior.', - argstr='-prior %s') - te_value = traits.Float( - desc='TE Echo Time [0ms!].', argstr='-TE %f', position=4) + position=3, exists=True, desc="Filename of parameter prior.", argstr="-prior %s" + ) + te_value = traits.Float(desc="TE Echo Time [0ms!].", argstr="-TE %f", position=4) tr_value = traits.Float( - desc='TR Repetition Time [10s!].', argstr='-TR %f', position=5) - desc = 'Number of components to fit [1] (currently IR/SR only)' + desc="TR Repetition Time [10s!].", argstr="-TR %f", position=5 + ) + desc = "Number of components to fit [1] (currently IR/SR only)" # set position to be ahead of TIs - nb_comp = traits.Int(desc=desc, position=6, argstr='-nc %d') - desc = 'Set LM parameters (initial value, decrease rate) [100,1.2].' + nb_comp = traits.Int(desc=desc, position=6, argstr="-nc %d") + desc = "Set LM parameters (initial value, decrease rate) [100,1.2]." lm_val = traits.Tuple( - traits.Float, traits.Float, desc=desc, argstr='-lm %f %f', position=7) - desc = 'Use Gauss-Newton algorithm [Levenberg-Marquardt].' - gn_flag = traits.Bool(desc=desc, argstr='-gn', position=8) + traits.Float, traits.Float, desc=desc, argstr="-lm %f %f", position=7 + ) + desc = "Use Gauss-Newton algorithm [Levenberg-Marquardt]." 
+ gn_flag = traits.Bool(desc=desc, argstr="-gn", position=8) slice_no = traits.Int( - desc='Fit to single slice number.', argstr='-slice %d', position=9) + desc="Fit to single slice number.", argstr="-slice %d", position=9 + ) voxel = traits.Tuple( traits.Int, traits.Int, traits.Int, - desc='Fit to single voxel only.', - argstr='-voxel %d %d %d', - position=10) - maxit = traits.Int( - desc='NLSQR iterations [100].', argstr='-maxit %d', position=11) + desc="Fit to single voxel only.", + argstr="-voxel %d %d %d", + position=10, + ) + maxit = traits.Int(desc="NLSQR iterations [100].", argstr="-maxit %d", position=11) # IR options: sr_flag = traits.Bool( - desc='Saturation Recovery fitting [default].', - argstr='-SR', - position=12) + desc="Saturation Recovery fitting [default].", argstr="-SR", position=12 + ) ir_flag = traits.Bool( - desc='Inversion Recovery fitting [default].', - argstr='-IR', - position=13) + desc="Inversion Recovery fitting [default].", argstr="-IR", position=13 + ) tis = traits.List( traits.Float, position=14, - desc='Inversion times for T1 data [1s,2s,5s].', - argstr='-TIs %s', - sep=' ') + desc="Inversion times for T1 data [1s,2s,5s].", + argstr="-TIs %s", + sep=" ", + ) tis_list = File( - exists=True, - argstr='-TIlist %s', - desc='Filename of list of pre-defined TIs.') + exists=True, argstr="-TIlist %s", desc="Filename of list of pre-defined TIs." + ) t1_list = File( - exists=True, - argstr='-T1list %s', - desc='Filename of list of pre-defined T1s') - t1min = traits.Float( - desc='Minimum tissue T1 value [400ms].', argstr='-T1min %f') - t1max = traits.Float( - desc='Maximum tissue T1 value [4000ms].', argstr='-T1max %f') + exists=True, argstr="-T1list %s", desc="Filename of list of pre-defined T1s" + ) + t1min = traits.Float(desc="Minimum tissue T1 value [400ms].", argstr="-T1min %f") + t1max = traits.Float(desc="Maximum tissue T1 value [4000ms].", argstr="-T1max %f") # SPGR options - spgr = traits.Bool(desc='Spoiled Gradient Echo fitting', argstr='-SPGR') - flips = traits.List( - traits.Float, desc='Flip angles', argstr='-flips %s', sep=' ') - desc = 'Filename of list of pre-defined flip angles (deg).' - flips_list = File(exists=True, argstr='-fliplist %s', desc=desc) - desc = 'Filename of B1 estimate for fitting (or include in prior).' - b1map = File(exists=True, argstr='-b1map %s', desc=desc) + spgr = traits.Bool(desc="Spoiled Gradient Echo fitting", argstr="-SPGR") + flips = traits.List(traits.Float, desc="Flip angles", argstr="-flips %s", sep=" ") + desc = "Filename of list of pre-defined flip angles (deg)." + flips_list = File(exists=True, argstr="-fliplist %s", desc=desc) + desc = "Filename of B1 estimate for fitting (or include in prior)." 
+ b1map = File(exists=True, argstr="-b1map %s", desc=desc) # MCMC options: mcout = File( - exists=True, - desc='Filename of mc samples (ascii text file)', - argstr='-mcout %s') + exists=True, desc="Filename of mc samples (ascii text file)", argstr="-mcout %s" + ) mcsamples = traits.Int( - desc='Number of samples to keep [100].', argstr='-mcsamples %d') + desc="Number of samples to keep [100].", argstr="-mcsamples %d" + ) mcmaxit = traits.Int( - desc='Number of iterations to run [10,000].', argstr='-mcmaxit %d') + desc="Number of iterations to run [10,000].", argstr="-mcmaxit %d" + ) acceptance = traits.Float( - desc='Fraction of iterations to accept [0.23].', - argstr='-acceptance %f') + desc="Fraction of iterations to accept [0.23].", argstr="-acceptance %f" + ) class FitQt1OutputSpec(TraitedSpec): """ Output Spec for FitQt1. """ - t1map_file = File(desc='Filename of the estimated output T1 map (in ms)') - m0map_file = File(desc='Filename of the m0 map') - desc = 'Filename of the estimated output multi-parameter map' + + t1map_file = File(desc="Filename of the estimated output T1 map (in ms)") + m0map_file = File(desc="Filename of the m0 map") + desc = "Filename of the estimated output multi-parameter map" mcmap_file = File(desc=desc) - comp_file = File(desc='Filename of the estimated multi-component T1 map.') - desc = 'Filename of the error map (symmetric matrix, [Diag,OffDiag])' + comp_file = File(desc="Filename of the estimated multi-component T1 map.") + desc = "Filename of the error map (symmetric matrix, [Diag,OffDiag])" error_file = File(desc=desc) - syn_file = File(desc='Filename of the synthetic ASL data') - res_file = File(desc='Filename of the model fit residuals') + syn_file = File(desc="Filename of the synthetic ASL data") + res_file = File(desc="Filename of the model fit residuals") class FitQt1(NiftyFitCommand): @@ -180,7 +179,8 @@ class FitQt1(NiftyFitCommand): -res TI4D_res.nii.gz -syn TI4D_syn.nii.gz -t1map TI4D_t1map.nii.gz' """ - _cmd = get_custom_path('fit_qt1', env_dir='NIFTYFITDIR') + + _cmd = get_custom_path("fit_qt1", env_dir="NIFTYFITDIR") input_spec = FitQt1InputSpec output_spec = FitQt1OutputSpec - _suffix = '_fit_qt1' + _suffix = "_fit_qt1" diff --git a/nipype/interfaces/niftyfit/tests/test_asl.py b/nipype/interfaces/niftyfit/tests/test_asl.py index f703555c16..bbcfd4f01a 100644 --- a/nipype/interfaces/niftyfit/tests/test_asl.py +++ b/nipype/interfaces/niftyfit/tests/test_asl.py @@ -11,15 +11,14 @@ from ...niftyreg.tests.test_regutils import no_nifty_tool -@pytest.mark.skipif( - no_nifty_tool(cmd='fit_asl'), reason="niftyfit is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="fit_asl"), reason="niftyfit is not installed") def test_fit_asl(): """ Testing FitAsl interface.""" # Create the test node fit_asl = FitAsl() # Check if the command is properly defined - cmd = get_custom_path('fit_asl', env_dir='NIFTYFIT_DIR') + cmd = get_custom_path("fit_asl", env_dir="NIFTYFIT_DIR") assert fit_asl.cmd == cmd # test raising error with mandatory args absent @@ -28,16 +27,16 @@ def test_fit_asl(): # Tests on the interface: # Runs cbf fitting assuming all tissue is GM! 
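The expected file names asserted just below (asl_cbf.nii.gz, asl_error.nii.gz, asl_syn.nii.gz) are not spelled out anywhere in the test's inputs: they fall out of the name_source/name_template machinery used throughout these input specs, which substitutes the base name of the name_source file, with its compound NIfTI extension stripped, into the template. A minimal sketch of that derivation, where default_output is an illustrative stand-in rather than nipype API:

# Illustrative only: mimics how a name_template such as "%s_cbf.nii.gz" is
# filled in from the name_source file (here "asl.nii.gz").
def default_output(source_file, name_template):
    base = source_file
    for ext in (".nii.gz", ".nii"):  # strip the compound extension first
        if base.endswith(ext):
            base = base[: -len(ext)]
            break
    return name_template % base

assert default_output("asl.nii.gz", "%s_cbf.nii.gz") == "asl_cbf.nii.gz"
assert default_output("asl.nii.gz", "%s_error.nii.gz") == "asl_error.nii.gz"
assert default_output("asl.nii.gz", "%s_syn.nii.gz") == "asl_syn.nii.gz"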
- in_file = example_data('asl.nii.gz') + in_file = example_data("asl.nii.gz") fit_asl.inputs.source_file = in_file - cmd_tmp = '{cmd} -source {in_file} -cbf {cbf} -error {error} -syn {syn}' + cmd_tmp = "{cmd} -source {in_file} -cbf {cbf} -error {error} -syn {syn}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, - cbf='asl_cbf.nii.gz', - error='asl_error.nii.gz', - syn='asl_syn.nii.gz', + cbf="asl_cbf.nii.gz", + error="asl_error.nii.gz", + syn="asl_syn.nii.gz", ) assert fit_asl.cmdline == expected_cmd @@ -46,24 +45,24 @@ def test_fit_asl(): # the segmentation data to fit tissue specific blood flow parameters # (lambda,transit times,T1) fit_asl2 = FitAsl(sig=True) - in_file = example_data('asl.nii.gz') - t1map = example_data('T1map.nii.gz') - seg = example_data('segmentation0.nii.gz') + in_file = example_data("asl.nii.gz") + t1map = example_data("T1map.nii.gz") + seg = example_data("segmentation0.nii.gz") fit_asl2.inputs.source_file = in_file fit_asl2.inputs.t1map = t1map fit_asl2.inputs.seg = seg - cmd_tmp = '{cmd} -source {in_file} -cbf {cbf} -error {error} \ --seg {seg} -sig -syn {syn} -t1map {t1map}' + cmd_tmp = "{cmd} -source {in_file} -cbf {cbf} -error {error} \ +-seg {seg} -sig -syn {syn} -t1map {t1map}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, t1map=t1map, seg=seg, - cbf='asl_cbf.nii.gz', - error='asl_error.nii.gz', - syn='asl_syn.nii.gz', + cbf="asl_cbf.nii.gz", + error="asl_error.nii.gz", + syn="asl_syn.nii.gz", ) assert fit_asl2.cmdline == expected_cmd diff --git a/nipype/interfaces/niftyfit/tests/test_auto_DwiTool.py b/nipype/interfaces/niftyfit/tests/test_auto_DwiTool.py index 9a59243c9e..cf81acdc55 100644 --- a/nipype/interfaces/niftyfit/tests/test_auto_DwiTool.py +++ b/nipype/interfaces/niftyfit/tests/test_auto_DwiTool.py @@ -4,146 +4,163 @@ def test_DwiTool_inputs(): input_map = dict( - args=dict(argstr='%s', ), - b0_file=dict( - argstr='-b0 %s', - extensions=None, - position=4, - ), + args=dict(argstr="%s",), + b0_file=dict(argstr="-b0 %s", extensions=None, position=4,), ball_flag=dict( - argstr='-ball', + argstr="-ball", position=6, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'dti_flag2', - 'ballv_flag', 'nod_flag', 'nodv_flag' + "mono_flag", + "ivim_flag", + "dti_flag", + "dti_flag2", + "ballv_flag", + "nod_flag", + "nodv_flag", ], ), ballv_flag=dict( - argstr='-ballv', + argstr="-ballv", position=6, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'dti_flag2', 'ball_flag', - 'nod_flag', 'nodv_flag' + "mono_flag", + "ivim_flag", + "dti_flag", + "dti_flag2", + "ball_flag", + "nod_flag", + "nodv_flag", ], ), - bval_file=dict( - argstr='-bval %s', - extensions=None, - mandatory=True, - position=2, - ), - bvec_file=dict( - argstr='-bvec %s', - extensions=None, - position=3, - ), - diso_val=dict(argstr='-diso %f', ), - dpr_val=dict(argstr='-dpr %f', ), + bval_file=dict(argstr="-bval %s", extensions=None, mandatory=True, position=2,), + bvec_file=dict(argstr="-bvec %s", extensions=None, position=3,), + diso_val=dict(argstr="-diso %f",), + dpr_val=dict(argstr="-dpr %f",), dti_flag=dict( - argstr='-dti', + argstr="-dti", position=6, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag2', 'ball_flag', - 'ballv_flag', 'nod_flag', 'nodv_flag' + "mono_flag", + "ivim_flag", + "dti_flag2", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", ], ), dti_flag2=dict( - argstr='-dti2', + argstr="-dti2", position=6, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'ball_flag', - 'ballv_flag', 'nod_flag', 'nodv_flag' + "mono_flag", + "ivim_flag", + "dti_flag", + "ball_flag", 
+ "ballv_flag", + "nod_flag", + "nodv_flag", ], ), - environ=dict( - nohash=True, - usedefault=True, - ), + environ=dict(nohash=True, usedefault=True,), famap_file=dict( - argstr='-famap %s', + argstr="-famap %s", extensions=None, - name_source=['source_file'], - name_template='%s_famap.nii.gz', + name_source=["source_file"], + name_template="%s_famap.nii.gz", ), ivim_flag=dict( - argstr='-ivim', + argstr="-ivim", position=6, xor=[ - 'mono_flag', 'dti_flag', 'dti_flag2', 'ball_flag', - 'ballv_flag', 'nod_flag', 'nodv_flag' + "mono_flag", + "dti_flag", + "dti_flag2", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", ], ), logdti_file=dict( - argstr='-logdti2 %s', - extensions=None, - name_source=['source_file'], - name_template='%s_logdti2.nii.gz', - ), - mask_file=dict( - argstr='-mask %s', + argstr="-logdti2 %s", extensions=None, - position=5, + name_source=["source_file"], + name_template="%s_logdti2.nii.gz", ), + mask_file=dict(argstr="-mask %s", extensions=None, position=5,), mcmap_file=dict( - argstr='-mcmap %s', + argstr="-mcmap %s", extensions=None, - name_source=['source_file'], - name_template='%s_mcmap.nii.gz', + name_source=["source_file"], + name_template="%s_mcmap.nii.gz", ), mdmap_file=dict( - argstr='-mdmap %s', + argstr="-mdmap %s", extensions=None, - name_source=['source_file'], - name_template='%s_mdmap.nii.gz', + name_source=["source_file"], + name_template="%s_mdmap.nii.gz", ), mono_flag=dict( - argstr='-mono', + argstr="-mono", position=6, xor=[ - 'ivim_flag', 'dti_flag', 'dti_flag2', 'ball_flag', - 'ballv_flag', 'nod_flag', 'nodv_flag' + "ivim_flag", + "dti_flag", + "dti_flag2", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", ], ), nod_flag=dict( - argstr='-nod', + argstr="-nod", position=6, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'dti_flag2', 'ball_flag', - 'ballv_flag', 'nodv_flag' + "mono_flag", + "ivim_flag", + "dti_flag", + "dti_flag2", + "ball_flag", + "ballv_flag", + "nodv_flag", ], ), nodv_flag=dict( - argstr='-nodv', + argstr="-nodv", position=6, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'dti_flag2', 'ball_flag', - 'ballv_flag', 'nod_flag' + "mono_flag", + "ivim_flag", + "dti_flag", + "dti_flag2", + "ball_flag", + "ballv_flag", + "nod_flag", ], ), rgbmap_file=dict( - argstr='-rgbmap %s', + argstr="-rgbmap %s", extensions=None, - name_source=['source_file'], - name_template='%s_rgbmap.nii.gz', + name_source=["source_file"], + name_template="%s_rgbmap.nii.gz", ), source_file=dict( - argstr='-source %s', - extensions=None, - mandatory=True, - position=1, + argstr="-source %s", extensions=None, mandatory=True, position=1, ), syn_file=dict( - argstr='-syn %s', + argstr="-syn %s", extensions=None, - name_source=['source_file'], - name_template='%s_syn.nii.gz', - requires=['bvec_file', 'b0_file'], + name_source=["source_file"], + name_template="%s_syn.nii.gz", + requires=["bvec_file", "b0_file"], ), v1map_file=dict( - argstr='-v1map %s', + argstr="-v1map %s", extensions=None, - name_source=['source_file'], - name_template='%s_v1map.nii.gz', + name_source=["source_file"], + name_template="%s_v1map.nii.gz", ), ) inputs = DwiTool.input_spec() @@ -151,15 +168,17 @@ def test_DwiTool_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DwiTool_outputs(): output_map = dict( - famap_file=dict(extensions=None, ), - logdti_file=dict(extensions=None, ), - mcmap_file=dict(extensions=None, ), - mdmap_file=dict(extensions=None, ), - 
rgbmap_file=dict(extensions=None, ), - syn_file=dict(extensions=None, ), - v1map_file=dict(extensions=None, ), + famap_file=dict(extensions=None,), + logdti_file=dict(extensions=None,), + mcmap_file=dict(extensions=None,), + mdmap_file=dict(extensions=None,), + rgbmap_file=dict(extensions=None,), + syn_file=dict(extensions=None,), + v1map_file=dict(extensions=None,), ) outputs = DwiTool.output_spec() diff --git a/nipype/interfaces/niftyfit/tests/test_auto_FitAsl.py b/nipype/interfaces/niftyfit/tests/test_auto_FitAsl.py index 7e2401ed7f..567831f9f4 100644 --- a/nipype/interfaces/niftyfit/tests/test_auto_FitAsl.py +++ b/nipype/interfaces/niftyfit/tests/test_auto_FitAsl.py @@ -4,101 +4,75 @@ def test_FitAsl_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), cbf_file=dict( - argstr='-cbf %s', + argstr="-cbf %s", extensions=None, - name_source=['source_file'], - name_template='%s_cbf.nii.gz', - ), - dpld=dict(argstr='-dPLD %f', ), - dt_inv2=dict(argstr='-dTinv2 %f', ), - eff=dict(argstr='-eff %f', ), - environ=dict( - nohash=True, - usedefault=True, + name_source=["source_file"], + name_template="%s_cbf.nii.gz", ), + dpld=dict(argstr="-dPLD %f",), + dt_inv2=dict(argstr="-dTinv2 %f",), + eff=dict(argstr="-eff %f",), + environ=dict(nohash=True, usedefault=True,), error_file=dict( - argstr='-error %s', - extensions=None, - name_source=['source_file'], - name_template='%s_error.nii.gz', - ), - gm_plasma=dict(argstr='-gmL %f', ), - gm_t1=dict(argstr='-gmT1 %f', ), - gm_ttt=dict(argstr='-gmTTT %f', ), - ir_output=dict( - argstr='-IRoutput %s', - extensions=None, - ), - ir_volume=dict( - argstr='-IRvolume %s', - extensions=None, - ), - ldd=dict(argstr='-LDD %f', ), - m0map=dict( - argstr='-m0map %s', - extensions=None, - ), - m0mape=dict( - argstr='-m0mape %s', + argstr="-error %s", extensions=None, + name_source=["source_file"], + name_template="%s_error.nii.gz", ), - mask=dict( - argstr='-mask %s', - extensions=None, - position=2, - ), - mul=dict(argstr='-mul %f', ), - mulgm=dict(argstr='-sig', ), - out=dict(argstr='-out %f', ), - pasl=dict(argstr='-pasl', ), - pcasl=dict(argstr='-pcasl', ), - plasma_coeff=dict(argstr='-L %f', ), - pld=dict(argstr='-PLD %f', ), - pv0=dict(argstr='-pv0 %d', ), - pv2=dict(argstr='-pv2 %d', ), - pv3=dict(argstr='-pv3 %d %d %d', ), - pv_threshold=dict(argstr='-pvthreshold', ), - seg=dict( - argstr='-seg %s', - extensions=None, - ), - segstyle=dict(argstr='-segstyle', ), - sig=dict(argstr='-sig', ), + gm_plasma=dict(argstr="-gmL %f",), + gm_t1=dict(argstr="-gmT1 %f",), + gm_ttt=dict(argstr="-gmTTT %f",), + ir_output=dict(argstr="-IRoutput %s", extensions=None,), + ir_volume=dict(argstr="-IRvolume %s", extensions=None,), + ldd=dict(argstr="-LDD %f",), + m0map=dict(argstr="-m0map %s", extensions=None,), + m0mape=dict(argstr="-m0mape %s", extensions=None,), + mask=dict(argstr="-mask %s", extensions=None, position=2,), + mul=dict(argstr="-mul %f",), + mulgm=dict(argstr="-sig",), + out=dict(argstr="-out %f",), + pasl=dict(argstr="-pasl",), + pcasl=dict(argstr="-pcasl",), + plasma_coeff=dict(argstr="-L %f",), + pld=dict(argstr="-PLD %f",), + pv0=dict(argstr="-pv0 %d",), + pv2=dict(argstr="-pv2 %d",), + pv3=dict(argstr="-pv3 %d %d %d",), + pv_threshold=dict(argstr="-pvthreshold",), + seg=dict(argstr="-seg %s", extensions=None,), + segstyle=dict(argstr="-segstyle",), + sig=dict(argstr="-sig",), source_file=dict( - argstr='-source %s', - extensions=None, - mandatory=True, - position=1, + argstr="-source %s", extensions=None, mandatory=True, 
position=1, ), syn_file=dict( - argstr='-syn %s', + argstr="-syn %s", extensions=None, - name_source=['source_file'], - name_template='%s_syn.nii.gz', + name_source=["source_file"], + name_template="%s_syn.nii.gz", ), - t1_art_cmp=dict(argstr='-T1a %f', ), - t1map=dict( - argstr='-t1map %s', - extensions=None, - ), - t_inv1=dict(argstr='-Tinv1 %f', ), - t_inv2=dict(argstr='-Tinv2 %f', ), - wm_plasma=dict(argstr='-wmL %f', ), - wm_t1=dict(argstr='-wmT1 %f', ), - wm_ttt=dict(argstr='-wmTTT %f', ), + t1_art_cmp=dict(argstr="-T1a %f",), + t1map=dict(argstr="-t1map %s", extensions=None,), + t_inv1=dict(argstr="-Tinv1 %f",), + t_inv2=dict(argstr="-Tinv2 %f",), + wm_plasma=dict(argstr="-wmL %f",), + wm_t1=dict(argstr="-wmT1 %f",), + wm_ttt=dict(argstr="-wmTTT %f",), ) inputs = FitAsl.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FitAsl_outputs(): output_map = dict( - cbf_file=dict(extensions=None, ), - error_file=dict(extensions=None, ), - syn_file=dict(extensions=None, ), + cbf_file=dict(extensions=None,), + error_file=dict(extensions=None,), + syn_file=dict(extensions=None,), ) outputs = FitAsl.output_spec() diff --git a/nipype/interfaces/niftyfit/tests/test_auto_FitDwi.py b/nipype/interfaces/niftyfit/tests/test_auto_FitDwi.py index afb70755fc..57bee972e1 100644 --- a/nipype/interfaces/niftyfit/tests/test_auto_FitDwi.py +++ b/nipype/interfaces/niftyfit/tests/test_auto_FitDwi.py @@ -4,233 +4,218 @@ def test_FitDwi_inputs(): input_map = dict( - acceptance=dict(argstr='-accpetance %f', ), - args=dict(argstr='%s', ), + acceptance=dict(argstr="-accpetance %f",), + args=dict(argstr="%s",), ball_flag=dict( - argstr='-ball', + argstr="-ball", position=4, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'ballv_flag', 'nod_flag', - 'nodv_flag' + "mono_flag", + "ivim_flag", + "dti_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", ], ), ballv_flag=dict( - argstr='-ballv', + argstr="-ballv", position=4, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'ball_flag', 'nod_flag', - 'nodv_flag' + "mono_flag", + "ivim_flag", + "dti_flag", + "ball_flag", + "nod_flag", + "nodv_flag", ], ), - bval_file=dict( - argstr='-bval %s', - extensions=None, - mandatory=True, - position=2, - ), - bvec_file=dict( - argstr='-bvec %s', - extensions=None, - mandatory=True, - position=3, - ), - cov_file=dict( - argstr='-cov %s', - extensions=None, - ), - csf_t2_val=dict(argstr='-csfT2 %f', ), - diso_val=dict(argstr='-diso %f', ), - dpr_val=dict(argstr='-dpr %f', ), + bval_file=dict(argstr="-bval %s", extensions=None, mandatory=True, position=2,), + bvec_file=dict(argstr="-bvec %s", extensions=None, mandatory=True, position=3,), + cov_file=dict(argstr="-cov %s", extensions=None,), + csf_t2_val=dict(argstr="-csfT2 %f",), + diso_val=dict(argstr="-diso %f",), + dpr_val=dict(argstr="-dpr %f",), dti_flag=dict( - argstr='-dti', + argstr="-dti", position=4, xor=[ - 'mono_flag', 'ivim_flag', 'ball_flag', 'ballv_flag', - 'nod_flag', 'nodv_flag' + "mono_flag", + "ivim_flag", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", ], ), - environ=dict( - nohash=True, - usedefault=True, - ), + environ=dict(nohash=True, usedefault=True,), error_file=dict( - argstr='-error %s', + argstr="-error %s", extensions=None, - name_source=['source_file'], - name_template='%s_error.nii.gz', + name_source=["source_file"], + name_template="%s_error.nii.gz", ), famap_file=dict( - argstr='-famap %s', + argstr="-famap %s", 
extensions=None, - name_source=['source_file'], - name_template='%s_famap.nii.gz', - ), - gn_flag=dict( - argstr='-gn', - xor=['wls_flag'], + name_source=["source_file"], + name_template="%s_famap.nii.gz", ), + gn_flag=dict(argstr="-gn", xor=["wls_flag"],), ivim_flag=dict( - argstr='-ivim', + argstr="-ivim", position=4, xor=[ - 'mono_flag', 'dti_flag', 'ball_flag', 'ballv_flag', 'nod_flag', - 'nodv_flag' + "mono_flag", + "dti_flag", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", ], ), - lm_vals=dict( - argstr='-lm %f %f', - requires=['gn_flag'], - ), - mask_file=dict( - argstr='-mask %s', - extensions=None, - ), - maxit_val=dict( - argstr='-maxit %d', - requires=['gn_flag'], - ), + lm_vals=dict(argstr="-lm %f %f", requires=["gn_flag"],), + mask_file=dict(argstr="-mask %s", extensions=None,), + maxit_val=dict(argstr="-maxit %d", requires=["gn_flag"],), mcmap_file=dict( - argstr='-mcmap %s', + argstr="-mcmap %s", extensions=None, - name_source=['source_file'], - name_template='%s_mcmap.nii.gz', - requires=['nodv_flag'], + name_source=["source_file"], + name_template="%s_mcmap.nii.gz", + requires=["nodv_flag"], ), - mcmaxit=dict(argstr='-mcmaxit %d', ), + mcmaxit=dict(argstr="-mcmaxit %d",), mcout=dict( - argstr='-mcout %s', + argstr="-mcout %s", extensions=None, - name_source=['source_file'], - name_template='%s_mcout.txt', + name_source=["source_file"], + name_template="%s_mcout.txt", ), - mcsamples=dict(argstr='-mcsamples %d', ), + mcsamples=dict(argstr="-mcsamples %d",), mdmap_file=dict( - argstr='-mdmap %s', + argstr="-mdmap %s", extensions=None, - name_source=['source_file'], - name_template='%s_mdmap.nii.gz', + name_source=["source_file"], + name_template="%s_mdmap.nii.gz", ), mono_flag=dict( - argstr='-mono', + argstr="-mono", position=4, xor=[ - 'ivim_flag', 'dti_flag', 'ball_flag', 'ballv_flag', 'nod_flag', - 'nodv_flag' + "ivim_flag", + "dti_flag", + "ball_flag", + "ballv_flag", + "nod_flag", + "nodv_flag", ], ), nod_flag=dict( - argstr='-nod', + argstr="-nod", position=4, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'ball_flag', - 'ballv_flag', 'nodv_flag' + "mono_flag", + "ivim_flag", + "dti_flag", + "ball_flag", + "ballv_flag", + "nodv_flag", ], ), nodiff_file=dict( - argstr='-nodiff %s', + argstr="-nodiff %s", extensions=None, - name_source=['source_file'], - name_template='%s_no_diff.nii.gz', + name_source=["source_file"], + name_template="%s_no_diff.nii.gz", ), nodv_flag=dict( - argstr='-nodv', + argstr="-nodv", position=4, xor=[ - 'mono_flag', 'ivim_flag', 'dti_flag', 'ball_flag', - 'ballv_flag', 'nod_flag' + "mono_flag", + "ivim_flag", + "dti_flag", + "ball_flag", + "ballv_flag", + "nod_flag", ], ), - perf_thr=dict(argstr='-perfthreshold %f', ), - prior_file=dict( - argstr='-prior %s', - extensions=None, - ), + perf_thr=dict(argstr="-perfthreshold %f",), + prior_file=dict(argstr="-prior %s", extensions=None,), res_file=dict( - argstr='-res %s', + argstr="-res %s", extensions=None, - name_source=['source_file'], - name_template='%s_resmap.nii.gz', + name_source=["source_file"], + name_template="%s_resmap.nii.gz", ), rgbmap_file=dict( - argstr='-rgbmap %s', + argstr="-rgbmap %s", extensions=None, - name_source=['source_file'], - name_template='%s_rgbmap.nii.gz', - requires=['dti_flag'], + name_source=["source_file"], + name_template="%s_rgbmap.nii.gz", + requires=["dti_flag"], ), - rot_sform_flag=dict(argstr='-rotsform %d', ), - slice_no=dict(argstr='-slice %d', ), + rot_sform_flag=dict(argstr="-rotsform %d",), + slice_no=dict(argstr="-slice %d",), 
source_file=dict( - argstr='-source %s', - extensions=None, - mandatory=True, - position=1, + argstr="-source %s", extensions=None, mandatory=True, position=1, ), - swls_val=dict(argstr='-swls %f', ), + swls_val=dict(argstr="-swls %f",), syn_file=dict( - argstr='-syn %s', - extensions=None, - name_source=['source_file'], - name_template='%s_syn.nii.gz', - ), - te_file=dict( - argstr='-TE %s', + argstr="-syn %s", extensions=None, - xor=['te_file'], + name_source=["source_file"], + name_template="%s_syn.nii.gz", ), - te_value=dict( - argstr='-TE %s', - extensions=None, - xor=['te_file'], - ), - ten_type=dict(usedefault=True, ), + te_file=dict(argstr="-TE %s", extensions=None, xor=["te_file"],), + te_value=dict(argstr="-TE %s", extensions=None, xor=["te_file"],), + ten_type=dict(usedefault=True,), tenmap2_file=dict( - argstr='-tenmap2 %s', + argstr="-tenmap2 %s", extensions=None, - name_source=['source_file'], - name_template='%s_tenmap2.nii.gz', - requires=['dti_flag'], + name_source=["source_file"], + name_template="%s_tenmap2.nii.gz", + requires=["dti_flag"], ), tenmap_file=dict( - argstr='-tenmap %s', + argstr="-tenmap %s", extensions=None, - name_source=['source_file'], - name_template='%s_tenmap.nii.gz', - requires=['dti_flag'], + name_source=["source_file"], + name_template="%s_tenmap.nii.gz", + requires=["dti_flag"], ), v1map_file=dict( - argstr='-v1map %s', + argstr="-v1map %s", extensions=None, - name_source=['source_file'], - name_template='%s_v1map.nii.gz', - ), - vb_flag=dict(argstr='-vb', ), - voxel=dict(argstr='-voxel %d %d %d', ), - wls_flag=dict( - argstr='-wls', - xor=['gn_flag'], + name_source=["source_file"], + name_template="%s_v1map.nii.gz", ), - wm_t2_val=dict(argstr='-wmT2 %f', ), + vb_flag=dict(argstr="-vb",), + voxel=dict(argstr="-voxel %d %d %d",), + wls_flag=dict(argstr="-wls", xor=["gn_flag"],), + wm_t2_val=dict(argstr="-wmT2 %f",), ) inputs = FitDwi.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FitDwi_outputs(): output_map = dict( - error_file=dict(extensions=None, ), - famap_file=dict(extensions=None, ), - mcmap_file=dict(extensions=None, ), - mcout=dict(extensions=None, ), - mdmap_file=dict(extensions=None, ), - nodiff_file=dict(extensions=None, ), - res_file=dict(extensions=None, ), - rgbmap_file=dict(extensions=None, ), - syn_file=dict(extensions=None, ), - tenmap2_file=dict(extensions=None, ), - tenmap_file=dict(extensions=None, ), - v1map_file=dict(extensions=None, ), + error_file=dict(extensions=None,), + famap_file=dict(extensions=None,), + mcmap_file=dict(extensions=None,), + mcout=dict(extensions=None,), + mdmap_file=dict(extensions=None,), + nodiff_file=dict(extensions=None,), + res_file=dict(extensions=None,), + rgbmap_file=dict(extensions=None,), + syn_file=dict(extensions=None,), + tenmap2_file=dict(extensions=None,), + tenmap_file=dict(extensions=None,), + v1map_file=dict(extensions=None,), ) outputs = FitDwi.output_spec() diff --git a/nipype/interfaces/niftyfit/tests/test_auto_FitQt1.py b/nipype/interfaces/niftyfit/tests/test_auto_FitQt1.py index e27aee7da2..be09fc90fc 100644 --- a/nipype/interfaces/niftyfit/tests/test_auto_FitQt1.py +++ b/nipype/interfaces/niftyfit/tests/test_auto_FitQt1.py @@ -4,159 +4,95 @@ def test_FitQt1_inputs(): input_map = dict( - acceptance=dict(argstr='-acceptance %f', ), - args=dict(argstr='%s', ), - b1map=dict( - argstr='-b1map %s', - extensions=None, - ), + acceptance=dict(argstr="-acceptance 
%f",), + args=dict(argstr="%s",), + b1map=dict(argstr="-b1map %s", extensions=None,), comp_file=dict( - argstr='-comp %s', + argstr="-comp %s", extensions=None, - name_source=['source_file'], - name_template='%s_comp.nii.gz', - ), - environ=dict( - nohash=True, - usedefault=True, + name_source=["source_file"], + name_template="%s_comp.nii.gz", ), + environ=dict(nohash=True, usedefault=True,), error_file=dict( - argstr='-error %s', + argstr="-error %s", extensions=None, - name_source=['source_file'], - name_template='%s_error.nii.gz', - ), - flips=dict( - argstr='-flips %s', - sep=' ', - ), - flips_list=dict( - argstr='-fliplist %s', - extensions=None, - ), - gn_flag=dict( - argstr='-gn', - position=8, - ), - ir_flag=dict( - argstr='-IR', - position=13, - ), - lm_val=dict( - argstr='-lm %f %f', - position=7, - ), + name_source=["source_file"], + name_template="%s_error.nii.gz", + ), + flips=dict(argstr="-flips %s", sep=" ",), + flips_list=dict(argstr="-fliplist %s", extensions=None,), + gn_flag=dict(argstr="-gn", position=8,), + ir_flag=dict(argstr="-IR", position=13,), + lm_val=dict(argstr="-lm %f %f", position=7,), m0map_file=dict( - argstr='-m0map %s', - extensions=None, - name_source=['source_file'], - name_template='%s_m0map.nii.gz', - ), - mask=dict( - argstr='-mask %s', + argstr="-m0map %s", extensions=None, - position=2, - ), - maxit=dict( - argstr='-maxit %d', - position=11, + name_source=["source_file"], + name_template="%s_m0map.nii.gz", ), + mask=dict(argstr="-mask %s", extensions=None, position=2,), + maxit=dict(argstr="-maxit %d", position=11,), mcmap_file=dict( - argstr='-mcmap %s', + argstr="-mcmap %s", extensions=None, - name_source=['source_file'], - name_template='%s_mcmap.nii.gz', - ), - mcmaxit=dict(argstr='-mcmaxit %d', ), - mcout=dict( - argstr='-mcout %s', - extensions=None, - ), - mcsamples=dict(argstr='-mcsamples %d', ), - nb_comp=dict( - argstr='-nc %d', - position=6, - ), - prior=dict( - argstr='-prior %s', - extensions=None, - position=3, - ), + name_source=["source_file"], + name_template="%s_mcmap.nii.gz", + ), + mcmaxit=dict(argstr="-mcmaxit %d",), + mcout=dict(argstr="-mcout %s", extensions=None,), + mcsamples=dict(argstr="-mcsamples %d",), + nb_comp=dict(argstr="-nc %d", position=6,), + prior=dict(argstr="-prior %s", extensions=None, position=3,), res_file=dict( - argstr='-res %s', + argstr="-res %s", extensions=None, - name_source=['source_file'], - name_template='%s_res.nii.gz', - ), - slice_no=dict( - argstr='-slice %d', - position=9, + name_source=["source_file"], + name_template="%s_res.nii.gz", ), + slice_no=dict(argstr="-slice %d", position=9,), source_file=dict( - argstr='-source %s', - extensions=None, - mandatory=True, - position=1, - ), - spgr=dict(argstr='-SPGR', ), - sr_flag=dict( - argstr='-SR', - position=12, + argstr="-source %s", extensions=None, mandatory=True, position=1, ), + spgr=dict(argstr="-SPGR",), + sr_flag=dict(argstr="-SR", position=12,), syn_file=dict( - argstr='-syn %s', - extensions=None, - name_source=['source_file'], - name_template='%s_syn.nii.gz', - ), - t1_list=dict( - argstr='-T1list %s', + argstr="-syn %s", extensions=None, + name_source=["source_file"], + name_template="%s_syn.nii.gz", ), + t1_list=dict(argstr="-T1list %s", extensions=None,), t1map_file=dict( - argstr='-t1map %s', - extensions=None, - name_source=['source_file'], - name_template='%s_t1map.nii.gz', - ), - t1max=dict(argstr='-T1max %f', ), - t1min=dict(argstr='-T1min %f', ), - te_value=dict( - argstr='-TE %f', - position=4, - ), - tis=dict( - 
argstr='-TIs %s', - position=14, - sep=' ', - ), - tis_list=dict( - argstr='-TIlist %s', + argstr="-t1map %s", extensions=None, - ), - tr_value=dict( - argstr='-TR %f', - position=5, - ), - voxel=dict( - argstr='-voxel %d %d %d', - position=10, - ), + name_source=["source_file"], + name_template="%s_t1map.nii.gz", + ), + t1max=dict(argstr="-T1max %f",), + t1min=dict(argstr="-T1min %f",), + te_value=dict(argstr="-TE %f", position=4,), + tis=dict(argstr="-TIs %s", position=14, sep=" ",), + tis_list=dict(argstr="-TIlist %s", extensions=None,), + tr_value=dict(argstr="-TR %f", position=5,), + voxel=dict(argstr="-voxel %d %d %d", position=10,), ) inputs = FitQt1.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FitQt1_outputs(): output_map = dict( - comp_file=dict(extensions=None, ), - error_file=dict(extensions=None, ), - m0map_file=dict(extensions=None, ), - mcmap_file=dict(extensions=None, ), - res_file=dict(extensions=None, ), - syn_file=dict(extensions=None, ), - t1map_file=dict(extensions=None, ), + comp_file=dict(extensions=None,), + error_file=dict(extensions=None,), + m0map_file=dict(extensions=None,), + mcmap_file=dict(extensions=None,), + res_file=dict(extensions=None,), + syn_file=dict(extensions=None,), + t1map_file=dict(extensions=None,), ) outputs = FitQt1.output_spec() diff --git a/nipype/interfaces/niftyfit/tests/test_auto_NiftyFitCommand.py b/nipype/interfaces/niftyfit/tests/test_auto_NiftyFitCommand.py index 83d600d26e..e6fb0b0bbb 100644 --- a/nipype/interfaces/niftyfit/tests/test_auto_NiftyFitCommand.py +++ b/nipype/interfaces/niftyfit/tests/test_auto_NiftyFitCommand.py @@ -4,11 +4,7 @@ def test_NiftyFitCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), environ=dict(nohash=True, usedefault=True,), ) inputs = NiftyFitCommand.input_spec() diff --git a/nipype/interfaces/niftyfit/tests/test_dwi.py b/nipype/interfaces/niftyfit/tests/test_dwi.py index 270d9c666a..a726301203 100644 --- a/nipype/interfaces/niftyfit/tests/test_dwi.py +++ b/nipype/interfaces/niftyfit/tests/test_dwi.py @@ -10,15 +10,14 @@ from ...niftyreg.tests.test_regutils import no_nifty_tool -@pytest.mark.skipif( - no_nifty_tool(cmd='fit_dwi'), reason="niftyfit is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="fit_dwi"), reason="niftyfit is not installed") def test_fit_dwi(): """ Testing FitDwi interface.""" # Create a node object fit_dwi = FitDwi() # Check if the command is properly defined - cmd = get_custom_path('fit_dwi', env_dir='NIFTYFITDIR') + cmd = get_custom_path("fit_dwi", env_dir="NIFTYFITDIR") assert fit_dwi.cmd == cmd # test raising error with mandatory args absent @@ -26,47 +25,47 @@ def test_fit_dwi(): fit_dwi.run() # Assign some input data - in_file = example_data('dwi.nii.gz') - bval_file = example_data('bvals') - bvec_file = example_data('bvecs') + in_file = example_data("dwi.nii.gz") + bval_file = example_data("bvals") + bvec_file = example_data("bvecs") fit_dwi.inputs.source_file = in_file fit_dwi.inputs.bval_file = bval_file fit_dwi.inputs.bvec_file = bvec_file fit_dwi.inputs.dti_flag = True - cmd_tmp = '{cmd} -source {in_file} -bval {bval} -bvec {bvec} -dti \ + cmd_tmp = "{cmd} -source {in_file} -bval {bval} -bvec {bvec} -dti \ -error {error} -famap {fa} -mcmap {mc} -mcout {mcout} -mdmap {md} -nodiff \ -{nodiff} -res {res} -rgbmap {rgb} -syn {syn} -tenmap2 {ten2} -v1map 
{v1}' +{nodiff} -res {res} -rgbmap {rgb} -syn {syn} -tenmap2 {ten2} -v1map {v1}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, bval=bval_file, bvec=bvec_file, - error='dwi_error.nii.gz', - fa='dwi_famap.nii.gz', - mc='dwi_mcmap.nii.gz', - md='dwi_mdmap.nii.gz', - nodiff='dwi_no_diff.nii.gz', - res='dwi_resmap.nii.gz', - rgb='dwi_rgbmap.nii.gz', - syn='dwi_syn.nii.gz', - ten2='dwi_tenmap2.nii.gz', - v1='dwi_v1map.nii.gz', - mcout='dwi_mcout.txt') + error="dwi_error.nii.gz", + fa="dwi_famap.nii.gz", + mc="dwi_mcmap.nii.gz", + md="dwi_mdmap.nii.gz", + nodiff="dwi_no_diff.nii.gz", + res="dwi_resmap.nii.gz", + rgb="dwi_rgbmap.nii.gz", + syn="dwi_syn.nii.gz", + ten2="dwi_tenmap2.nii.gz", + v1="dwi_v1map.nii.gz", + mcout="dwi_mcout.txt", + ) assert fit_dwi.cmdline == expected_cmd -@pytest.mark.skipif( - no_nifty_tool(cmd='dwi_tool'), reason="niftyfit is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="dwi_tool"), reason="niftyfit is not installed") def test_dwi_tool(): """ Testing DwiTool interface.""" # Create a node object dwi_tool = DwiTool() # Check if the command is properly defined - cmd = get_custom_path('dwi_tool', env_dir='NIFTYFITDIR') + cmd = get_custom_path("dwi_tool", env_dir="NIFTYFITDIR") assert dwi_tool.cmd == cmd # test raising error with mandatory args absent @@ -74,11 +73,11 @@ def test_dwi_tool(): dwi_tool.run() # Assign some input data - in_file = example_data('dwi.nii.gz') - bval_file = example_data('bvals') - bvec_file = example_data('bvecs') - b0_file = example_data('b0.nii') - mask_file = example_data('mask.nii.gz') + in_file = example_data("dwi.nii.gz") + bval_file = example_data("bvals") + bvec_file = example_data("bvecs") + b0_file = example_data("b0.nii") + mask_file = example_data("mask.nii.gz") dwi_tool.inputs.source_file = in_file dwi_tool.inputs.mask_file = mask_file dwi_tool.inputs.bval_file = bval_file @@ -86,9 +85,9 @@ def test_dwi_tool(): dwi_tool.inputs.b0_file = b0_file dwi_tool.inputs.dti_flag = True - cmd_tmp = '{cmd} -source {in_file} -bval {bval} -bvec {bvec} -b0 {b0} \ + cmd_tmp = "{cmd} -source {in_file} -bval {bval} -bvec {bvec} -b0 {b0} \ -mask {mask} -dti -famap {fa} -logdti2 {log} -mcmap {mc} -mdmap {md} \ --rgbmap {rgb} -syn {syn} -v1map {v1}' +-rgbmap {rgb} -syn {syn} -v1map {v1}" expected_cmd = cmd_tmp.format( cmd=cmd, @@ -97,12 +96,13 @@ def test_dwi_tool(): bvec=bvec_file, b0=b0_file, mask=mask_file, - fa='dwi_famap.nii.gz', - log='dwi_logdti2.nii.gz', - mc='dwi_mcmap.nii.gz', - md='dwi_mdmap.nii.gz', - rgb='dwi_rgbmap.nii.gz', - syn='dwi_syn.nii.gz', - v1='dwi_v1map.nii.gz') + fa="dwi_famap.nii.gz", + log="dwi_logdti2.nii.gz", + mc="dwi_mcmap.nii.gz", + md="dwi_mdmap.nii.gz", + rgb="dwi_rgbmap.nii.gz", + syn="dwi_syn.nii.gz", + v1="dwi_v1map.nii.gz", + ) assert dwi_tool.cmdline == expected_cmd diff --git a/nipype/interfaces/niftyfit/tests/test_qt1.py b/nipype/interfaces/niftyfit/tests/test_qt1.py index 894017e654..9146e7e97f 100644 --- a/nipype/interfaces/niftyfit/tests/test_qt1.py +++ b/nipype/interfaces/niftyfit/tests/test_qt1.py @@ -10,15 +10,14 @@ from ..qt1 import FitQt1 -@pytest.mark.skipif( - no_nifty_tool(cmd='fit_qt1'), reason="niftyfit is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="fit_qt1"), reason="niftyfit is not installed") def test_fit_qt1(): """ Testing FitQt1 interface.""" # Create a node object fit_qt1 = FitQt1() # Check if the command is properly defined - cmd = get_custom_path('fit_qt1', env_dir='NIFTYFITDIR') + cmd = get_custom_path("fit_qt1", env_dir="NIFTYFITDIR") assert fit_qt1.cmd == cmd 
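The cmd checked by this first assertion comes from get_custom_path which, as its definition in nipype/interfaces/niftyreg/base.py further down this diff shows, simply joins an optional environment-variable directory onto the tool name. A quick sketch of that behaviour, using a hypothetical install prefix:

import os

# Hypothetical install prefix; get_custom_path("fit_qt1", env_dir="NIFTYFITDIR")
# evaluates os.path.join(os.getenv("NIFTYFITDIR", ""), "fit_qt1").
os.environ["NIFTYFITDIR"] = "/opt/niftyfit/bin"
assert os.path.join(os.getenv("NIFTYFITDIR", ""), "fit_qt1") == "/opt/niftyfit/bin/fit_qt1"
# With the variable unset, the bare name "fit_qt1" comes back and the command
# is resolved through $PATH at run time.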
# test raising error with mandatory args absent @@ -26,68 +25,68 @@ def test_fit_qt1(): fit_qt1.run() # Regular test: - in_file = example_data('TI4D.nii.gz') + in_file = example_data("TI4D.nii.gz") fit_qt1.inputs.source_file = in_file - cmd_tmp = '{cmd} -source {in_file} -comp {comp} -error {error} -m0map \ -{map0} -mcmap {cmap} -res {res} -syn {syn} -t1map {t1map}' + cmd_tmp = "{cmd} -source {in_file} -comp {comp} -error {error} -m0map \ +{map0} -mcmap {cmap} -res {res} -syn {syn} -t1map {t1map}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, - comp='TI4D_comp.nii.gz', - map0='TI4D_m0map.nii.gz', - error='TI4D_error.nii.gz', - cmap='TI4D_mcmap.nii.gz', - res='TI4D_res.nii.gz', - t1map='TI4D_t1map.nii.gz', - syn='TI4D_syn.nii.gz', + comp="TI4D_comp.nii.gz", + map0="TI4D_m0map.nii.gz", + error="TI4D_error.nii.gz", + cmap="TI4D_mcmap.nii.gz", + res="TI4D_res.nii.gz", + t1map="TI4D_t1map.nii.gz", + syn="TI4D_syn.nii.gz", ) assert fit_qt1.cmdline == expected_cmd # Runs T1 fitting to inversion and saturation recovery data (NLSQR) fit_qt1_2 = FitQt1(tis=[1, 2, 5], ir_flag=True) - in_file = example_data('TI4D.nii.gz') + in_file = example_data("TI4D.nii.gz") fit_qt1_2.inputs.source_file = in_file - cmd_tmp = '{cmd} -source {in_file} -IR -TIs 1.0 2.0 5.0 \ + cmd_tmp = "{cmd} -source {in_file} -IR -TIs 1.0 2.0 5.0 \ -comp {comp} -error {error} -m0map {map0} -mcmap {cmap} -res {res} \ --syn {syn} -t1map {t1map}' +-syn {syn} -t1map {t1map}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, - comp='TI4D_comp.nii.gz', - map0='TI4D_m0map.nii.gz', - error='TI4D_error.nii.gz', - cmap='TI4D_mcmap.nii.gz', - res='TI4D_res.nii.gz', - t1map='TI4D_t1map.nii.gz', - syn='TI4D_syn.nii.gz', + comp="TI4D_comp.nii.gz", + map0="TI4D_m0map.nii.gz", + error="TI4D_error.nii.gz", + cmap="TI4D_mcmap.nii.gz", + res="TI4D_res.nii.gz", + t1map="TI4D_t1map.nii.gz", + syn="TI4D_syn.nii.gz", ) assert fit_qt1_2.cmdline == expected_cmd # Runs T1 fitting to spoiled gradient echo (SPGR) data (NLSQR) fit_qt1_3 = FitQt1(flips=[2, 4, 8], spgr=True) - in_file = example_data('TI4D.nii.gz') + in_file = example_data("TI4D.nii.gz") fit_qt1_3.inputs.source_file = in_file - cmd_tmp = '{cmd} -source {in_file} -comp {comp} -error {error} \ + cmd_tmp = "{cmd} -source {in_file} -comp {comp} -error {error} \ -flips 2.0 4.0 8.0 -m0map {map0} -mcmap {cmap} -res {res} -SPGR -syn {syn} \ --t1map {t1map}' +-t1map {t1map}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, - comp='TI4D_comp.nii.gz', - map0='TI4D_m0map.nii.gz', - error='TI4D_error.nii.gz', - cmap='TI4D_mcmap.nii.gz', - res='TI4D_res.nii.gz', - t1map='TI4D_t1map.nii.gz', - syn='TI4D_syn.nii.gz', + comp="TI4D_comp.nii.gz", + map0="TI4D_m0map.nii.gz", + error="TI4D_error.nii.gz", + cmap="TI4D_mcmap.nii.gz", + res="TI4D_res.nii.gz", + t1map="TI4D_t1map.nii.gz", + syn="TI4D_syn.nii.gz", ) assert fit_qt1_3.cmdline == expected_cmd diff --git a/nipype/interfaces/niftyreg/__init__.py b/nipype/interfaces/niftyreg/__init__.py index 9854ebaea3..2ea7b95b26 100644 --- a/nipype/interfaces/niftyreg/__init__.py +++ b/nipype/interfaces/niftyreg/__init__.py @@ -10,5 +10,11 @@ from .base import get_custom_path from .reg import RegAladin, RegF3D -from .regutils import (RegResample, RegJacobian, RegAverage, RegTools, - RegTransform, RegMeasure) +from .regutils import ( + RegResample, + RegJacobian, + RegAverage, + RegTools, + RegTransform, + RegMeasure, +) diff --git a/nipype/interfaces/niftyreg/base.py b/nipype/interfaces/niftyreg/base.py index 0b1e0c514a..aa343dcfcb 100644 --- 
a/nipype/interfaces/niftyreg/base.py
+++ b/nipype/interfaces/niftyreg/base.py
@@ -22,29 +22,32 @@
 from ..base import CommandLine, CommandLineInputSpec, traits, Undefined
 from ...utils.filemanip import split_filename
-iflogger = logging.getLogger('nipype.interface')
+iflogger = logging.getLogger("nipype.interface")
-def get_custom_path(command, env_dir='NIFTYREGDIR'):
-    return os.path.join(os.getenv(env_dir, ''), command)
+def get_custom_path(command, env_dir="NIFTYREGDIR"):
+    return os.path.join(os.getenv(env_dir, ""), command)
 class NiftyRegCommandInputSpec(CommandLineInputSpec):
     """Input Spec for niftyreg interfaces."""
+
     # Set the number of omp thread to use
     omp_core_val = traits.Int(
-        int(os.environ.get('OMP_NUM_THREADS', '1')),
-        desc='Number of openmp thread to use',
-        argstr='-omp %i',
-        usedefault=True)
+        int(os.environ.get("OMP_NUM_THREADS", "1")),
+        desc="Number of openmp thread to use",
+        argstr="-omp %i",
+        usedefault=True,
+    )
 class NiftyRegCommand(CommandLine):
     """
     Base support interface for NiftyReg commands.
     """
-    _suffix = '_nr'
-    _min_version = '1.5.30'
+
+    _suffix = "_nr"
+    _min_version = "1.5.30"
     input_spec = NiftyRegCommandInputSpec
@@ -55,34 +58,33 @@ def __init__(self, required_version=None, **inputs):
         _version = self.version_from_command()
         if _version:
             _version = _version.decode("utf-8")
-            if self._min_version is not None and \
-               StrictVersion(_version) < StrictVersion(self._min_version):
-                msg = 'A later version of Niftyreg is required (%s < %s)'
+            if self._min_version is not None and StrictVersion(
+                _version
+            ) < StrictVersion(self._min_version):
+                msg = "A later version of Niftyreg is required (%s < %s)"
                 iflogger.warning(msg, _version, self._min_version)
             if required_version is not None:
                 if StrictVersion(_version) != StrictVersion(required_version):
-                    msg = 'The version of NiftyReg differs from the required'
-                    msg += '(%s != %s)'
+                    msg = "The version of NiftyReg differs from the required"
+                    msg += " (%s != %s)"
                     iflogger.warning(msg, _version, self.required_version)
-        self.inputs.on_trait_change(self._omp_update, 'omp_core_val')
-        self.inputs.on_trait_change(self._environ_update, 'environ')
+        self.inputs.on_trait_change(self._omp_update, "omp_core_val")
+        self.inputs.on_trait_change(self._environ_update, "environ")
         self._omp_update()
     def _omp_update(self):
         if self.inputs.omp_core_val:
-            self.inputs.environ['OMP_NUM_THREADS'] = \
-                str(self.inputs.omp_core_val)
+            self.inputs.environ["OMP_NUM_THREADS"] = str(self.inputs.omp_core_val)
             self.num_threads = self.inputs.omp_core_val
         else:
-            if 'OMP_NUM_THREADS' in self.inputs.environ:
-                del self.inputs.environ['OMP_NUM_THREADS']
+            if "OMP_NUM_THREADS" in self.inputs.environ:
+                del self.inputs.environ["OMP_NUM_THREADS"]
             self.num_threads = 1
     def _environ_update(self):
         if self.inputs.environ:
-            if 'OMP_NUM_THREADS' in self.inputs.environ:
-                self.inputs.omp_core_val = \
-                    int(self.inputs.environ['OMP_NUM_THREADS'])
+            if "OMP_NUM_THREADS" in self.inputs.environ:
+                self.inputs.omp_core_val = int(self.inputs.environ["OMP_NUM_THREADS"])
             else:
                 self.inputs.omp_core_val = Undefined
         else:
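The two trait-change handlers above keep the omp_core_val trait and the OMP_NUM_THREADS entry of the environ input synchronised in both directions. A minimal sketch of the observable effect, assuming the interface can be instantiated even when the reg_aladin binary is absent (the constructor only inspects a version it can actually obtain):

from nipype.interfaces.niftyreg import RegAladin

node = RegAladin()
node.inputs.omp_core_val = 4  # assignment fires _omp_update via on_trait_change
assert node.inputs.environ["OMP_NUM_THREADS"] == "4"
assert node.num_threads == 4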
@@ -91,16 +93,16 @@ def check_version(self):
         _version = self.version_from_command()
         if not _version:
-            raise Exception('Niftyreg not found')
+            raise Exception("Niftyreg not found")
         # Decoding to string:
         _version = _version.decode("utf-8")
         if StrictVersion(_version) < StrictVersion(self._min_version):
-            err = 'A later version of Niftyreg is required (%s < %s)'
+            err = "A later version of Niftyreg is required (%s < %s)"
             raise ValueError(err % (_version, self._min_version))
         if self.required_version:
             if StrictVersion(_version) != StrictVersion(self.required_version):
-                err = 'The version of NiftyReg differs from the required'
-                err += '(%s != %s)'
+                err = "The version of NiftyReg differs from the required"
+                err += " (%s != %s)"
                 raise ValueError(err % (_version, self.required_version))
     @property
@@ -111,14 +113,14 @@ def exists(self):
         return self.version_from_command() is not None
     def _format_arg(self, name, spec, value):
-        if name == 'omp_core_val':
+        if name == "omp_core_val":
             self.numthreads = value
         return super(NiftyRegCommand, self)._format_arg(name, spec, value)
     def _gen_fname(self, basename, out_dir=None, suffix=None, ext=None):
-        if basename == '':
-            msg = 'Unable to generate filename for command %s. ' % self.cmd
-            msg += 'basename is not set!'
+        if basename == "":
+            msg = "Unable to generate filename for command %s. " % self.cmd
+            msg += "basename is not set!"
             raise ValueError(msg)
         _, final_bn, final_ext = split_filename(basename)
         if out_dir is None:
@@ -126,5 +128,5 @@ def _gen_fname(self, basename, out_dir=None, suffix=None, ext=None):
         if ext is not None:
             final_ext = ext
         if suffix is not None:
-            final_bn = ''.join((final_bn, suffix))
+            final_bn = "".join((final_bn, suffix))
         return os.path.abspath(os.path.join(out_dir, final_bn + final_ext))
diff --git a/nipype/interfaces/niftyreg/reg.py b/nipype/interfaces/niftyreg/reg.py
index 26985a3e58..f149006d49 100644
--- a/nipype/interfaces/niftyreg/reg.py
+++ b/nipype/interfaces/niftyreg/reg.py
@@ -16,105 +16,111 @@ class RegAladinInputSpec(NiftyRegCommandInputSpec):
     """ Input Spec for RegAladin. """
+
     # Input reference file
     ref_file = File(
         exists=True,
-        desc='The input reference/target image',
-        argstr='-ref %s',
-        mandatory=True)
+        desc="The input reference/target image",
+        argstr="-ref %s",
+        mandatory=True,
+    )
     # Input floating file
     flo_file = File(
         exists=True,
-        desc='The input floating/source image',
-        argstr='-flo %s',
-        mandatory=True)
+        desc="The input floating/source image",
+        argstr="-flo %s",
+        mandatory=True,
+    )
     # No symmetric flag
-    nosym_flag = traits.Bool(
-        argstr='-noSym', desc='Turn off symmetric registration')
+    nosym_flag = traits.Bool(argstr="-noSym", desc="Turn off symmetric registration")
     # Rigid only registration
-    rig_only_flag = traits.Bool(
-        argstr='-rigOnly', desc='Do only a rigid registration')
+    rig_only_flag = traits.Bool(argstr="-rigOnly", desc="Do only a rigid registration")
     # Directly optimise affine flag
-    desc = 'Directly optimise the affine parameters'
-    aff_direct_flag = traits.Bool(argstr='-affDirect', desc=desc)
+    desc = "Directly optimise the affine parameters"
+    aff_direct_flag = traits.Bool(argstr="-affDirect", desc=desc)
     # Input affine
     in_aff_file = File(
-        exists=True,
-        desc='The input affine transformation',
-        argstr='-inaff %s')
+        exists=True, desc="The input affine transformation", argstr="-inaff %s"
+    )
     # Input reference mask
-    rmask_file = File(
-        exists=True, desc='The input reference mask', argstr='-rmask %s')
+    rmask_file = File(exists=True, desc="The input reference mask", argstr="-rmask %s")
     # Input floating mask
-    fmask_file = File(
-        exists=True, desc='The input floating mask', argstr='-fmask %s')
+    fmask_file = File(exists=True, desc="The input floating mask", argstr="-fmask %s")
     # Maximum number of iterations
     maxit_val = traits.Range(
-        desc='Maximum number of iterations', argstr='-maxit %d', low=0)
+        desc="Maximum number of iterations", argstr="-maxit %d", low=0
+    )
    # Multiresolution levels
ln_val = traits.Range( - desc='Number of resolution levels to create', argstr='-ln %d', low=0) + desc="Number of resolution levels to create", argstr="-ln %d", low=0 + ) # Number of resolution levels to process lp_val = traits.Range( - desc='Number of resolution levels to perform', argstr='-lp %d', low=0) + desc="Number of resolution levels to perform", argstr="-lp %d", low=0 + ) # Smoothing to apply on reference image - desc = 'Amount of smoothing to apply to reference image' - smoo_r_val = traits.Float(desc=desc, argstr='-smooR %f') + desc = "Amount of smoothing to apply to reference image" + smoo_r_val = traits.Float(desc=desc, argstr="-smooR %f") # Smoothing to apply on floating image - desc = 'Amount of smoothing to apply to floating image' - smoo_f_val = traits.Float(desc=desc, argstr='-smooF %f') + desc = "Amount of smoothing to apply to floating image" + smoo_f_val = traits.Float(desc=desc, argstr="-smooF %f") # Use nifti header to initialise transformation - desc = 'Use nifti header to initialise transformation' - nac_flag = traits.Bool(desc=desc, argstr='-nac') + desc = "Use nifti header to initialise transformation" + nac_flag = traits.Bool(desc=desc, argstr="-nac") # Use the input masks centre of mass to initialise the transformation - desc = 'Use the masks centre of mass to initialise the transformation' - cog_flag = traits.Bool(desc=desc, argstr='-cog') + desc = "Use the masks centre of mass to initialise the transformation" + cog_flag = traits.Bool(desc=desc, argstr="-cog") # Percent of blocks that are considered active. v_val = traits.Range( - desc='Percent of blocks that are active', argstr='-pv %d', low=0) + desc="Percent of blocks that are active", argstr="-pv %d", low=0 + ) # Percent of inlier blocks - i_val = traits.Range( - desc='Percent of inlier blocks', argstr='-pi %d', low=0) + i_val = traits.Range(desc="Percent of inlier blocks", argstr="-pi %d", low=0) # Lower threshold on reference image ref_low_val = traits.Float( - desc='Lower threshold value on reference image', - argstr='-refLowThr %f') + desc="Lower threshold value on reference image", argstr="-refLowThr %f" + ) # Upper threshold on reference image ref_up_val = traits.Float( - desc='Upper threshold value on reference image', argstr='-refUpThr %f') + desc="Upper threshold value on reference image", argstr="-refUpThr %f" + ) # Lower threshold on floating image flo_low_val = traits.Float( - desc='Lower threshold value on floating image', argstr='-floLowThr %f') + desc="Lower threshold value on floating image", argstr="-floLowThr %f" + ) # Upper threshold on floating image flo_up_val = traits.Float( - desc='Upper threshold value on floating image', argstr='-floUpThr %f') + desc="Upper threshold value on floating image", argstr="-floUpThr %f" + ) # Platform to use - platform_val = traits.Int(desc='Platform index', argstr='-platf %i') + platform_val = traits.Int(desc="Platform index", argstr="-platf %i") # Platform to use - gpuid_val = traits.Int(desc='Device to use id', argstr='-gpuid %i') + gpuid_val = traits.Int(desc="Device to use id", argstr="-gpuid %i") # Verbosity off - verbosity_off_flag = traits.Bool( - argstr='-voff', desc='Turn off verbose output') + verbosity_off_flag = traits.Bool(argstr="-voff", desc="Turn off verbose output") # Affine output transformation matrix file aff_file = File( - name_source=['flo_file'], - name_template='%s_aff.txt', - desc='The output affine matrix file', - argstr='-aff %s') + name_source=["flo_file"], + name_template="%s_aff.txt", + desc="The output affine matrix file", 
+ argstr="-aff %s", + ) # Result warped image file res_file = File( - name_source=['flo_file'], - name_template='%s_res.nii.gz', - desc='The affine transformed floating image', - argstr='-res %s') + name_source=["flo_file"], + name_template="%s_res.nii.gz", + desc="The affine transformed floating image", + argstr="-res %s", + ) class RegAladinOutputSpec(TraitedSpec): """ Output Spec for RegAladin. """ - aff_file = File(desc='The output affine file') - res_file = File(desc='The output transformed image') - desc = 'Output string in the format for reg_average' + + aff_file = File(desc="The output affine file") + res_file = File(desc="The output transformed image") + desc = "Output string in the format for reg_average" avg_output = traits.String(desc=desc) @@ -141,7 +147,8 @@ class RegAladin(NiftyRegCommand): -res im2_res.nii.gz -rmask mask.nii' """ - _cmd = get_custom_path('reg_aladin') + + _cmd = get_custom_path("reg_aladin") input_spec = RegAladinInputSpec output_spec = RegAladinOutputSpec @@ -149,196 +156,200 @@ def _list_outputs(self): outputs = super(RegAladin, self)._list_outputs() # Make a list of the linear transformation file and the input image - aff = os.path.abspath(outputs['aff_file']) + aff = os.path.abspath(outputs["aff_file"]) flo = os.path.abspath(self.inputs.flo_file) - outputs['avg_output'] = '%s %s' % (aff, flo) + outputs["avg_output"] = "%s %s" % (aff, flo) return outputs class RegF3DInputSpec(NiftyRegCommandInputSpec): """ Input Spec for RegF3D. """ + # Input reference file ref_file = File( exists=True, - desc='The input reference/target image', - argstr='-ref %s', - mandatory=True) + desc="The input reference/target image", + argstr="-ref %s", + mandatory=True, + ) # Input floating file flo_file = File( exists=True, - desc='The input floating/source image', - argstr='-flo %s', - mandatory=True) + desc="The input floating/source image", + argstr="-flo %s", + mandatory=True, + ) # Input Affine file aff_file = File( - exists=True, - desc='The input affine transformation file', - argstr='-aff %s') + exists=True, desc="The input affine transformation file", argstr="-aff %s" + ) # Input cpp file incpp_file = File( - exists=True, - desc='The input cpp transformation file', - argstr='-incpp %s') + exists=True, desc="The input cpp transformation file", argstr="-incpp %s" + ) # Reference mask - rmask_file = File( - exists=True, desc='Reference image mask', argstr='-rmask %s') + rmask_file = File(exists=True, desc="Reference image mask", argstr="-rmask %s") # Smoothing kernel for reference - desc = 'Smoothing kernel width for reference image' - ref_smooth_val = traits.Float(desc=desc, argstr='-smooR %f') + desc = "Smoothing kernel width for reference image" + ref_smooth_val = traits.Float(desc=desc, argstr="-smooR %f") # Smoothing kernel for floating - desc = 'Smoothing kernel width for floating image' - flo_smooth_val = traits.Float(desc=desc, argstr='-smooF %f') + desc = "Smoothing kernel width for floating image" + flo_smooth_val = traits.Float(desc=desc, argstr="-smooF %f") # Lower threshold for reference image rlwth_thr_val = traits.Float( - desc='Lower threshold for reference image', argstr='--rLwTh %f') + desc="Lower threshold for reference image", argstr="--rLwTh %f" + ) # Upper threshold for reference image rupth_thr_val = traits.Float( - desc='Upper threshold for reference image', argstr='--rUpTh %f') + desc="Upper threshold for reference image", argstr="--rUpTh %f" + ) # Lower threshold for reference image flwth_thr_val = traits.Float( - desc='Lower threshold for 
floating image', argstr='--fLwTh %f') + desc="Lower threshold for floating image", argstr="--fLwTh %f" + ) # Upper threshold for reference image fupth_thr_val = traits.Float( - desc='Upper threshold for floating image', argstr='--fUpTh %f') + desc="Upper threshold for floating image", argstr="--fUpTh %f" + ) # Lower threshold for reference image - desc = 'Lower threshold for reference image at the specified time point' + desc = "Lower threshold for reference image at the specified time point" rlwth2_thr_val = traits.Tuple( - traits.Range(low=0), traits.Float, desc=desc, argstr='-rLwTh %d %f') + traits.Range(low=0), traits.Float, desc=desc, argstr="-rLwTh %d %f" + ) # Upper threshold for reference image - desc = 'Upper threshold for reference image at the specified time point' + desc = "Upper threshold for reference image at the specified time point" rupth2_thr_val = traits.Tuple( - traits.Range(low=0), traits.Float, desc=desc, argstr='-rUpTh %d %f') + traits.Range(low=0), traits.Float, desc=desc, argstr="-rUpTh %d %f" + ) # Lower threshold for reference image - desc = 'Lower threshold for floating image at the specified time point' + desc = "Lower threshold for floating image at the specified time point" flwth2_thr_val = traits.Tuple( - traits.Range(low=0), traits.Float, desc=desc, argstr='-fLwTh %d %f') + traits.Range(low=0), traits.Float, desc=desc, argstr="-fLwTh %d %f" + ) # Upper threshold for reference image - desc = 'Upper threshold for floating image at the specified time point' + desc = "Upper threshold for floating image at the specified time point" fupth2_thr_val = traits.Tuple( - traits.Range(low=0), traits.Float, desc=desc, argstr='-fUpTh %d %f') + traits.Range(low=0), traits.Float, desc=desc, argstr="-fUpTh %d %f" + ) # Final grid spacing along the 3 axes - sx_val = traits.Float( - desc='Final grid spacing along the x axes', argstr='-sx %f') - sy_val = traits.Float( - desc='Final grid spacing along the y axes', argstr='-sy %f') - sz_val = traits.Float( - desc='Final grid spacing along the z axes', argstr='-sz %f') + sx_val = traits.Float(desc="Final grid spacing along the x axes", argstr="-sx %f") + sy_val = traits.Float(desc="Final grid spacing along the y axes", argstr="-sy %f") + sz_val = traits.Float(desc="Final grid spacing along the z axes", argstr="-sz %f") # Regularisation options - be_val = traits.Float(desc='Bending energy value', argstr='-be %f') - le_val = traits.Float( - desc='Linear elasticity penalty term', argstr='-le %f') + be_val = traits.Float(desc="Bending energy value", argstr="-be %f") + le_val = traits.Float(desc="Linear elasticity penalty term", argstr="-le %f") jl_val = traits.Float( - desc='Log of jacobian of deformation penalty value', argstr='-jl %f') - desc = 'Do not approximate the log of jacobian penalty at control points \ -only' + desc="Log of jacobian of deformation penalty value", argstr="-jl %f" + ) + desc = "Do not approximate the log of jacobian penalty at control points \ +only" - no_app_jl_flag = traits.Bool(argstr='-noAppJL', desc=desc) + no_app_jl_flag = traits.Bool(argstr="-noAppJL", desc=desc) # Similarity measure options - desc = 'use NMI even when other options are specified' - nmi_flag = traits.Bool(argstr='--nmi', desc=desc) - desc = 'Number of bins in the histogram for reference image' - rbn_val = traits.Range(low=0, desc=desc, argstr='--rbn %d') - desc = 'Number of bins in the histogram for reference image' - fbn_val = traits.Range(low=0, desc=desc, argstr='--fbn %d') - desc = 'Number of bins in the histogram for reference 
image for given \ -time point' + desc = "Use NMI even when other options are specified" + nmi_flag = traits.Bool(argstr="--nmi", desc=desc) + desc = "Number of bins in the histogram for reference image" + rbn_val = traits.Range(low=0, desc=desc, argstr="--rbn %d") + desc = "Number of bins in the histogram for floating image" + fbn_val = traits.Range(low=0, desc=desc, argstr="--fbn %d") + desc = "Number of bins in the histogram for reference image for given \ +time point" rbn2_val = traits.Tuple( - traits.Range(low=0), - traits.Range(low=0), - desc=desc, - argstr='-rbn %d %d') + traits.Range(low=0), traits.Range(low=0), desc=desc, argstr="-rbn %d %d" + ) - desc = 'Number of bins in the histogram for reference image for given \ -time point' + desc = "Number of bins in the histogram for floating image for given \ +time point" fbn2_val = traits.Tuple( - traits.Range(low=0), - traits.Range(low=0), - desc=desc, - argstr='-fbn %d %d') + traits.Range(low=0), traits.Range(low=0), desc=desc, argstr="-fbn %d %d" + ) lncc_val = traits.Float( - desc='SD of the Gaussian for computing LNCC', argstr='--lncc %f') - desc = 'SD of the Gaussian for computing LNCC for a given time point' + desc="SD of the Gaussian for computing LNCC", argstr="--lncc %f" + ) + desc = "SD of the Gaussian for computing LNCC for a given time point" lncc2_val = traits.Tuple( - traits.Range(low=0), traits.Float, desc=desc, argstr='-lncc %d %f') + traits.Range(low=0), traits.Float, desc=desc, argstr="-lncc %d %f" + ) - ssd_flag = traits.Bool( - desc='Use SSD as the similarity measure', argstr='--ssd') - desc = 'Use SSD as the similarity measure for a given time point' - ssd2_flag = traits.Range(low=0, desc=desc, argstr='-ssd %d') + ssd_flag = traits.Bool(desc="Use SSD as the similarity measure", argstr="--ssd") + desc = "Use SSD as the similarity measure for a given time point" + ssd2_flag = traits.Range(low=0, desc=desc, argstr="-ssd %d") kld_flag = traits.Bool( - desc='Use KL divergence as the similarity measure', argstr='--kld') - desc = 'Use KL divergence as the similarity measure for a given time point' - kld2_flag = traits.Range(low=0, desc=desc, argstr='-kld %d') - amc_flag = traits.Bool(desc='Use additive NMI', argstr='-amc') + desc="Use KL divergence as the similarity measure", argstr="--kld" + ) + desc = "Use KL divergence as the similarity measure for a given time point" + kld2_flag = traits.Range(low=0, desc=desc, argstr="-kld %d") + amc_flag = traits.Bool(desc="Use additive NMI", argstr="-amc") - nox_flag = traits.Bool(desc="Don't optimise in x direction", argstr='-nox') - noy_flag = traits.Bool(desc="Don't optimise in y direction", argstr='-noy') - noz_flag = traits.Bool(desc="Don't optimise in z direction", argstr='-noz') + nox_flag = traits.Bool(desc="Don't optimise in x direction", argstr="-nox") + noy_flag = traits.Bool(desc="Don't optimise in y direction", argstr="-noy") + noz_flag = traits.Bool(desc="Don't optimise in z direction", argstr="-noz") # Optimization options maxit_val = traits.Range( - low=0, - argstr='-maxit %d', - desc='Maximum number of iterations per level') + low=0, argstr="-maxit %d", desc="Maximum number of iterations per level" + ) ln_val = traits.Range( - low=0, argstr='-ln %d', desc='Number of resolution levels to create') + low=0, argstr="-ln %d", desc="Number of resolution levels to create" + ) lp_val = traits.Range( - low=0, argstr='-lp %d', desc='Number of resolution levels to perform') + low=0, argstr="-lp %d", desc="Number of resolution levels to perform" + ) nopy_flag = traits.Bool( -
desc='Do not use the multiresolution approach', argstr='-nopy') - noconj_flag = traits.Bool( - desc='Use simple GD optimization', argstr='-noConj') - desc = 'Add perturbation steps after each optimization step' - pert_val = traits.Range(low=0, desc=desc, argstr='-pert %d') + desc="Do not use the multiresolution approach", argstr="-nopy" + ) + noconj_flag = traits.Bool(desc="Use simple GD optimization", argstr="-noConj") + desc = "Add perturbation steps after each optimization step" + pert_val = traits.Range(low=0, desc=desc, argstr="-pert %d") # F3d2 options - vel_flag = traits.Bool( - desc='Use velocity field integration', argstr='-vel') - fmask_file = File( - exists=True, desc='Floating image mask', argstr='-fmask %s') + vel_flag = traits.Bool(desc="Use velocity field integration", argstr="-vel") + fmask_file = File(exists=True, desc="Floating image mask", argstr="-fmask %s") # Other options - desc = 'Kernel width for smoothing the metric gradient' - smooth_grad_val = traits.Float(desc=desc, argstr='-smoothGrad %f') + desc = "Kernel width for smoothing the metric gradient" + smooth_grad_val = traits.Float(desc=desc, argstr="-smoothGrad %f") # Padding value - pad_val = traits.Float(desc='Padding value', argstr='-pad %f') + pad_val = traits.Float(desc="Padding value", argstr="-pad %f") # verbosity off - verbosity_off_flag = traits.Bool( - argstr='-voff', desc='Turn off verbose output') + verbosity_off_flag = traits.Bool(argstr="-voff", desc="Turn off verbose output") # Output CPP image file cpp_file = File( - name_source=['flo_file'], - name_template='%s_cpp.nii.gz', - desc='The output CPP file', - argstr='-cpp %s') + name_source=["flo_file"], + name_template="%s_cpp.nii.gz", + desc="The output CPP file", + argstr="-cpp %s", + ) # Output warped image file res_file = File( - name_source=['flo_file'], - name_template='%s_res.nii.gz', - desc='The output resampled image', - argstr='-res %s') + name_source=["flo_file"], + name_template="%s_res.nii.gz", + desc="The output resampled image", + argstr="-res %s", + ) class RegF3DOutputSpec(TraitedSpec): """ Output Spec for RegF3D. 
""" - cpp_file = File(desc='The output CPP file') - res_file = File(desc='The output resampled image') - invcpp_file = File(desc='The output inverse CPP file') - invres_file = File(desc='The output inverse res file') - desc = 'Output string in the format for reg_average' + + cpp_file = File(desc="The output CPP file") + res_file = File(desc="The output resampled image") + invcpp_file = File(desc="The output inverse CPP file") + invres_file = File(desc="The output inverse res file") + desc = "Output string in the format for reg_average" avg_output = traits.String(desc=desc) @@ -364,7 +375,8 @@ class RegF3D(NiftyRegCommand): -res im2_res.nii.gz -rmask mask.nii' """ - _cmd = get_custom_path('reg_f3d') + + _cmd = get_custom_path("reg_f3d") input_spec = RegF3DInputSpec output_spec = RegF3DOutputSpec @@ -377,20 +389,23 @@ def _list_outputs(self): outputs = super(RegF3D, self)._list_outputs() if self.inputs.vel_flag is True: - res_name = self._remove_extension(outputs['res_file']) - cpp_name = self._remove_extension(outputs['cpp_file']) - outputs['invres_file'] = '%s_backward.nii.gz' % res_name - outputs['invcpp_file'] = '%s_backward.nii.gz' % cpp_name + res_name = self._remove_extension(outputs["res_file"]) + cpp_name = self._remove_extension(outputs["cpp_file"]) + outputs["invres_file"] = "%s_backward.nii.gz" % res_name + outputs["invcpp_file"] = "%s_backward.nii.gz" % cpp_name # Make a list of the linear transformation file and the input image if self.inputs.vel_flag is True and isdefined(self.inputs.aff_file): - cpp_file = os.path.abspath(outputs['cpp_file']) + cpp_file = os.path.abspath(outputs["cpp_file"]) flo_file = os.path.abspath(self.inputs.flo_file) - outputs['avg_output'] = '%s %s %s' % (self.inputs.aff_file, - cpp_file, flo_file) + outputs["avg_output"] = "%s %s %s" % ( + self.inputs.aff_file, + cpp_file, + flo_file, + ) else: - cpp_file = os.path.abspath(outputs['cpp_file']) + cpp_file = os.path.abspath(outputs["cpp_file"]) flo_file = os.path.abspath(self.inputs.flo_file) - outputs['avg_output'] = '%s %s' % (cpp_file, flo_file) + outputs["avg_output"] = "%s %s" % (cpp_file, flo_file) return outputs diff --git a/nipype/interfaces/niftyreg/regutils.py b/nipype/interfaces/niftyreg/regutils.py index 2d08a7119d..032f106933 100644 --- a/nipype/interfaces/niftyreg/regutils.py +++ b/nipype/interfaces/niftyreg/regutils.py @@ -15,70 +15,72 @@ class RegResampleInputSpec(NiftyRegCommandInputSpec): """ Input Spec for RegResample. 
""" + # Input reference file ref_file = File( exists=True, - desc='The input reference/target image', - argstr='-ref %s', - mandatory=True) + desc="The input reference/target image", + argstr="-ref %s", + mandatory=True, + ) # Input floating file flo_file = File( exists=True, - desc='The input floating/source image', - argstr='-flo %s', - mandatory=True) + desc="The input floating/source image", + argstr="-flo %s", + mandatory=True, + ) # Input deformation field trans_file = File( - exists=True, desc='The input transformation file', argstr='-trans %s') + exists=True, desc="The input transformation file", argstr="-trans %s" + ) type = traits.Enum( - 'res', - 'blank', - argstr='-%s', + "res", + "blank", + argstr="-%s", position=-2, usedefault=True, - desc='Type of output') + desc="Type of output", + ) # Output file name out_file = File( - name_source=['flo_file'], - name_template='%s', - argstr='%s', + name_source=["flo_file"], + name_template="%s", + argstr="%s", position=-1, - desc='The output filename of the transformed image') + desc="The output filename of the transformed image", + ) # Interpolation type inter_val = traits.Enum( - 'NN', - 'LIN', - 'CUB', - 'SINC', - desc='Interpolation type', - argstr='-inter %d') + "NN", "LIN", "CUB", "SINC", desc="Interpolation type", argstr="-inter %d" + ) # Padding value - pad_val = traits.Float(desc='Padding value', argstr='-pad %f') + pad_val = traits.Float(desc="Padding value", argstr="-pad %f") # Tensor flag - tensor_flag = traits.Bool(desc='Resample Tensor Map', argstr='-tensor ') + tensor_flag = traits.Bool(desc="Resample Tensor Map", argstr="-tensor ") # Verbosity off - verbosity_off_flag = traits.Bool( - argstr='-voff', desc='Turn off verbose output') + verbosity_off_flag = traits.Bool(argstr="-voff", desc="Turn off verbose output") # PSF flag - desc = 'Perform the resampling in two steps to resample an image to a \ -lower resolution' + desc = "Perform the resampling in two steps to resample an image to a \ +lower resolution" - psf_flag = traits.Bool(argstr='-psf', desc=desc) - desc = 'Minimise the matrix metric (0) or the determinant (1) when \ -estimating the PSF [0]' + psf_flag = traits.Bool(argstr="-psf", desc=desc) + desc = "Minimise the matrix metric (0) or the determinant (1) when \ +estimating the PSF [0]" - psf_alg = traits.Enum(0, 1, argstr='-psf_alg %d', desc=desc) + psf_alg = traits.Enum(0, 1, argstr="-psf_alg %d", desc=desc) class RegResampleOutputSpec(TraitedSpec): """ Output Spec for RegResample. 
""" - out_file = File(desc='The output filename of the transformed image') + + out_file = File(desc="The output filename of the transformed image") class RegResample(NiftyRegCommand): @@ -104,14 +106,15 @@ class RegResample(NiftyRegCommand): warpfield.nii -res im2_res.nii.gz' """ - _cmd = get_custom_path('reg_resample') + + _cmd = get_custom_path("reg_resample") input_spec = RegResampleInputSpec output_spec = RegResampleOutputSpec # Need this overload to properly constraint the interpolation type input def _format_arg(self, name, spec, value): - if name == 'inter_val': - inter_val = {'NN': 0, 'LIN': 1, 'CUB': 3, 'SINC': 4} + if name == "inter_val": + inter_val = {"NN": 0, "LIN": 1, "CUB": 3, "SINC": 4} return spec.argstr % inter_val[value] else: return super(RegResample, self)._format_arg(name, spec, value) @@ -119,39 +122,44 @@ def _format_arg(self, name, spec, value): def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) suffix = self.inputs.type - return os.path.join(path, '{0}_{1}.nii.gz'.format(base, suffix)) + return os.path.join(path, "{0}_{1}.nii.gz".format(base, suffix)) class RegJacobianInputSpec(NiftyRegCommandInputSpec): """ Input Spec for RegJacobian. """ + # Reference file name - desc = 'Reference/target file (required if specifying CPP transformations.' - ref_file = File(exists=True, desc=desc, argstr='-ref %s') + desc = "Reference/target file (required if specifying CPP transformations." + ref_file = File(exists=True, desc=desc, argstr="-ref %s") # Input transformation file trans_file = File( exists=True, - desc='The input non-rigid transformation', - argstr='-trans %s', - mandatory=True) + desc="The input non-rigid transformation", + argstr="-trans %s", + mandatory=True, + ) type = traits.Enum( - 'jac', - 'jacL', - 'jacM', + "jac", + "jacL", + "jacM", usedefault=True, - argstr='-%s', + argstr="-%s", position=-2, - desc='Type of jacobian outcome') + desc="Type of jacobian outcome", + ) out_file = File( - name_source=['trans_file'], - name_template='%s', - desc='The output jacobian determinant file name', - argstr='%s', - position=-1) + name_source=["trans_file"], + name_template="%s", + desc="The output jacobian determinant file name", + argstr="%s", + position=-1, + ) class RegJacobianOutputSpec(TraitedSpec): """ Output Spec for RegJacobian. """ - out_file = File(desc='The output file') + + out_file = File(desc="The output file") class RegJacobian(NiftyRegCommand): @@ -174,129 +182,129 @@ class RegJacobian(NiftyRegCommand): warpfield_jac.nii.gz' """ - _cmd = get_custom_path('reg_jacobian') + + _cmd = get_custom_path("reg_jacobian") input_spec = RegJacobianInputSpec output_spec = RegJacobianOutputSpec def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) suffix = self.inputs.type - return os.path.join(path, '{0}_{1}.nii.gz'.format(base, suffix)) + return os.path.join(path, "{0}_{1}.nii.gz".format(base, suffix)) class RegToolsInputSpec(NiftyRegCommandInputSpec): """ Input Spec for RegTools. 
""" + # Input image file in_file = File( - exists=True, - desc='The input image file path', - argstr='-in %s', - mandatory=True) + exists=True, desc="The input image file path", argstr="-in %s", mandatory=True + ) # Output file path out_file = File( - name_source=['in_file'], - name_template='%s_tools.nii.gz', - desc='The output file name', - argstr='-out %s') + name_source=["in_file"], + name_template="%s_tools.nii.gz", + desc="The output file name", + argstr="-out %s", + ) # Make the output image isotropic - iso_flag = traits.Bool(argstr='-iso', desc='Make output image isotropic') + iso_flag = traits.Bool(argstr="-iso", desc="Make output image isotropic") # Set scale, slope to 0 and 1. - noscl_flag = traits.Bool( - argstr='-noscl', desc='Set scale, slope to 0 and 1') + noscl_flag = traits.Bool(argstr="-noscl", desc="Set scale, slope to 0 and 1") # Values outside the mask are set to NaN mask_file = File( - exists=True, - desc='Values outside the mask are set to NaN', - argstr='-nan %s') + exists=True, desc="Values outside the mask are set to NaN", argstr="-nan %s" + ) # Threshold the input image - desc = 'Binarise the input image with the given threshold' - thr_val = traits.Float(desc=desc, argstr='-thr %f') + desc = "Binarise the input image with the given threshold" + thr_val = traits.Float(desc=desc, argstr="-thr %f") # Binarise the input image - bin_flag = traits.Bool(argstr='-bin', desc='Binarise the input image') + bin_flag = traits.Bool(argstr="-bin", desc="Binarise the input image") # Compute the mean RMS between the two images rms_val = File( - exists=True, - desc='Compute the mean RMS between the images', - argstr='-rms %s') + exists=True, desc="Compute the mean RMS between the images", argstr="-rms %s" + ) # Perform division by image or value div_val = traits.Either( traits.Float, File(exists=True), - desc='Divide the input by image or value', - argstr='-div %s') + desc="Divide the input by image or value", + argstr="-div %s", + ) # Perform multiplication by image or value mul_val = traits.Either( traits.Float, File(exists=True), - desc='Multiply the input by image or value', - argstr='-mul %s') + desc="Multiply the input by image or value", + argstr="-mul %s", + ) # Perform addition by image or value add_val = traits.Either( traits.Float, File(exists=True), - desc='Add to the input image or value', - argstr='-add %s') + desc="Add to the input image or value", + argstr="-add %s", + ) # Perform subtraction by image or value sub_val = traits.Either( traits.Float, File(exists=True), - desc='Add to the input image or value', - argstr='-sub %s') + desc="Add to the input image or value", + argstr="-sub %s", + ) # Downsample the image by a factor of 2. 
down_flag = traits.Bool( - desc='Downsample the image by a factor of 2', argstr='-down') + desc="Downsample the image by a factor of 2", argstr="-down" + ) # Smoothing using spline kernel - desc = 'Smooth the input image using a cubic spline kernel' + desc = "Smooth the input image using a cubic spline kernel" smo_s_val = traits.Tuple( - traits.Float, - traits.Float, - traits.Float, - desc=desc, - argstr='-smoS %f %f %f') + traits.Float, traits.Float, traits.Float, desc=desc, argstr="-smoS %f %f %f" + ) # Change the resolution of the input image chg_res_val = traits.Tuple( traits.Float, traits.Float, traits.Float, - desc='Change the resolution of the input image', - argstr='-chgres %f %f %f') + desc="Change the resolution of the input image", + argstr="-chgres %f %f %f", + ) # Smoothing using Gaussian kernel - desc = 'Smooth the input image using a Gaussian kernel' + desc = "Smooth the input image using a Gaussian kernel" smo_g_val = traits.Tuple( - traits.Float, - traits.Float, - traits.Float, - desc=desc, - argstr='-smoG %f %f %f') + traits.Float, traits.Float, traits.Float, desc=desc, argstr="-smoG %f %f %f" + ) # Interpolation type inter_val = traits.Enum( - 'NN', - 'LIN', - 'CUB', - 'SINC', - desc='Interpolation order to use to warp the floating image', - argstr='-interp %d') + "NN", + "LIN", + "CUB", + "SINC", + desc="Interpolation order to use to warp the floating image", + argstr="-interp %d", + ) class RegToolsOutputSpec(TraitedSpec): """ Output Spec for RegTools. """ - out_file = File(desc='The output file', exists=True) + + out_file = File(desc="The output file", exists=True) class RegTools(NiftyRegCommand): @@ -319,15 +327,16 @@ class RegTools(NiftyRegCommand): 'reg_tools -in im1.nii -mul 4.0 -omp 4 -out im1_tools.nii.gz' """ - _cmd = get_custom_path('reg_tools') + + _cmd = get_custom_path("reg_tools") input_spec = RegToolsInputSpec output_spec = RegToolsOutputSpec - _suffix = '_tools' + _suffix = "_tools" # Need this overload to properly constraint the interpolation type input def _format_arg(self, name, spec, value): - if name == 'inter_val': - inter_val = {'NN': 0, 'LIN': 1, 'CUB': 3, 'SINC': 4} + if name == "inter_val": + inter_val = {"NN": 0, "LIN": 1, "CUB": 3, "SINC": 4} return spec.argstr % inter_val[value] else: return super(RegTools, self)._format_arg(name, spec, value) @@ -335,99 +344,127 @@ def _format_arg(self, name, spec, value): class RegAverageInputSpec(NiftyRegCommandInputSpec): """ Input Spec for RegAverage. """ + avg_files = traits.List( File(exist=True), position=1, - argstr='-avg %s', - sep=' ', + argstr="-avg %s", + sep=" ", xor=[ - 'avg_lts_files', 'avg_ref_file', 'demean1_ref_file', - 'demean2_ref_file', 'demean3_ref_file', 'warp_files' + "avg_lts_files", + "avg_ref_file", + "demean1_ref_file", + "demean2_ref_file", + "demean3_ref_file", + "warp_files", ], - desc='Averaging of images/affine transformations') + desc="Averaging of images/affine transformations", + ) - desc = 'Robust average of affine transformations' + desc = "Robust average of affine transformations" avg_lts_files = traits.List( File(exist=True), position=1, - argstr='-avg_lts %s', - sep=' ', + argstr="-avg_lts %s", + sep=" ", xor=[ - 'avg_files', 'avg_ref_file', 'demean1_ref_file', - 'demean2_ref_file', 'demean3_ref_file', 'warp_files' + "avg_files", + "avg_ref_file", + "demean1_ref_file", + "demean2_ref_file", + "demean3_ref_file", + "warp_files", ], - desc=desc) + desc=desc, + ) - desc = 'All input images are resampled into the space of \ - and averaged. 
A cubic spline interpolation scheme is used for resampling' + desc = "All input images are resampled into the space of the reference image \ +and averaged. A cubic spline interpolation scheme is used for resampling" avg_ref_file = File( position=1, - argstr='-avg_tran %s', + argstr="-avg_tran %s", xor=[ - 'avg_files', 'avg_lts_files', 'demean1_ref_file', - 'demean2_ref_file', 'demean3_ref_file' + "avg_files", + "avg_lts_files", + "demean1_ref_file", + "demean2_ref_file", + "demean3_ref_file", ], - requires=['warp_files'], - desc=desc) + requires=["warp_files"], + desc=desc, + ) - desc = 'Average images and demean average image that have affine \ -transformations to a common space' + desc = "Average images and demean average image that have affine \ +transformations to a common space" demean1_ref_file = File( position=1, - argstr='-demean1 %s', + argstr="-demean1 %s", xor=[ - 'avg_files', 'avg_lts_files', 'avg_ref_file', 'demean2_ref_file', - 'demean3_ref_file' + "avg_files", + "avg_lts_files", + "avg_ref_file", + "demean2_ref_file", + "demean3_ref_file", ], - requires=['warp_files'], - desc=desc) + requires=["warp_files"], + desc=desc, + ) - desc = 'Average images and demean average image that have non-rigid \ -transformations to a common space' + desc = "Average images and demean average image that have non-rigid \ +transformations to a common space" demean2_ref_file = File( position=1, - argstr='-demean2 %s', + argstr="-demean2 %s", xor=[ - 'avg_files', 'avg_lts_files', 'avg_ref_file', 'demean1_ref_file', - 'demean3_ref_file' + "avg_files", + "avg_lts_files", + "avg_ref_file", + "demean1_ref_file", + "demean3_ref_file", ], - requires=['warp_files'], - desc=desc) + requires=["warp_files"], + desc=desc, + ) - desc = 'Average images and demean average image that have linear and \ -non-rigid transformations to a common space' + desc = "Average images and demean average image that have linear and \ +non-rigid transformations to a common space" demean3_ref_file = File( position=1, - argstr='-demean3 %s', + argstr="-demean3 %s", xor=[ - 'avg_files', 'avg_lts_files', 'avg_ref_file', 'demean1_ref_file', - 'demean2_ref_file' + "avg_files", + "avg_lts_files", + "avg_ref_file", + "demean1_ref_file", + "demean2_ref_file", ], - requires=['warp_files'], - desc=desc) + requires=["warp_files"], + desc=desc, + ) - desc = 'transformation files and floating image pairs/triplets to the \ -reference space' + desc = "Transformation files and floating image pairs/triplets to the \ +reference space" warp_files = traits.List( File(exist=True), position=-1, - argstr='%s', - sep=' ', - xor=['avg_files', 'avg_lts_files'], - desc=desc) + argstr="%s", + sep=" ", + xor=["avg_files", "avg_lts_files"], + desc=desc, + ) - out_file = File( - genfile=True, position=0, desc='Output file name', argstr='%s') + out_file = File(genfile=True, position=0, desc="Output file name", argstr="%s") class RegAverageOutputSpec(TraitedSpec): """ Output Spec for RegAverage. 
""" - out_file = File(desc='Output file name') + + out_file = File(desc="Output file name") class RegAverage(NiftyRegCommand): @@ -455,20 +492,21 @@ class RegAverage(NiftyRegCommand): >>> node.cmdline # doctest: +ELLIPSIS 'reg_average --cmd_file .../reg_average_cmd' """ - _cmd = get_custom_path('reg_average') + + _cmd = get_custom_path("reg_average") input_spec = RegAverageInputSpec output_spec = RegAverageOutputSpec - _suffix = 'avg_out' + _suffix = "avg_out" def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": if isdefined(self.inputs.avg_lts_files): - return self._gen_fname(self._suffix, ext='.txt') + return self._gen_fname(self._suffix, ext=".txt") elif isdefined(self.inputs.avg_files): _, _, _ext = split_filename(self.inputs.avg_files[0]) - if _ext not in ['.nii', '.nii.gz', '.hdr', '.img', '.img.gz']: + if _ext not in [".nii", ".nii.gz", ".hdr", ".img", ".img.gz"]: return self._gen_fname(self._suffix, ext=_ext) - return self._gen_fname(self._suffix, ext='.nii.gz') + return self._gen_fname(self._suffix, ext=".nii.gz") return None @@ -476,9 +514,9 @@ def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_file): - outputs['out_file'] = self.inputs.out_file + outputs["out_file"] = self.inputs.out_file else: - outputs['out_file'] = self._gen_filename('out_file') + outputs["out_file"] = self._gen_filename("out_file") return outputs @@ -486,134 +524,196 @@ def _list_outputs(self): def cmdline(self): """ Rewrite the cmdline to write options in text_file.""" argv = super(RegAverage, self).cmdline - reg_average_cmd = os.path.join(os.getcwd(), 'reg_average_cmd') - with open(reg_average_cmd, 'w') as f: + reg_average_cmd = os.path.join(os.getcwd(), "reg_average_cmd") + with open(reg_average_cmd, "w") as f: f.write(argv) - return '%s --cmd_file %s' % (self.cmd, reg_average_cmd) + return "%s --cmd_file %s" % (self.cmd, reg_average_cmd) class RegTransformInputSpec(NiftyRegCommandInputSpec): """ Input Spec for RegTransform. 
""" + ref1_file = File( exists=True, - desc='The input reference/target image', - argstr='-ref %s', - position=0) + desc="The input reference/target image", + argstr="-ref %s", + position=0, + ) ref2_file = File( exists=True, - desc='The input second reference/target image', - argstr='-ref2 %s', + desc="The input second reference/target image", + argstr="-ref2 %s", position=1, - requires=['ref1_file']) + requires=["ref1_file"], + ) def_input = File( exists=True, - argstr='-def %s', + argstr="-def %s", position=-2, - desc='Compute deformation field from transformation', + desc="Compute deformation field from transformation", xor=[ - 'disp_input', 'flow_input', 'comp_input', 'upd_s_form_input', - 'inv_aff_input', 'inv_nrr_input', 'half_input', 'make_aff_input', - 'aff_2_rig_input', 'flirt_2_nr_input' - ]) + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", + ], + ) disp_input = File( exists=True, - argstr='-disp %s', + argstr="-disp %s", position=-2, - desc='Compute displacement field from transformation', + desc="Compute displacement field from transformation", xor=[ - 'def_input', 'flow_input', 'comp_input', 'upd_s_form_input', - 'inv_aff_input', 'inv_nrr_input', 'half_input', 'make_aff_input', - 'aff_2_rig_input', 'flirt_2_nr_input' - ]) + "def_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", + ], + ) flow_input = File( exists=True, - argstr='-flow %s', + argstr="-flow %s", position=-2, - desc='Compute flow field from spline SVF', + desc="Compute flow field from spline SVF", xor=[ - 'def_input', 'disp_input', 'comp_input', 'upd_s_form_input', - 'inv_aff_input', 'inv_nrr_input', 'half_input', 'make_aff_input', - 'aff_2_rig_input', 'flirt_2_nr_input' - ]) + "def_input", + "disp_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", + ], + ) comp_input = File( exists=True, - argstr='-comp %s', + argstr="-comp %s", position=-3, - desc='compose two transformations', + desc="compose two transformations", xor=[ - 'def_input', 'disp_input', 'flow_input', 'upd_s_form_input', - 'inv_aff_input', 'inv_nrr_input', 'half_input', 'make_aff_input', - 'aff_2_rig_input', 'flirt_2_nr_input' + "def_input", + "disp_input", + "flow_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", ], - requires=['comp_input2']) + requires=["comp_input2"], + ) comp_input2 = File( - exists=True, - argstr='%s', - position=-2, - desc='compose two transformations') + exists=True, argstr="%s", position=-2, desc="compose two transformations" + ) - desc = 'Update s-form using the affine transformation' + desc = "Update s-form using the affine transformation" upd_s_form_input = File( exists=True, - argstr='-updSform %s', + argstr="-updSform %s", position=-3, desc=desc, xor=[ - 'def_input', 'disp_input', 'flow_input', 'comp_input', - 'inv_aff_input', 'inv_nrr_input', 'half_input', 'make_aff_input', - 'aff_2_rig_input', 'flirt_2_nr_input' + "def_input", + "disp_input", + "flow_input", + "comp_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", ], - requires=['upd_s_form_input2']) + 
requires=["upd_s_form_input2"], + ) - desc = 'Update s-form using the affine transformation' + desc = "Update s-form using the affine transformation" upd_s_form_input2 = File( - exists=True, - argstr='%s', - position=-2, - desc=desc, - requires=['upd_s_form_input']) + exists=True, argstr="%s", position=-2, desc=desc, requires=["upd_s_form_input"] + ) inv_aff_input = File( exists=True, - argstr='-invAff %s', + argstr="-invAff %s", position=-2, - desc='Invert an affine transformation', + desc="Invert an affine transformation", xor=[ - 'def_input', 'disp_input', 'flow_input', 'comp_input', - 'upd_s_form_input', 'inv_nrr_input', 'half_input', - 'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input' - ]) + "def_input", + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", + ], + ) inv_nrr_input = traits.Tuple( File(exists=True), File(exists=True), - desc='Invert a non-linear transformation', - argstr='-invNrr %s %s', + desc="Invert a non-linear transformation", + argstr="-invNrr %s %s", position=-2, xor=[ - 'def_input', 'disp_input', 'flow_input', 'comp_input', - 'upd_s_form_input', 'inv_aff_input', 'half_input', - 'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input' - ]) + "def_input", + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", + ], + ) half_input = File( exists=True, - argstr='-half %s', + argstr="-half %s", position=-2, - desc='Half way to the input transformation', + desc="Half way to the input transformation", xor=[ - 'def_input', 'disp_input', 'flow_input', 'comp_input', - 'upd_s_form_input', 'inv_aff_input', 'inv_nrr_input', - 'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input' - ]) + "def_input", + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "make_aff_input", + "aff_2_rig_input", + "flirt_2_nr_input", + ], + ) - argstr_tmp = '-makeAff %f %f %f %f %f %f %f %f %f %f %f %f' + argstr_tmp = "-makeAff %f %f %f %f %f %f %f %f %f %f %f %f" make_aff_input = traits.Tuple( traits.Float, traits.Float, @@ -629,51 +729,74 @@ class RegTransformInputSpec(NiftyRegCommandInputSpec): traits.Float, argstr=argstr_tmp, position=-2, - desc='Make an affine transformation matrix', + desc="Make an affine transformation matrix", xor=[ - 'def_input', 'disp_input', 'flow_input', 'comp_input', - 'upd_s_form_input', 'inv_aff_input', 'inv_nrr_input', 'half_input', - 'aff_2_rig_input', 'flirt_2_nr_input' - ]) + "def_input", + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "aff_2_rig_input", + "flirt_2_nr_input", + ], + ) - desc = 'Extract the rigid component from affine transformation' + desc = "Extract the rigid component from affine transformation" aff_2_rig_input = File( exists=True, - argstr='-aff2rig %s', + argstr="-aff2rig %s", position=-2, desc=desc, xor=[ - 'def_input', 'disp_input', 'flow_input', 'comp_input', - 'upd_s_form_input', 'inv_aff_input', 'inv_nrr_input', 'half_input', - 'make_aff_input', 'flirt_2_nr_input' - ]) + "def_input", + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "flirt_2_nr_input", + ], + ) - desc = 'Convert a FLIRT affine transformation to niftyreg affine \ -transformation' + desc = "Convert a FLIRT affine 
transformation to niftyreg affine \ +transformation" flirt_2_nr_input = traits.Tuple( File(exists=True), File(exists=True), File(exists=True), - argstr='-flirtAff2NR %s %s %s', + argstr="-flirtAff2NR %s %s %s", position=-2, desc=desc, xor=[ - 'def_input', 'disp_input', 'flow_input', 'comp_input', - 'upd_s_form_input', 'inv_aff_input', 'inv_nrr_input', 'half_input', - 'make_aff_input', 'aff_2_rig_input' - ]) + "def_input", + "disp_input", + "flow_input", + "comp_input", + "upd_s_form_input", + "inv_aff_input", + "inv_nrr_input", + "half_input", + "make_aff_input", + "aff_2_rig_input", + ], + ) out_file = File( - genfile=True, - position=-1, - argstr='%s', - desc='transformation file to write') + genfile=True, position=-1, argstr="%s", desc="transformation file to write" + ) class RegTransformOutputSpec(TraitedSpec): """ Output Spec for RegTransform. """ - out_file = File(desc='Output File (transformation in any format)') + + out_file = File(desc="Output File (transformation in any format)") class RegTransform(NiftyRegCommand): @@ -694,62 +817,68 @@ class RegTransform(NiftyRegCommand): 'reg_transform -omp 4 -def warpfield.nii .../warpfield_trans.nii.gz' """ - _cmd = get_custom_path('reg_transform') + + _cmd = get_custom_path("reg_transform") input_spec = RegTransformInputSpec output_spec = RegTransformOutputSpec - _suffix = '_trans' + _suffix = "_trans" def _find_input(self): inputs = [ - self.inputs.def_input, self.inputs.disp_input, - self.inputs.flow_input, self.inputs.comp_input, - self.inputs.comp_input2, self.inputs.upd_s_form_input, - self.inputs.inv_aff_input, self.inputs.inv_nrr_input, - self.inputs.half_input, self.inputs.make_aff_input, - self.inputs.aff_2_rig_input, self.inputs.flirt_2_nr_input + self.inputs.def_input, + self.inputs.disp_input, + self.inputs.flow_input, + self.inputs.comp_input, + self.inputs.comp_input2, + self.inputs.upd_s_form_input, + self.inputs.inv_aff_input, + self.inputs.inv_nrr_input, + self.inputs.half_input, + self.inputs.make_aff_input, + self.inputs.aff_2_rig_input, + self.inputs.flirt_2_nr_input, ] entries = [] for entry in inputs: if isdefined(entry): entries.append(entry) _, _, ext = split_filename(entry) - if ext == '.nii' or ext == '.nii.gz' or ext == '.hdr': + if ext == ".nii" or ext == ".nii.gz" or ext == ".hdr": return entry if len(entries): return entries[0] return None def _gen_filename(self, name): - if name == 'out_file': + if name == "out_file": if isdefined(self.inputs.make_aff_input): - return self._gen_fname( - 'matrix', suffix=self._suffix, ext='.txt') + return self._gen_fname("matrix", suffix=self._suffix, ext=".txt") - if isdefined(self.inputs.comp_input) and \ - isdefined(self.inputs.comp_input2): + if isdefined(self.inputs.comp_input) and isdefined(self.inputs.comp_input2): _, bn1, ext1 = split_filename(self.inputs.comp_input) _, _, ext2 = split_filename(self.inputs.comp_input2) - if ext1 in ['.nii', '.nii.gz', '.hdr', '.img', '.img.gz'] or \ - ext2 in ['.nii', '.nii.gz', '.hdr', '.img', '.img.gz']: - return self._gen_fname( - bn1, suffix=self._suffix, ext='.nii.gz') + if ext1 in [".nii", ".nii.gz", ".hdr", ".img", ".img.gz"] or ext2 in [ + ".nii", + ".nii.gz", + ".hdr", + ".img", + ".img.gz", + ]: + return self._gen_fname(bn1, suffix=self._suffix, ext=".nii.gz") else: return self._gen_fname(bn1, suffix=self._suffix, ext=ext1) if isdefined(self.inputs.flirt_2_nr_input): return self._gen_fname( - self.inputs.flirt_2_nr_input[0], - suffix=self._suffix, - ext='.txt') + self.inputs.flirt_2_nr_input[0], suffix=self._suffix, ext=".txt" 
+ ) input_to_use = self._find_input() _, _, ext = split_filename(input_to_use) - if ext not in ['.nii', '.nii.gz', '.hdr', '.img', '.img.gz']: - return self._gen_fname( - input_to_use, suffix=self._suffix, ext=ext) + if ext not in [".nii", ".nii.gz", ".hdr", ".img", ".img.gz"]: + return self._gen_fname(input_to_use, suffix=self._suffix, ext=ext) else: - return self._gen_fname( - input_to_use, suffix=self._suffix, ext='.nii.gz') + return self._gen_fname(input_to_use, suffix=self._suffix, ext=".nii.gz") return None @@ -757,45 +886,51 @@ def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_file): - outputs['out_file'] = self.inputs.out_file + outputs["out_file"] = self.inputs.out_file else: - outputs['out_file'] = self._gen_filename('out_file') + outputs["out_file"] = self._gen_filename("out_file") return outputs class RegMeasureInputSpec(NiftyRegCommandInputSpec): """ Input Spec for RegMeasure. """ + # Input reference file ref_file = File( exists=True, - desc='The input reference/target image', - argstr='-ref %s', - mandatory=True) + desc="The input reference/target image", + argstr="-ref %s", + mandatory=True, + ) # Input floating file flo_file = File( exists=True, - desc='The input floating/source image', - argstr='-flo %s', - mandatory=True) + desc="The input floating/source image", + argstr="-flo %s", + mandatory=True, + ) measure_type = traits.Enum( - 'ncc', - 'lncc', - 'nmi', - 'ssd', + "ncc", + "lncc", + "nmi", + "ssd", mandatory=True, - argstr='-%s', - desc='Measure of similarity to compute') + argstr="-%s", + desc="Measure of similarity to compute", + ) out_file = File( - name_source=['flo_file'], - name_template='%s', - argstr='-out %s', - desc='The output text file containing the measure') + name_source=["flo_file"], + name_template="%s", + argstr="-out %s", + desc="The output text file containing the measure", + ) class RegMeasureOutputSpec(TraitedSpec): """ Output Spec for RegMeasure. 
""" - out_file = File(desc='The output text file containing the measure') + + out_file = File(desc="The output text file containing the measure") class RegMeasure(NiftyRegCommand): @@ -817,11 +952,12 @@ class RegMeasure(NiftyRegCommand): 'reg_measure -flo im2.nii -lncc -omp 4 -out im2_lncc.txt -ref im1.nii' """ - _cmd = get_custom_path('reg_measure') + + _cmd = get_custom_path("reg_measure") input_spec = RegMeasureInputSpec output_spec = RegMeasureOutputSpec def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) suffix = self.inputs.measure_type - return os.path.join(path, '{0}_{1}.txt'.format(base, suffix)) + return os.path.join(path, "{0}_{1}.txt".format(base, suffix)) diff --git a/nipype/interfaces/niftyreg/tests/test_auto_NiftyRegCommand.py b/nipype/interfaces/niftyreg/tests/test_auto_NiftyRegCommand.py index 016ca5654b..75e103edbe 100644 --- a/nipype/interfaces/niftyreg/tests/test_auto_NiftyRegCommand.py +++ b/nipype/interfaces/niftyreg/tests/test_auto_NiftyRegCommand.py @@ -4,15 +4,9 @@ def test_NiftyRegCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - omp_core_val=dict( - argstr='-omp %i', - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + omp_core_val=dict(argstr="-omp %i", usedefault=True,), ) inputs = NiftyRegCommand.input_spec() diff --git a/nipype/interfaces/niftyreg/tests/test_auto_RegAladin.py b/nipype/interfaces/niftyreg/tests/test_auto_RegAladin.py index ed11753b5d..16ca83bdba 100644 --- a/nipype/interfaces/niftyreg/tests/test_auto_RegAladin.py +++ b/nipype/interfaces/niftyreg/tests/test_auto_RegAladin.py @@ -4,79 +4,58 @@ def test_RegAladin_inputs(): input_map = dict( - aff_direct_flag=dict(argstr='-affDirect', ), + aff_direct_flag=dict(argstr="-affDirect",), aff_file=dict( - argstr='-aff %s', + argstr="-aff %s", extensions=None, - name_source=['flo_file'], - name_template='%s_aff.txt', + name_source=["flo_file"], + name_template="%s_aff.txt", ), - args=dict(argstr='%s', ), - cog_flag=dict(argstr='-cog', ), - environ=dict( - nohash=True, - usedefault=True, - ), - flo_file=dict( - argstr='-flo %s', - extensions=None, - mandatory=True, - ), - flo_low_val=dict(argstr='-floLowThr %f', ), - flo_up_val=dict(argstr='-floUpThr %f', ), - fmask_file=dict( - argstr='-fmask %s', - extensions=None, - ), - gpuid_val=dict(argstr='-gpuid %i', ), - i_val=dict(argstr='-pi %d', ), - in_aff_file=dict( - argstr='-inaff %s', - extensions=None, - ), - ln_val=dict(argstr='-ln %d', ), - lp_val=dict(argstr='-lp %d', ), - maxit_val=dict(argstr='-maxit %d', ), - nac_flag=dict(argstr='-nac', ), - nosym_flag=dict(argstr='-noSym', ), - omp_core_val=dict( - argstr='-omp %i', - usedefault=True, - ), - platform_val=dict(argstr='-platf %i', ), - ref_file=dict( - argstr='-ref %s', - extensions=None, - mandatory=True, - ), - ref_low_val=dict(argstr='-refLowThr %f', ), - ref_up_val=dict(argstr='-refUpThr %f', ), + args=dict(argstr="%s",), + cog_flag=dict(argstr="-cog",), + environ=dict(nohash=True, usedefault=True,), + flo_file=dict(argstr="-flo %s", extensions=None, mandatory=True,), + flo_low_val=dict(argstr="-floLowThr %f",), + flo_up_val=dict(argstr="-floUpThr %f",), + fmask_file=dict(argstr="-fmask %s", extensions=None,), + gpuid_val=dict(argstr="-gpuid %i",), + i_val=dict(argstr="-pi %d",), + in_aff_file=dict(argstr="-inaff %s", extensions=None,), + ln_val=dict(argstr="-ln %d",), + lp_val=dict(argstr="-lp %d",), + maxit_val=dict(argstr="-maxit %d",), + 
nac_flag=dict(argstr="-nac",), + nosym_flag=dict(argstr="-noSym",), + omp_core_val=dict(argstr="-omp %i", usedefault=True,), + platform_val=dict(argstr="-platf %i",), + ref_file=dict(argstr="-ref %s", extensions=None, mandatory=True,), + ref_low_val=dict(argstr="-refLowThr %f",), + ref_up_val=dict(argstr="-refUpThr %f",), res_file=dict( - argstr='-res %s', + argstr="-res %s", extensions=None, - name_source=['flo_file'], - name_template='%s_res.nii.gz', + name_source=["flo_file"], + name_template="%s_res.nii.gz", ), - rig_only_flag=dict(argstr='-rigOnly', ), - rmask_file=dict( - argstr='-rmask %s', - extensions=None, - ), - smoo_f_val=dict(argstr='-smooF %f', ), - smoo_r_val=dict(argstr='-smooR %f', ), - v_val=dict(argstr='-pv %d', ), - verbosity_off_flag=dict(argstr='-voff', ), + rig_only_flag=dict(argstr="-rigOnly",), + rmask_file=dict(argstr="-rmask %s", extensions=None,), + smoo_f_val=dict(argstr="-smooF %f",), + smoo_r_val=dict(argstr="-smooR %f",), + v_val=dict(argstr="-pv %d",), + verbosity_off_flag=dict(argstr="-voff",), ) inputs = RegAladin.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RegAladin_outputs(): output_map = dict( - aff_file=dict(extensions=None, ), + aff_file=dict(extensions=None,), avg_output=dict(), - res_file=dict(extensions=None, ), + res_file=dict(extensions=None,), ) outputs = RegAladin.output_spec() diff --git a/nipype/interfaces/niftyreg/tests/test_auto_RegAverage.py b/nipype/interfaces/niftyreg/tests/test_auto_RegAverage.py index 8eb40cd38e..3ee172453f 100644 --- a/nipype/interfaces/niftyreg/tests/test_auto_RegAverage.py +++ b/nipype/interfaces/niftyreg/tests/test_auto_RegAverage.py @@ -4,84 +4,90 @@ def test_RegAverage_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), avg_files=dict( - argstr='-avg %s', + argstr="-avg %s", position=1, - sep=' ', + sep=" ", xor=[ - 'avg_lts_files', 'avg_ref_file', 'demean1_ref_file', - 'demean2_ref_file', 'demean3_ref_file', 'warp_files' + "avg_lts_files", + "avg_ref_file", + "demean1_ref_file", + "demean2_ref_file", + "demean3_ref_file", + "warp_files", ], ), avg_lts_files=dict( - argstr='-avg_lts %s', + argstr="-avg_lts %s", position=1, - sep=' ', + sep=" ", xor=[ - 'avg_files', 'avg_ref_file', 'demean1_ref_file', - 'demean2_ref_file', 'demean3_ref_file', 'warp_files' + "avg_files", + "avg_ref_file", + "demean1_ref_file", + "demean2_ref_file", + "demean3_ref_file", + "warp_files", ], ), avg_ref_file=dict( - argstr='-avg_tran %s', + argstr="-avg_tran %s", extensions=None, position=1, - requires=['warp_files'], + requires=["warp_files"], xor=[ - 'avg_files', 'avg_lts_files', 'demean1_ref_file', - 'demean2_ref_file', 'demean3_ref_file' + "avg_files", + "avg_lts_files", + "demean1_ref_file", + "demean2_ref_file", + "demean3_ref_file", ], ), demean1_ref_file=dict( - argstr='-demean1 %s', + argstr="-demean1 %s", extensions=None, position=1, - requires=['warp_files'], + requires=["warp_files"], xor=[ - 'avg_files', 'avg_lts_files', 'avg_ref_file', - 'demean2_ref_file', 'demean3_ref_file' + "avg_files", + "avg_lts_files", + "avg_ref_file", + "demean2_ref_file", + "demean3_ref_file", ], ), demean2_ref_file=dict( - argstr='-demean2 %s', + argstr="-demean2 %s", extensions=None, position=1, - requires=['warp_files'], + requires=["warp_files"], xor=[ - 'avg_files', 'avg_lts_files', 'avg_ref_file', - 'demean1_ref_file', 'demean3_ref_file' + "avg_files", + "avg_lts_files", + 
"avg_ref_file", + "demean1_ref_file", + "demean3_ref_file", ], ), demean3_ref_file=dict( - argstr='-demean3 %s', + argstr="-demean3 %s", extensions=None, position=1, - requires=['warp_files'], + requires=["warp_files"], xor=[ - 'avg_files', 'avg_lts_files', 'avg_ref_file', - 'demean1_ref_file', 'demean2_ref_file' + "avg_files", + "avg_lts_files", + "avg_ref_file", + "demean1_ref_file", + "demean2_ref_file", ], ), - environ=dict( - nohash=True, - usedefault=True, - ), - omp_core_val=dict( - argstr='-omp %i', - usedefault=True, - ), - out_file=dict( - argstr='%s', - extensions=None, - genfile=True, - position=0, - ), + environ=dict(nohash=True, usedefault=True,), + omp_core_val=dict(argstr="-omp %i", usedefault=True,), + out_file=dict(argstr="%s", extensions=None, genfile=True, position=0,), warp_files=dict( - argstr='%s', - position=-1, - sep=' ', - xor=['avg_files', 'avg_lts_files'], + argstr="%s", position=-1, sep=" ", xor=["avg_files", "avg_lts_files"], ), ) inputs = RegAverage.input_spec() @@ -89,8 +95,10 @@ def test_RegAverage_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RegAverage_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = RegAverage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyreg/tests/test_auto_RegF3D.py b/nipype/interfaces/niftyreg/tests/test_auto_RegF3D.py index 20fdee1f08..a70318cd43 100644 --- a/nipype/interfaces/niftyreg/tests/test_auto_RegF3D.py +++ b/nipype/interfaces/niftyreg/tests/test_auto_RegF3D.py @@ -4,108 +4,84 @@ def test_RegF3D_inputs(): input_map = dict( - aff_file=dict( - argstr='-aff %s', - extensions=None, - ), - amc_flag=dict(argstr='-amc', ), - args=dict(argstr='%s', ), - be_val=dict(argstr='-be %f', ), + aff_file=dict(argstr="-aff %s", extensions=None,), + amc_flag=dict(argstr="-amc",), + args=dict(argstr="%s",), + be_val=dict(argstr="-be %f",), cpp_file=dict( - argstr='-cpp %s', - extensions=None, - name_source=['flo_file'], - name_template='%s_cpp.nii.gz', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fbn2_val=dict(argstr='-fbn %d %d', ), - fbn_val=dict(argstr='--fbn %d', ), - flo_file=dict( - argstr='-flo %s', - extensions=None, - mandatory=True, - ), - flo_smooth_val=dict(argstr='-smooF %f', ), - flwth2_thr_val=dict(argstr='-fLwTh %d %f', ), - flwth_thr_val=dict(argstr='--fLwTh %f', ), - fmask_file=dict( - argstr='-fmask %s', - extensions=None, - ), - fupth2_thr_val=dict(argstr='-fUpTh %d %f', ), - fupth_thr_val=dict(argstr='--fUpTh %f', ), - incpp_file=dict( - argstr='-incpp %s', - extensions=None, - ), - jl_val=dict(argstr='-jl %f', ), - kld2_flag=dict(argstr='-kld %d', ), - kld_flag=dict(argstr='--kld', ), - le_val=dict(argstr='-le %f', ), - ln_val=dict(argstr='-ln %d', ), - lncc2_val=dict(argstr='-lncc %d %f', ), - lncc_val=dict(argstr='--lncc %f', ), - lp_val=dict(argstr='-lp %d', ), - maxit_val=dict(argstr='-maxit %d', ), - nmi_flag=dict(argstr='--nmi', ), - no_app_jl_flag=dict(argstr='-noAppJL', ), - noconj_flag=dict(argstr='-noConj', ), - nopy_flag=dict(argstr='-nopy', ), - nox_flag=dict(argstr='-nox', ), - noy_flag=dict(argstr='-noy', ), - noz_flag=dict(argstr='-noz', ), - omp_core_val=dict( - argstr='-omp %i', - usedefault=True, - ), - pad_val=dict(argstr='-pad %f', ), - pert_val=dict(argstr='-pert %d', ), - rbn2_val=dict(argstr='-rbn %d %d', ), - 
rbn_val=dict(argstr='--rbn %d', ), - ref_file=dict( - argstr='-ref %s', + argstr="-cpp %s", extensions=None, - mandatory=True, + name_source=["flo_file"], + name_template="%s_cpp.nii.gz", ), - ref_smooth_val=dict(argstr='-smooR %f', ), + environ=dict(nohash=True, usedefault=True,), + fbn2_val=dict(argstr="-fbn %d %d",), + fbn_val=dict(argstr="--fbn %d",), + flo_file=dict(argstr="-flo %s", extensions=None, mandatory=True,), + flo_smooth_val=dict(argstr="-smooF %f",), + flwth2_thr_val=dict(argstr="-fLwTh %d %f",), + flwth_thr_val=dict(argstr="--fLwTh %f",), + fmask_file=dict(argstr="-fmask %s", extensions=None,), + fupth2_thr_val=dict(argstr="-fUpTh %d %f",), + fupth_thr_val=dict(argstr="--fUpTh %f",), + incpp_file=dict(argstr="-incpp %s", extensions=None,), + jl_val=dict(argstr="-jl %f",), + kld2_flag=dict(argstr="-kld %d",), + kld_flag=dict(argstr="--kld",), + le_val=dict(argstr="-le %f",), + ln_val=dict(argstr="-ln %d",), + lncc2_val=dict(argstr="-lncc %d %f",), + lncc_val=dict(argstr="--lncc %f",), + lp_val=dict(argstr="-lp %d",), + maxit_val=dict(argstr="-maxit %d",), + nmi_flag=dict(argstr="--nmi",), + no_app_jl_flag=dict(argstr="-noAppJL",), + noconj_flag=dict(argstr="-noConj",), + nopy_flag=dict(argstr="-nopy",), + nox_flag=dict(argstr="-nox",), + noy_flag=dict(argstr="-noy",), + noz_flag=dict(argstr="-noz",), + omp_core_val=dict(argstr="-omp %i", usedefault=True,), + pad_val=dict(argstr="-pad %f",), + pert_val=dict(argstr="-pert %d",), + rbn2_val=dict(argstr="-rbn %d %d",), + rbn_val=dict(argstr="--rbn %d",), + ref_file=dict(argstr="-ref %s", extensions=None, mandatory=True,), + ref_smooth_val=dict(argstr="-smooR %f",), res_file=dict( - argstr='-res %s', + argstr="-res %s", extensions=None, - name_source=['flo_file'], - name_template='%s_res.nii.gz', + name_source=["flo_file"], + name_template="%s_res.nii.gz", ), - rlwth2_thr_val=dict(argstr='-rLwTh %d %f', ), - rlwth_thr_val=dict(argstr='--rLwTh %f', ), - rmask_file=dict( - argstr='-rmask %s', - extensions=None, - ), - rupth2_thr_val=dict(argstr='-rUpTh %d %f', ), - rupth_thr_val=dict(argstr='--rUpTh %f', ), - smooth_grad_val=dict(argstr='-smoothGrad %f', ), - ssd2_flag=dict(argstr='-ssd %d', ), - ssd_flag=dict(argstr='--ssd', ), - sx_val=dict(argstr='-sx %f', ), - sy_val=dict(argstr='-sy %f', ), - sz_val=dict(argstr='-sz %f', ), - vel_flag=dict(argstr='-vel', ), - verbosity_off_flag=dict(argstr='-voff', ), + rlwth2_thr_val=dict(argstr="-rLwTh %d %f",), + rlwth_thr_val=dict(argstr="--rLwTh %f",), + rmask_file=dict(argstr="-rmask %s", extensions=None,), + rupth2_thr_val=dict(argstr="-rUpTh %d %f",), + rupth_thr_val=dict(argstr="--rUpTh %f",), + smooth_grad_val=dict(argstr="-smoothGrad %f",), + ssd2_flag=dict(argstr="-ssd %d",), + ssd_flag=dict(argstr="--ssd",), + sx_val=dict(argstr="-sx %f",), + sy_val=dict(argstr="-sy %f",), + sz_val=dict(argstr="-sz %f",), + vel_flag=dict(argstr="-vel",), + verbosity_off_flag=dict(argstr="-voff",), ) inputs = RegF3D.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RegF3D_outputs(): output_map = dict( avg_output=dict(), - cpp_file=dict(extensions=None, ), - invcpp_file=dict(extensions=None, ), - invres_file=dict(extensions=None, ), - res_file=dict(extensions=None, ), + cpp_file=dict(extensions=None,), + invcpp_file=dict(extensions=None,), + invres_file=dict(extensions=None,), + res_file=dict(extensions=None,), ) outputs = RegF3D.output_spec() diff --git 
a/nipype/interfaces/niftyreg/tests/test_auto_RegJacobian.py b/nipype/interfaces/niftyreg/tests/test_auto_RegJacobian.py
index 9c08f4ef42..5a0291e1af 100644
--- a/nipype/interfaces/niftyreg/tests/test_auto_RegJacobian.py
+++ b/nipype/interfaces/niftyreg/tests/test_auto_RegJacobian.py
@@ -4,44 +4,29 @@ def test_RegJacobian_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        omp_core_val=dict(
-            argstr='-omp %i',
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        omp_core_val=dict(argstr="-omp %i", usedefault=True,),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
             extensions=None,
-            name_source=['trans_file'],
-            name_template='%s',
+            name_source=["trans_file"],
+            name_template="%s",
             position=-1,
         ),
-        ref_file=dict(
-            argstr='-ref %s',
-            extensions=None,
-        ),
-        trans_file=dict(
-            argstr='-trans %s',
-            extensions=None,
-            mandatory=True,
-        ),
-        type=dict(
-            argstr='-%s',
-            position=-2,
-            usedefault=True,
-        ),
+        ref_file=dict(argstr="-ref %s", extensions=None,),
+        trans_file=dict(argstr="-trans %s", extensions=None, mandatory=True,),
+        type=dict(argstr="-%s", position=-2, usedefault=True,),
     )
     inputs = RegJacobian.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_RegJacobian_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = RegJacobian.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/niftyreg/tests/test_auto_RegMeasure.py b/nipype/interfaces/niftyreg/tests/test_auto_RegMeasure.py
index 93224b8a90..8ae16aa9c8 100644
--- a/nipype/interfaces/niftyreg/tests/test_auto_RegMeasure.py
+++ b/nipype/interfaces/niftyreg/tests/test_auto_RegMeasure.py
@@ -4,43 +4,28 @@ def test_RegMeasure_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        flo_file=dict(
-            argstr='-flo %s',
-            extensions=None,
-            mandatory=True,
-        ),
-        measure_type=dict(
-            argstr='-%s',
-            mandatory=True,
-        ),
-        omp_core_val=dict(
-            argstr='-omp %i',
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        flo_file=dict(argstr="-flo %s", extensions=None, mandatory=True,),
+        measure_type=dict(argstr="-%s", mandatory=True,),
+        omp_core_val=dict(argstr="-omp %i", usedefault=True,),
         out_file=dict(
-            argstr='-out %s',
+            argstr="-out %s",
             extensions=None,
-            name_source=['flo_file'],
-            name_template='%s',
-        ),
-        ref_file=dict(
-            argstr='-ref %s',
-            extensions=None,
-            mandatory=True,
+            name_source=["flo_file"],
+            name_template="%s",
         ),
+        ref_file=dict(argstr="-ref %s", extensions=None, mandatory=True,),
     )
     inputs = RegMeasure.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_RegMeasure_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = RegMeasure.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/niftyreg/tests/test_auto_RegResample.py b/nipype/interfaces/niftyreg/tests/test_auto_RegResample.py
index 5a1852a97d..2836efb4f8 100644
--- a/nipype/interfaces/niftyreg/tests/test_auto_RegResample.py
+++ b/nipype/interfaces/niftyreg/tests/test_auto_RegResample.py
@@ -4,55 +4,36 @@ def test_RegResample_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        flo_file=dict(
-            argstr='-flo %s',
-            extensions=None,
-            mandatory=True,
-        ),
-        inter_val=dict(argstr='-inter %d', ),
-        omp_core_val=dict(
-            argstr='-omp %i',
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        flo_file=dict(argstr="-flo %s", extensions=None, mandatory=True,),
+        inter_val=dict(argstr="-inter %d",),
+        omp_core_val=dict(argstr="-omp %i", usedefault=True,),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
             extensions=None,
-            name_source=['flo_file'],
-            name_template='%s',
+            name_source=["flo_file"],
+            name_template="%s",
             position=-1,
         ),
-        pad_val=dict(argstr='-pad %f', ),
-        psf_alg=dict(argstr='-psf_alg %d', ),
-        psf_flag=dict(argstr='-psf', ),
-        ref_file=dict(
-            argstr='-ref %s',
-            extensions=None,
-            mandatory=True,
-        ),
-        tensor_flag=dict(argstr='-tensor ', ),
-        trans_file=dict(
-            argstr='-trans %s',
-            extensions=None,
-        ),
-        type=dict(
-            argstr='-%s',
-            position=-2,
-            usedefault=True,
-        ),
-        verbosity_off_flag=dict(argstr='-voff', ),
+        pad_val=dict(argstr="-pad %f",),
+        psf_alg=dict(argstr="-psf_alg %d",),
+        psf_flag=dict(argstr="-psf",),
+        ref_file=dict(argstr="-ref %s", extensions=None, mandatory=True,),
+        tensor_flag=dict(argstr="-tensor ",),
+        trans_file=dict(argstr="-trans %s", extensions=None,),
+        type=dict(argstr="-%s", position=-2, usedefault=True,),
+        verbosity_off_flag=dict(argstr="-voff",),
     )
     inputs = RegResample.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_RegResample_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = RegResample.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/niftyreg/tests/test_auto_RegTools.py b/nipype/interfaces/niftyreg/tests/test_auto_RegTools.py
index 9b81307511..0b0513ef4d 100644
--- a/nipype/interfaces/niftyreg/tests/test_auto_RegTools.py
+++ b/nipype/interfaces/niftyreg/tests/test_auto_RegTools.py
@@ -4,55 +4,41 @@ def test_RegTools_inputs():
     input_map = dict(
-        add_val=dict(argstr='-add %s', ),
-        args=dict(argstr='%s', ),
-        bin_flag=dict(argstr='-bin', ),
-        chg_res_val=dict(argstr='-chgres %f %f %f', ),
-        div_val=dict(argstr='-div %s', ),
-        down_flag=dict(argstr='-down', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_file=dict(
-            argstr='-in %s',
-            extensions=None,
-            mandatory=True,
-        ),
-        inter_val=dict(argstr='-interp %d', ),
-        iso_flag=dict(argstr='-iso', ),
-        mask_file=dict(
-            argstr='-nan %s',
-            extensions=None,
-        ),
-        mul_val=dict(argstr='-mul %s', ),
-        noscl_flag=dict(argstr='-noscl', ),
-        omp_core_val=dict(
-            argstr='-omp %i',
-            usedefault=True,
-        ),
+        add_val=dict(argstr="-add %s",),
+        args=dict(argstr="%s",),
+        bin_flag=dict(argstr="-bin",),
+        chg_res_val=dict(argstr="-chgres %f %f %f",),
+        div_val=dict(argstr="-div %s",),
+        down_flag=dict(argstr="-down",),
+        environ=dict(nohash=True, usedefault=True,),
+        in_file=dict(argstr="-in %s", extensions=None, mandatory=True,),
+        inter_val=dict(argstr="-interp %d",),
+        iso_flag=dict(argstr="-iso",),
+        mask_file=dict(argstr="-nan %s", extensions=None,),
+        mul_val=dict(argstr="-mul %s",),
+        noscl_flag=dict(argstr="-noscl",),
+        omp_core_val=dict(argstr="-omp %i", usedefault=True,),
         out_file=dict(
-            argstr='-out %s',
-            extensions=None,
-            name_source=['in_file'],
-            name_template='%s_tools.nii.gz',
-        ),
-        rms_val=dict(
-            argstr='-rms %s',
+            argstr="-out %s",
             extensions=None,
+            name_source=["in_file"],
+            name_template="%s_tools.nii.gz",
         ),
-        smo_g_val=dict(argstr='-smoG %f %f %f', ),
-        smo_s_val=dict(argstr='-smoS %f %f %f', ),
-        sub_val=dict(argstr='-sub %s', ),
-        thr_val=dict(argstr='-thr %f', ),
+        rms_val=dict(argstr="-rms %s", extensions=None,),
+        smo_g_val=dict(argstr="-smoG %f %f %f",),
+        smo_s_val=dict(argstr="-smoS %f %f %f",),
+        sub_val=dict(argstr="-sub %s",),
+        thr_val=dict(argstr="-thr %f",),
     )
     inputs = RegTools.input_spec()
 
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_RegTools_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = RegTools.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/niftyreg/tests/test_auto_RegTransform.py b/nipype/interfaces/niftyreg/tests/test_auto_RegTransform.py
index 07df2bb65b..7a16c6e452 100644
--- a/nipype/interfaces/niftyreg/tests/test_auto_RegTransform.py
+++ b/nipype/interfaces/niftyreg/tests/test_auto_RegTransform.py
@@ -5,150 +5,202 @@ def test_RegTransform_inputs():
     input_map = dict(
         aff_2_rig_input=dict(
-            argstr='-aff2rig %s',
+            argstr="-aff2rig %s",
             extensions=None,
             position=-2,
             xor=[
-                'def_input', 'disp_input', 'flow_input', 'comp_input',
-                'upd_s_form_input', 'inv_aff_input', 'inv_nrr_input',
-                'half_input', 'make_aff_input', 'flirt_2_nr_input'
+                "def_input",
+                "disp_input",
+                "flow_input",
+                "comp_input",
+                "upd_s_form_input",
+                "inv_aff_input",
+                "inv_nrr_input",
+                "half_input",
+                "make_aff_input",
+                "flirt_2_nr_input",
             ],
         ),
-        args=dict(argstr='%s', ),
+        args=dict(argstr="%s",),
         comp_input=dict(
-            argstr='-comp %s',
+            argstr="-comp %s",
             extensions=None,
             position=-3,
-            requires=['comp_input2'],
+            requires=["comp_input2"],
             xor=[
-                'def_input', 'disp_input', 'flow_input', 'upd_s_form_input',
-                'inv_aff_input', 'inv_nrr_input', 'half_input',
-                'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input'
+                "def_input",
+                "disp_input",
+                "flow_input",
+                "upd_s_form_input",
+                "inv_aff_input",
+                "inv_nrr_input",
+                "half_input",
+                "make_aff_input",
+                "aff_2_rig_input",
+                "flirt_2_nr_input",
             ],
         ),
-        comp_input2=dict(
-            argstr='%s',
-            extensions=None,
-            position=-2,
-        ),
+        comp_input2=dict(argstr="%s", extensions=None, position=-2,),
         def_input=dict(
-            argstr='-def %s',
+            argstr="-def %s",
             extensions=None,
             position=-2,
             xor=[
-                'disp_input', 'flow_input', 'comp_input', 'upd_s_form_input',
-                'inv_aff_input', 'inv_nrr_input', 'half_input',
-                'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input'
+                "disp_input",
+                "flow_input",
+                "comp_input",
+                "upd_s_form_input",
+                "inv_aff_input",
+                "inv_nrr_input",
+                "half_input",
+                "make_aff_input",
+                "aff_2_rig_input",
+                "flirt_2_nr_input",
            ],
         ),
         disp_input=dict(
-            argstr='-disp %s',
+            argstr="-disp %s",
             extensions=None,
             position=-2,
             xor=[
-                'def_input', 'flow_input', 'comp_input', 'upd_s_form_input',
-                'inv_aff_input', 'inv_nrr_input', 'half_input',
-                'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input'
+                "def_input",
+                "flow_input",
+                "comp_input",
+                "upd_s_form_input",
+                "inv_aff_input",
+                "inv_nrr_input",
+                "half_input",
+                "make_aff_input",
+                "aff_2_rig_input",
+                "flirt_2_nr_input",
             ],
         ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        environ=dict(nohash=True, usedefault=True,),
         flirt_2_nr_input=dict(
-            argstr='-flirtAff2NR %s %s %s',
+            argstr="-flirtAff2NR %s %s %s",
             position=-2,
             xor=[
-                'def_input', 'disp_input', 'flow_input', 'comp_input',
-                'upd_s_form_input', 'inv_aff_input', 'inv_nrr_input',
-                'half_input', 'make_aff_input', 'aff_2_rig_input'
+                "def_input",
+                "disp_input",
+                "flow_input",
+                "comp_input",
+                "upd_s_form_input",
+                "inv_aff_input",
+                "inv_nrr_input",
+                "half_input",
+                "make_aff_input",
+                "aff_2_rig_input",
             ],
         ),
         flow_input=dict(
-            argstr='-flow %s',
+            argstr="-flow %s",
             extensions=None,
             position=-2,
             xor=[
-                'def_input', 'disp_input', 'comp_input', 'upd_s_form_input',
-                'inv_aff_input', 'inv_nrr_input', 'half_input',
-                'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input'
+                "def_input",
+                "disp_input",
+                "comp_input",
+                "upd_s_form_input",
+                "inv_aff_input",
+                "inv_nrr_input",
+                "half_input",
+                "make_aff_input",
+                "aff_2_rig_input",
+                "flirt_2_nr_input",
             ],
         ),
         half_input=dict(
-            argstr='-half %s',
+            argstr="-half %s",
             extensions=None,
             position=-2,
             xor=[
-                'def_input', 'disp_input', 'flow_input', 'comp_input',
-                'upd_s_form_input', 'inv_aff_input', 'inv_nrr_input',
-                'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input'
+                "def_input",
+                "disp_input",
+                "flow_input",
+                "comp_input",
+                "upd_s_form_input",
+                "inv_aff_input",
+                "inv_nrr_input",
+                "make_aff_input",
+                "aff_2_rig_input",
+                "flirt_2_nr_input",
             ],
         ),
         inv_aff_input=dict(
-            argstr='-invAff %s',
+            argstr="-invAff %s",
             extensions=None,
             position=-2,
             xor=[
-                'def_input', 'disp_input', 'flow_input', 'comp_input',
-                'upd_s_form_input', 'inv_nrr_input', 'half_input',
-                'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input'
+                "def_input",
+                "disp_input",
+                "flow_input",
+                "comp_input",
+                "upd_s_form_input",
+                "inv_nrr_input",
+                "half_input",
+                "make_aff_input",
+                "aff_2_rig_input",
+                "flirt_2_nr_input",
             ],
         ),
         inv_nrr_input=dict(
-            argstr='-invNrr %s %s',
+            argstr="-invNrr %s %s",
             position=-2,
             xor=[
-                'def_input', 'disp_input', 'flow_input', 'comp_input',
-                'upd_s_form_input', 'inv_aff_input', 'half_input',
-                'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input'
+                "def_input",
+                "disp_input",
+                "flow_input",
+                "comp_input",
+                "upd_s_form_input",
+                "inv_aff_input",
+                "half_input",
+                "make_aff_input",
+                "aff_2_rig_input",
+                "flirt_2_nr_input",
             ],
         ),
         make_aff_input=dict(
-            argstr='-makeAff %f %f %f %f %f %f %f %f %f %f %f %f',
+            argstr="-makeAff %f %f %f %f %f %f %f %f %f %f %f %f",
             position=-2,
             xor=[
-                'def_input', 'disp_input', 'flow_input', 'comp_input',
-                'upd_s_form_input', 'inv_aff_input', 'inv_nrr_input',
-                'half_input', 'aff_2_rig_input', 'flirt_2_nr_input'
+                "def_input",
+                "disp_input",
+                "flow_input",
+                "comp_input",
+                "upd_s_form_input",
+                "inv_aff_input",
+                "inv_nrr_input",
+                "half_input",
+                "aff_2_rig_input",
+                "flirt_2_nr_input",
             ],
         ),
-        omp_core_val=dict(
-            argstr='-omp %i',
-            usedefault=True,
-        ),
-        out_file=dict(
-            argstr='%s',
-            extensions=None,
-            genfile=True,
-            position=-1,
-        ),
-        ref1_file=dict(
-            argstr='-ref %s',
-            extensions=None,
-            position=0,
-        ),
+        omp_core_val=dict(argstr="-omp %i", usedefault=True,),
+        out_file=dict(argstr="%s", extensions=None, genfile=True, position=-1,),
+        ref1_file=dict(argstr="-ref %s", extensions=None, position=0,),
         ref2_file=dict(
-            argstr='-ref2 %s',
-            extensions=None,
-            position=1,
-            requires=['ref1_file'],
+            argstr="-ref2 %s", extensions=None, position=1, requires=["ref1_file"],
         ),
         upd_s_form_input=dict(
-            argstr='-updSform %s',
+            argstr="-updSform %s",
             extensions=None,
             position=-3,
-            requires=['upd_s_form_input2'],
+            requires=["upd_s_form_input2"],
             xor=[
-                'def_input', 'disp_input', 'flow_input', 'comp_input',
-                'inv_aff_input', 'inv_nrr_input', 'half_input',
-                'make_aff_input', 'aff_2_rig_input', 'flirt_2_nr_input'
+                "def_input",
+                "disp_input",
+                "flow_input",
+                "comp_input",
+                "inv_aff_input",
+                "inv_nrr_input",
+                "half_input",
+                "make_aff_input",
+                "aff_2_rig_input",
+                "flirt_2_nr_input",
             ],
         ),
         upd_s_form_input2=dict(
-            argstr='%s',
-            extensions=None,
-            position=-2,
-            requires=['upd_s_form_input'],
+            argstr="%s", extensions=None, position=-2, requires=["upd_s_form_input"],
         ),
     )
     inputs = RegTransform.input_spec()
@@ -156,8 +208,10 @@ def test_RegTransform_inputs():
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_RegTransform_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = RegTransform.output_spec()
 
     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/niftyreg/tests/test_reg.py b/nipype/interfaces/niftyreg/tests/test_reg.py
index 862760139e..77b56e21da 100644
--- a/nipype/interfaces/niftyreg/tests/test_reg.py
+++ b/nipype/interfaces/niftyreg/tests/test_reg.py
@@ -5,43 +5,44 @@
 import pytest
 
 from ....testing import example_data
-from .. import (get_custom_path, RegAladin, RegF3D)
+from .. import get_custom_path, RegAladin, RegF3D
 from .test_regutils import no_nifty_tool
 
 
 @pytest.mark.skipif(
-    no_nifty_tool(cmd='reg_aladin'),
-    reason="niftyreg is not installed. reg_aladin not found.")
+    no_nifty_tool(cmd="reg_aladin"),
+    reason="niftyreg is not installed. reg_aladin not found.",
+)
 def test_reg_aladin():
     """ tests for reg_aladin interface"""
     # Create a reg_aladin object
     nr_aladin = RegAladin()
 
     # Check if the command is properly defined
-    assert nr_aladin.cmd == get_custom_path('reg_aladin')
+    assert nr_aladin.cmd == get_custom_path("reg_aladin")
 
     # test raising error with mandatory args absent
     with pytest.raises(ValueError):
         nr_aladin.run()
 
     # Assign some input data
-    ref_file = example_data('im1.nii')
-    flo_file = example_data('im2.nii')
-    rmask_file = example_data('mask.nii')
+    ref_file = example_data("im1.nii")
+    flo_file = example_data("im2.nii")
+    rmask_file = example_data("mask.nii")
     nr_aladin.inputs.ref_file = ref_file
     nr_aladin.inputs.flo_file = flo_file
     nr_aladin.inputs.rmask_file = rmask_file
     nr_aladin.inputs.omp_core_val = 4
 
-    cmd_tmp = '{cmd} -aff {aff} -flo {flo} -omp 4 -ref {ref} -res {res} \
--rmask {rmask}'
+    cmd_tmp = "{cmd} -aff {aff} -flo {flo} -omp 4 -ref {ref} -res {res} \
+-rmask {rmask}"
     expected_cmd = cmd_tmp.format(
-        cmd=get_custom_path('reg_aladin'),
-        aff='im2_aff.txt',
+        cmd=get_custom_path("reg_aladin"),
+        aff="im2_aff.txt",
         flo=flo_file,
         ref=ref_file,
-        res='im2_res.nii.gz',
+        res="im2_res.nii.gz",
         rmask=rmask_file,
     )
 
@@ -49,24 +50,24 @@ def test_reg_aladin():
 
 
 @pytest.mark.skipif(
-    no_nifty_tool(cmd='reg_f3d'),
-    reason="niftyreg is not installed. reg_f3d not found.")
+    no_nifty_tool(cmd="reg_f3d"), reason="niftyreg is not installed. reg_f3d not found."
+)
 def test_reg_f3d():
     """ tests for reg_f3d interface"""
     # Create a reg_f3d object
     nr_f3d = RegF3D()
 
     # Check if the command is properly defined
-    assert nr_f3d.cmd == get_custom_path('reg_f3d')
+    assert nr_f3d.cmd == get_custom_path("reg_f3d")
 
     # test raising error with mandatory args absent
     with pytest.raises(ValueError):
         nr_f3d.run()
 
     # Assign some input data
-    ref_file = example_data('im1.nii')
-    flo_file = example_data('im2.nii')
-    rmask_file = example_data('mask.nii')
+    ref_file = example_data("im1.nii")
+    flo_file = example_data("im2.nii")
+    rmask_file = example_data("mask.nii")
     nr_f3d.inputs.ref_file = ref_file
     nr_f3d.inputs.flo_file = flo_file
     nr_f3d.inputs.rmask_file = rmask_file
@@ -75,15 +76,15 @@ def test_reg_f3d():
     nr_f3d.inputs.be_val = 0.1
     nr_f3d.inputs.le_val = 0.1
 
-    cmd_tmp = '{cmd} -be 0.100000 -cpp {cpp} -flo {flo} -le 0.100000 -omp 4 \
--ref {ref} -res {res} -rmask {rmask} -vel'
+    cmd_tmp = "{cmd} -be 0.100000 -cpp {cpp} -flo {flo} -le 0.100000 -omp 4 \
+-ref {ref} -res {res} -rmask {rmask} -vel"
     expected_cmd = cmd_tmp.format(
-        cmd=get_custom_path('reg_f3d'),
-        cpp='im2_cpp.nii.gz',
+        cmd=get_custom_path("reg_f3d"),
+        cpp="im2_cpp.nii.gz",
         flo=flo_file,
         ref=ref_file,
-        res='im2_res.nii.gz',
+        res="im2_res.nii.gz",
         rmask=rmask_file,
     )
 
diff --git a/nipype/interfaces/niftyreg/tests/test_regutils.py b/nipype/interfaces/niftyreg/tests/test_regutils.py
index 918d556ab2..26431ddc44 100644
--- a/nipype/interfaces/niftyreg/tests/test_regutils.py
+++ b/nipype/interfaces/niftyreg/tests/test_regutils.py
@@ -6,8 +6,15 @@
 from ....utils.filemanip import which
 from ....testing import example_data
-from .. import (get_custom_path, RegAverage, RegResample, RegJacobian,
-                RegTools, RegMeasure, RegTransform)
+from .. import (
+    get_custom_path,
+    RegAverage,
+    RegResample,
+    RegJacobian,
+    RegTools,
+    RegMeasure,
+    RegTransform,
+)
 
 
 def no_nifty_tool(cmd=None):
@@ -15,455 +22,509 @@ def no_nifty_tool(cmd=None):
 
 
 @pytest.mark.skipif(
-    no_nifty_tool(cmd='reg_resample'),
-    reason="niftyreg is not installed. reg_resample not found.")
+    no_nifty_tool(cmd="reg_resample"),
+    reason="niftyreg is not installed. reg_resample not found.",
+)
 def test_reg_resample_res():
     """ tests for reg_resample interface """
     # Create a reg_resample object
     nr_resample = RegResample()
 
     # Check if the command is properly defined
-    assert nr_resample.cmd == get_custom_path('reg_resample')
+    assert nr_resample.cmd == get_custom_path("reg_resample")
 
     # test raising error with mandatory args absent
     with pytest.raises(ValueError):
         nr_resample.run()
 
     # Resample res
-    ref_file = example_data('im1.nii')
-    flo_file = example_data('im2.nii')
-    trans_file = example_data('warpfield.nii')
+    ref_file = example_data("im1.nii")
+    flo_file = example_data("im2.nii")
+    trans_file = example_data("warpfield.nii")
     nr_resample.inputs.ref_file = ref_file
     nr_resample.inputs.flo_file = flo_file
     nr_resample.inputs.trans_file = trans_file
-    nr_resample.inputs.inter_val = 'LIN'
+    nr_resample.inputs.inter_val = "LIN"
     nr_resample.inputs.omp_core_val = 4
 
-    cmd_tmp = '{cmd} -flo {flo} -inter 1 -omp 4 -ref {ref} -trans {trans} \
--res {res}'
+    cmd_tmp = "{cmd} -flo {flo} -inter 1 -omp 4 -ref {ref} -trans {trans} \
+-res {res}"
     expected_cmd = cmd_tmp.format(
-        cmd=get_custom_path('reg_resample'),
+        cmd=get_custom_path("reg_resample"),
         flo=flo_file,
         ref=ref_file,
         trans=trans_file,
-        res='im2_res.nii.gz')
+        res="im2_res.nii.gz",
+    )
 
     assert nr_resample.cmdline == expected_cmd
 
     # test_reg_resample_blank()
-    nr_resample_2 = RegResample(type='blank', inter_val='LIN', omp_core_val=4)
-    ref_file = example_data('im1.nii')
-    flo_file = example_data('im2.nii')
-    trans_file = example_data('warpfield.nii')
+    nr_resample_2 = RegResample(type="blank", inter_val="LIN", omp_core_val=4)
+    ref_file = example_data("im1.nii")
+    flo_file = example_data("im2.nii")
+    trans_file = example_data("warpfield.nii")
     nr_resample_2.inputs.ref_file = ref_file
     nr_resample_2.inputs.flo_file = flo_file
     nr_resample_2.inputs.trans_file = trans_file
 
-    cmd_tmp = '{cmd} -flo {flo} -inter 1 -omp 4 -ref {ref} -trans {trans} \
--blank {blank}'
+    cmd_tmp = "{cmd} -flo {flo} -inter 1 -omp 4 -ref {ref} -trans {trans} \
+-blank {blank}"
     expected_cmd = cmd_tmp.format(
-        cmd=get_custom_path('reg_resample'),
+        cmd=get_custom_path("reg_resample"),
         flo=flo_file,
         ref=ref_file,
         trans=trans_file,
-        blank='im2_blank.nii.gz')
+        blank="im2_blank.nii.gz",
+    )
 
     assert nr_resample_2.cmdline == expected_cmd
 
 
 @pytest.mark.skipif(
-    no_nifty_tool(cmd='reg_jacobian'),
-    reason="niftyreg is not installed. reg_jacobian not found.")
+    no_nifty_tool(cmd="reg_jacobian"),
+    reason="niftyreg is not installed. reg_jacobian not found.",
+)
 def test_reg_jacobian_jac():
     """ Test interface for RegJacobian """
     # Create a reg_jacobian object
     nr_jacobian = RegJacobian()
 
     # Check if the command is properly defined
-    assert nr_jacobian.cmd == get_custom_path('reg_jacobian')
+    assert nr_jacobian.cmd == get_custom_path("reg_jacobian")
 
     # test raising error with mandatory args absent
     with pytest.raises(ValueError):
         nr_jacobian.run()
 
     # Test Reg Jacobian: jac
-    ref_file = example_data('im1.nii')
-    trans_file = example_data('warpfield.nii')
+    ref_file = example_data("im1.nii")
+    trans_file = example_data("warpfield.nii")
     nr_jacobian.inputs.ref_file = ref_file
     nr_jacobian.inputs.trans_file = trans_file
     nr_jacobian.inputs.omp_core_val = 4
 
-    cmd_tmp = '{cmd} -omp 4 -ref {ref} -trans {trans} -jac {jac}'
+    cmd_tmp = "{cmd} -omp 4 -ref {ref} -trans {trans} -jac {jac}"
     expected_cmd = cmd_tmp.format(
-        cmd=get_custom_path('reg_jacobian'),
+        cmd=get_custom_path("reg_jacobian"),
         ref=ref_file,
         trans=trans_file,
-        jac='warpfield_jac.nii.gz')
+        jac="warpfield_jac.nii.gz",
+    )
 
     assert nr_jacobian.cmdline == expected_cmd
 
     # Test Reg Jacobian: jac m
-    nr_jacobian_2 = RegJacobian(type='jacM', omp_core_val=4)
-    ref_file = example_data('im1.nii')
-    trans_file = example_data('warpfield.nii')
+    nr_jacobian_2 = RegJacobian(type="jacM", omp_core_val=4)
+    ref_file = example_data("im1.nii")
+    trans_file = example_data("warpfield.nii")
     nr_jacobian_2.inputs.ref_file = ref_file
     nr_jacobian_2.inputs.trans_file = trans_file
 
-    cmd_tmp = '{cmd} -omp 4 -ref {ref} -trans {trans} -jacM {jac}'
+    cmd_tmp = "{cmd} -omp 4 -ref {ref} -trans {trans} -jacM {jac}"
     expected_cmd = cmd_tmp.format(
-        cmd=get_custom_path('reg_jacobian'),
+        cmd=get_custom_path("reg_jacobian"),
         ref=ref_file,
         trans=trans_file,
-        jac='warpfield_jacM.nii.gz')
+        jac="warpfield_jacM.nii.gz",
+    )
 
     assert nr_jacobian_2.cmdline == expected_cmd
 
     # Test Reg Jacobian: jac l
-    nr_jacobian_3 = RegJacobian(type='jacL', omp_core_val=4)
-    ref_file = example_data('im1.nii')
-    trans_file = example_data('warpfield.nii')
+    nr_jacobian_3 = RegJacobian(type="jacL", omp_core_val=4)
+    ref_file = example_data("im1.nii")
+    trans_file = example_data("warpfield.nii")
     nr_jacobian_3.inputs.ref_file = ref_file
     nr_jacobian_3.inputs.trans_file = trans_file
 
-    cmd_tmp = '{cmd} -omp 4 -ref {ref} -trans {trans} -jacL {jac}'
+    cmd_tmp = "{cmd} -omp 4 -ref {ref} -trans {trans} -jacL {jac}"
     expected_cmd = cmd_tmp.format(
-        cmd=get_custom_path('reg_jacobian'),
+        cmd=get_custom_path("reg_jacobian"),
         ref=ref_file,
         trans=trans_file,
-        jac='warpfield_jacL.nii.gz')
+        jac="warpfield_jacL.nii.gz",
+    )
 
     assert nr_jacobian_3.cmdline == expected_cmd
 
 
 @pytest.mark.skipif(
-    no_nifty_tool(cmd='reg_tools'),
-    reason="niftyreg is not installed. reg_tools not found.")
+    no_nifty_tool(cmd="reg_tools"),
+    reason="niftyreg is not installed. reg_tools not found.",
+)
 def test_reg_tools_mul():
     """ tests for reg_tools interface """
     # Create a reg_tools object
     nr_tools = RegTools()
 
     # Check if the command is properly defined
-    assert nr_tools.cmd == get_custom_path('reg_tools')
+    assert nr_tools.cmd == get_custom_path("reg_tools")
 
     # test raising error with mandatory args absent
     with pytest.raises(ValueError):
         nr_tools.run()
 
     # Test reg_tools: mul
-    in_file = example_data('im1.nii')
+    in_file = example_data("im1.nii")
     nr_tools.inputs.in_file = in_file
     nr_tools.inputs.mul_val = 4
     nr_tools.inputs.omp_core_val = 4
 
-    cmd_tmp = '{cmd} -in {in_file} -mul 4.0 -omp 4 -out {out_file}'
+    cmd_tmp = "{cmd} -in {in_file} -mul 4.0 -omp 4 -out {out_file}"
     expected_cmd = cmd_tmp.format(
-        cmd=get_custom_path('reg_tools'),
-        in_file=in_file,
-        out_file='im1_tools.nii.gz')
+        cmd=get_custom_path("reg_tools"), in_file=in_file, out_file="im1_tools.nii.gz"
+    )
 
     assert nr_tools.cmdline == expected_cmd
 
     # Test reg_tools: iso
     nr_tools_2 = RegTools(iso_flag=True, omp_core_val=4)
-    in_file = example_data('im1.nii')
+    in_file = example_data("im1.nii")
     nr_tools_2.inputs.in_file = in_file
 
-    cmd_tmp = '{cmd} -in {in_file} -iso -omp 4 -out {out_file}'
+    cmd_tmp = "{cmd} -in {in_file} -iso -omp 4 -out {out_file}"
     expected_cmd = cmd_tmp.format(
-        cmd=get_custom_path('reg_tools'),
-        in_file=in_file,
-        out_file='im1_tools.nii.gz')
+        cmd=get_custom_path("reg_tools"), in_file=in_file, out_file="im1_tools.nii.gz"
+    )
 
     assert nr_tools_2.cmdline == expected_cmd
 
 
 @pytest.mark.skipif(
-    no_nifty_tool(cmd='reg_average'),
-    reason="niftyreg is not installed. reg_average not found.")
+    no_nifty_tool(cmd="reg_average"),
+    reason="niftyreg is not installed. reg_average not found.",
+)
 def test_reg_average():
     """ tests for reg_average interface """
     # Create a reg_average object
     nr_average = RegAverage()
 
     # Check if the command is properly defined
-    assert nr_average.cmd == get_custom_path('reg_average')
+    assert nr_average.cmd == get_custom_path("reg_average")
 
     # Average niis
-    one_file = example_data('im1.nii')
-    two_file = example_data('im2.nii')
-    three_file = example_data('im3.nii')
+    one_file = example_data("im1.nii")
+    two_file = example_data("im2.nii")
+    three_file = example_data("im3.nii")
     nr_average.inputs.avg_files = [one_file, two_file, three_file]
     nr_average.inputs.omp_core_val = 1
     generated_cmd = nr_average.cmdline
 
     # Read the reg_average_cmd
-    reg_average_cmd = os.path.join(os.getcwd(), 'reg_average_cmd')
-    with open(reg_average_cmd, 'rb') as f_obj:
+    reg_average_cmd = os.path.join(os.getcwd(), "reg_average_cmd")
+    with open(reg_average_cmd, "rb") as f_obj:
         argv = f_obj.read()
     os.remove(reg_average_cmd)
 
-    expected_argv = '%s %s -avg %s %s %s -omp 1' % (
-        get_custom_path('reg_average'),
-        os.path.join(os.getcwd(), 'avg_out.nii.gz'), one_file, two_file,
-        three_file)
+    expected_argv = "%s %s -avg %s %s %s -omp 1" % (
+        get_custom_path("reg_average"),
+        os.path.join(os.getcwd(), "avg_out.nii.gz"),
+        one_file,
+        two_file,
+        three_file,
+    )
 
-    assert argv.decode('utf-8') == expected_argv
+    assert argv.decode("utf-8") == expected_argv
 
     # Test command line with text file
-    expected_cmd = ('%s --cmd_file %s' % (get_custom_path('reg_average'),
-                                          reg_average_cmd))
+    expected_cmd = "%s --cmd_file %s" % (
+        get_custom_path("reg_average"),
+        reg_average_cmd,
+    )
 
     assert generated_cmd == expected_cmd
 
     # Test Reg Average: average txt
     nr_average_2 = RegAverage()
-    one_file = example_data('TransformParameters.0.txt')
-    two_file = example_data('ants_Affine.txt')
-    three_file = example_data('elastix.txt')
+    one_file = example_data("TransformParameters.0.txt")
+    two_file = example_data("ants_Affine.txt")
+    three_file = example_data("elastix.txt")
     nr_average_2.inputs.avg_files = [one_file, two_file, three_file]
     nr_average_2.inputs.omp_core_val = 1
     generated_cmd = nr_average_2.cmdline
 
     # Read the reg_average_cmd
-    reg_average_cmd = os.path.join(os.getcwd(), 'reg_average_cmd')
-    with open(reg_average_cmd, 'rb') as f_obj:
+    reg_average_cmd = os.path.join(os.getcwd(), "reg_average_cmd")
+    with open(reg_average_cmd, "rb") as f_obj:
         argv = f_obj.read()
     os.remove(reg_average_cmd)
 
-    expected_argv = '%s %s -avg %s %s %s -omp 1' % (
-        get_custom_path('reg_average'),
-        os.path.join(os.getcwd(), 'avg_out.txt'), one_file, two_file,
-        three_file)
+    expected_argv = "%s %s -avg %s %s %s -omp 1" % (
+        get_custom_path("reg_average"),
+        os.path.join(os.getcwd(), "avg_out.txt"),
+        one_file,
+        two_file,
+        three_file,
+    )
 
-    assert argv.decode('utf-8') == expected_argv
+    assert argv.decode("utf-8") == expected_argv
 
     # Test Reg Average: average list
     nr_average_3 = RegAverage()
-    one_file = example_data('TransformParameters.0.txt')
-    two_file = example_data('ants_Affine.txt')
-    three_file = example_data('elastix.txt')
+    one_file = example_data("TransformParameters.0.txt")
+    two_file = example_data("ants_Affine.txt")
+    three_file = example_data("elastix.txt")
     nr_average_3.inputs.avg_lts_files = [one_file, two_file, three_file]
     nr_average_3.inputs.omp_core_val = 1
     generated_cmd = nr_average_3.cmdline
 
     # Read the reg_average_cmd
-    reg_average_cmd = os.path.join(os.getcwd(), 'reg_average_cmd')
-    with open(reg_average_cmd, 'rb') as f_obj:
+    reg_average_cmd = os.path.join(os.getcwd(), "reg_average_cmd")
+    with open(reg_average_cmd, "rb") as f_obj:
         argv = f_obj.read()
     os.remove(reg_average_cmd)
 
-    expected_argv = ('%s %s -avg_lts %s %s %s -omp 1' %
-                     (get_custom_path('reg_average'),
-                      os.path.join(os.getcwd(), 'avg_out.txt'), one_file,
-                      two_file, three_file))
+    expected_argv = "%s %s -avg_lts %s %s %s -omp 1" % (
+        get_custom_path("reg_average"),
+        os.path.join(os.getcwd(), "avg_out.txt"),
+        one_file,
+        two_file,
+        three_file,
+    )
 
-    assert argv.decode('utf-8') == expected_argv
+    assert argv.decode("utf-8") == expected_argv
 
     # Test Reg Average: average ref
     nr_average_4 = RegAverage()
-    ref_file = example_data('anatomical.nii')
-    one_file = example_data('im1.nii')
-    two_file = example_data('im2.nii')
-    three_file = example_data('im3.nii')
-    trans1_file = example_data('roi01.nii')
-    trans2_file = example_data('roi02.nii')
-    trans3_file = example_data('roi03.nii')
+    ref_file = example_data("anatomical.nii")
+    one_file = example_data("im1.nii")
+    two_file = example_data("im2.nii")
+    three_file = example_data("im3.nii")
+    trans1_file = example_data("roi01.nii")
+    trans2_file = example_data("roi02.nii")
+    trans3_file = example_data("roi03.nii")
     nr_average_4.inputs.warp_files = [
-        trans1_file, one_file, trans2_file, two_file, trans3_file, three_file
+        trans1_file,
+        one_file,
+        trans2_file,
+        two_file,
+        trans3_file,
+        three_file,
     ]
     nr_average_4.inputs.avg_ref_file = ref_file
     nr_average_4.inputs.omp_core_val = 1
     generated_cmd = nr_average_4.cmdline
 
     # Read the reg_average_cmd
-    reg_average_cmd = os.path.join(os.getcwd(), 'reg_average_cmd')
-    with open(reg_average_cmd, 'rb') as f_obj:
+    reg_average_cmd = os.path.join(os.getcwd(), "reg_average_cmd")
+    with open(reg_average_cmd, "rb") as f_obj:
         argv = f_obj.read()
     os.remove(reg_average_cmd)
 
-    expected_argv = ('%s %s -avg_tran %s -omp 1 %s %s %s %s %s %s' %
-                     (get_custom_path('reg_average'),
-                      os.path.join(os.getcwd(), 'avg_out.nii.gz'), ref_file,
-                      trans1_file, one_file, trans2_file, two_file,
-                      trans3_file, three_file))
+    expected_argv = "%s %s -avg_tran %s -omp 1 %s %s %s %s %s %s" % (
+        get_custom_path("reg_average"),
+        os.path.join(os.getcwd(), "avg_out.nii.gz"),
+        ref_file,
+        trans1_file,
+        one_file,
+        trans2_file,
+        two_file,
+        trans3_file,
+        three_file,
+    )
 
-    assert argv.decode('utf-8') == expected_argv
+    assert argv.decode("utf-8") == expected_argv
 
     # Test Reg Average: demean3
     nr_average_5 = RegAverage()
-    ref_file = example_data('anatomical.nii')
-    one_file = example_data('im1.nii')
-    two_file = example_data('im2.nii')
-    three_file = example_data('im3.nii')
-    aff1_file = example_data('TransformParameters.0.txt')
-    aff2_file = example_data('ants_Affine.txt')
-    aff3_file = example_data('elastix.txt')
-    trans1_file = example_data('roi01.nii')
-    trans2_file = example_data('roi02.nii')
-    trans3_file = example_data('roi03.nii')
+    ref_file = example_data("anatomical.nii")
+    one_file = example_data("im1.nii")
+    two_file = example_data("im2.nii")
+    three_file = example_data("im3.nii")
+    aff1_file = example_data("TransformParameters.0.txt")
+    aff2_file = example_data("ants_Affine.txt")
+    aff3_file = example_data("elastix.txt")
+    trans1_file = example_data("roi01.nii")
+    trans2_file = example_data("roi02.nii")
+    trans3_file = example_data("roi03.nii")
     nr_average_5.inputs.warp_files = [
-        aff1_file, trans1_file, one_file, aff2_file, trans2_file, two_file,
-        aff3_file, trans3_file, three_file
+        aff1_file,
+        trans1_file,
+        one_file,
+        aff2_file,
+        trans2_file,
+        two_file,
+        aff3_file,
+        trans3_file,
+        three_file,
     ]
     nr_average_5.inputs.demean3_ref_file = ref_file
     nr_average_5.inputs.omp_core_val = 1
     generated_cmd = nr_average_5.cmdline
 
     # Read the reg_average_cmd
-    reg_average_cmd = os.path.join(os.getcwd(), 'reg_average_cmd')
-    with open(reg_average_cmd, 'rb') as f_obj:
+    reg_average_cmd = os.path.join(os.getcwd(), "reg_average_cmd")
+    with open(reg_average_cmd, "rb") as f_obj:
         argv = f_obj.read()
     os.remove(reg_average_cmd)
 
-    expected_argv = ('%s %s -demean3 %s -omp 1 %s %s %s %s %s %s %s %s %s' %
-                     (get_custom_path('reg_average'),
-                      os.path.join(os.getcwd(), 'avg_out.nii.gz'), ref_file,
-                      aff1_file, trans1_file, one_file, aff2_file, trans2_file,
-                      two_file, aff3_file, trans3_file, three_file))
+    expected_argv = "%s %s -demean3 %s -omp 1 %s %s %s %s %s %s %s %s %s" % (
+        get_custom_path("reg_average"),
+        os.path.join(os.getcwd(), "avg_out.nii.gz"),
+        ref_file,
+        aff1_file,
+        trans1_file,
+        one_file,
+        aff2_file,
+        trans2_file,
+        two_file,
+        aff3_file,
+        trans3_file,
+        three_file,
+    )
 
-    assert argv.decode('utf-8') == expected_argv
+    assert argv.decode("utf-8") == expected_argv
 
 
 @pytest.mark.skipif(
-    no_nifty_tool(cmd='reg_transform'),
-    reason="niftyreg is not installed. reg_transform not found.")
+    no_nifty_tool(cmd="reg_transform"),
+    reason="niftyreg is not installed. reg_transform not found.",
+)
 def test_reg_transform_def():
     """ tests for reg_transform interface """
     # Create a reg_transform object
     nr_transform = RegTransform()
 
     # Check if the command is properly defined
-    assert nr_transform.cmd == get_custom_path('reg_transform')
+    assert nr_transform.cmd == get_custom_path("reg_transform")
 
     # Assign some input data
-    trans_file = example_data('warpfield.nii')
+    trans_file = example_data("warpfield.nii")
     nr_transform.inputs.def_input = trans_file
     nr_transform.inputs.omp_core_val = 4
 
-    cmd_tmp = '{cmd} -omp 4 -def {trans_file} {out_file}'
+    cmd_tmp = "{cmd} -omp 4 -def {trans_file} {out_file}"
     expected_cmd = cmd_tmp.format(
-        cmd=get_custom_path('reg_transform'),
+        cmd=get_custom_path("reg_transform"),
         trans_file=trans_file,
-        out_file=os.path.join(os.getcwd(), 'warpfield_trans.nii.gz'))
+        out_file=os.path.join(os.getcwd(), "warpfield_trans.nii.gz"),
+    )
 
     assert nr_transform.cmdline == expected_cmd
 
     # Test reg_transform: def ref
     nr_transform_2 = RegTransform(omp_core_val=4)
-    ref_file = example_data('im1.nii')
-    trans_file = example_data('warpfield.nii')
+    ref_file = example_data("im1.nii")
+    trans_file = example_data("warpfield.nii")
     nr_transform_2.inputs.ref1_file = ref_file
     nr_transform_2.inputs.def_input = trans_file
 
-    cmd_tmp = '{cmd} -ref {ref_file} -omp 4 -def {trans_file} {out_file}'
+    cmd_tmp = "{cmd} -ref {ref_file} -omp 4 -def {trans_file} {out_file}"
     expected_cmd = cmd_tmp.format(
-        cmd=get_custom_path('reg_transform'),
+        cmd=get_custom_path("reg_transform"),
         ref_file=ref_file,
         trans_file=trans_file,
-        out_file=os.path.join(os.getcwd(), 'warpfield_trans.nii.gz'))
+        out_file=os.path.join(os.getcwd(), "warpfield_trans.nii.gz"),
+    )
 
     assert nr_transform_2.cmdline == expected_cmd
 
     # Test reg_transform: comp nii
     nr_transform_3 = RegTransform(omp_core_val=4)
-    ref_file = example_data('im1.nii')
-    trans_file = example_data('warpfield.nii')
-    trans2_file = example_data('anatomical.nii')
+    ref_file = example_data("im1.nii")
+    trans_file = example_data("warpfield.nii")
+    trans2_file = example_data("anatomical.nii")
     nr_transform_3.inputs.ref1_file = ref_file
     nr_transform_3.inputs.comp_input2 = trans2_file
     nr_transform_3.inputs.comp_input = trans_file
 
-    cmd_tmp = '{cmd} -ref {ref_file} -omp 4 -comp {trans1} {trans2} {out_file}'
+    cmd_tmp = "{cmd} -ref {ref_file} -omp 4 -comp {trans1} {trans2} {out_file}"
     expected_cmd = cmd_tmp.format(
-        cmd=get_custom_path('reg_transform'),
+        cmd=get_custom_path("reg_transform"),
         ref_file=ref_file,
         trans1=trans_file,
         trans2=trans2_file,
-        out_file=os.path.join(os.getcwd(), 'warpfield_trans.nii.gz'))
+        out_file=os.path.join(os.getcwd(), "warpfield_trans.nii.gz"),
+    )
 
     assert nr_transform_3.cmdline == expected_cmd
 
     # Test reg_transform: comp txt
     nr_transform_4 = RegTransform(omp_core_val=4)
-    aff1_file = example_data('ants_Affine.txt')
-    aff2_file = example_data('elastix.txt')
+    aff1_file = example_data("ants_Affine.txt")
+    aff2_file = example_data("elastix.txt")
     nr_transform_4.inputs.comp_input2 = aff2_file
     nr_transform_4.inputs.comp_input = aff1_file
 
-    cmd_tmp = '{cmd} -omp 4 -comp {aff1} {aff2} {out_file}'
+    cmd_tmp = "{cmd} -omp 4 -comp {aff1} {aff2} {out_file}"
     expected_cmd = cmd_tmp.format(
-        cmd=get_custom_path('reg_transform'),
+        cmd=get_custom_path("reg_transform"),
         aff1=aff1_file,
         aff2=aff2_file,
-        out_file=os.path.join(os.getcwd(), 'ants_Affine_trans.txt'))
+        out_file=os.path.join(os.getcwd(), "ants_Affine_trans.txt"),
+    )
 
     assert nr_transform_4.cmdline == expected_cmd
 
     # Test reg_transform: comp
     nr_transform_5 = RegTransform(omp_core_val=4)
-    trans_file = example_data('warpfield.nii')
-    aff_file = example_data('elastix.txt')
+    trans_file = example_data("warpfield.nii")
+    aff_file = example_data("elastix.txt")
     nr_transform_5.inputs.comp_input2 = trans_file
     nr_transform_5.inputs.comp_input = aff_file
 
-    cmd_tmp = '{cmd} -omp 4 -comp {aff} {trans} {out_file}'
+    cmd_tmp = "{cmd} -omp 4 -comp {aff} {trans} {out_file}"
     expected_cmd = cmd_tmp.format(
-        cmd=get_custom_path('reg_transform'),
+        cmd=get_custom_path("reg_transform"),
         aff=aff_file,
         trans=trans_file,
-        out_file=os.path.join(os.getcwd(), 'elastix_trans.nii.gz'))
+        out_file=os.path.join(os.getcwd(), "elastix_trans.nii.gz"),
+    )
 
     assert nr_transform_5.cmdline == expected_cmd
 
     # Test reg_transform: flirt
     nr_transform_6 = RegTransform(omp_core_val=4)
-    aff_file = example_data('elastix.txt')
-    ref_file = example_data('im1.nii')
-    in_file = example_data('im2.nii')
+    aff_file = example_data("elastix.txt")
+    ref_file = example_data("im1.nii")
+    in_file = example_data("im2.nii")
     nr_transform_6.inputs.flirt_2_nr_input = (aff_file, ref_file, in_file)
 
-    cmd_tmp = '{cmd} -omp 4 -flirtAff2NR {aff} {ref} {in_file} {out_file}'
+    cmd_tmp = "{cmd} -omp 4 -flirtAff2NR {aff} {ref} {in_file} {out_file}"
     expected_cmd = cmd_tmp.format(
-        cmd=get_custom_path('reg_transform'),
+        cmd=get_custom_path("reg_transform"),
         aff=aff_file,
         ref=ref_file,
         in_file=in_file,
-        out_file=os.path.join(os.getcwd(), 'elastix_trans.txt'))
+        out_file=os.path.join(os.getcwd(), "elastix_trans.txt"),
+    )
 
     assert nr_transform_6.cmdline == expected_cmd
 
 
 @pytest.mark.skipif(
-    no_nifty_tool(cmd='reg_measure'),
-    reason="niftyreg is not installed. reg_measure not found.")
+    no_nifty_tool(cmd="reg_measure"),
+    reason="niftyreg is not installed. reg_measure not found.",
+)
 def test_reg_measure():
     """ tests for reg_measure interface """
     # Create a reg_measure object
     nr_measure = RegMeasure()
 
     # Check if the command is properly defined
-    assert nr_measure.cmd == get_custom_path('reg_measure')
+    assert nr_measure.cmd == get_custom_path("reg_measure")
 
     # test raising error with mandatory args absent
     with pytest.raises(ValueError):
         nr_measure.run()
 
     # Assign some input data
-    ref_file = example_data('im1.nii')
-    flo_file = example_data('im2.nii')
+    ref_file = example_data("im1.nii")
+    flo_file = example_data("im2.nii")
     nr_measure.inputs.ref_file = ref_file
     nr_measure.inputs.flo_file = flo_file
-    nr_measure.inputs.measure_type = 'lncc'
+    nr_measure.inputs.measure_type = "lncc"
     nr_measure.inputs.omp_core_val = 4
 
-    cmd_tmp = '{cmd} -flo {flo} -lncc -omp 4 -out {out} -ref {ref}'
+    cmd_tmp = "{cmd} -flo {flo} -lncc -omp 4 -out {out} -ref {ref}"
     expected_cmd = cmd_tmp.format(
-        cmd=get_custom_path('reg_measure'),
+        cmd=get_custom_path("reg_measure"),
         flo=flo_file,
-        out='im2_lncc.txt',
-        ref=ref_file)
+        out="im2_lncc.txt",
+        ref=ref_file,
+    )
 
     assert nr_measure.cmdline == expected_cmd
diff --git a/nipype/interfaces/niftyseg/__init__.py b/nipype/interfaces/niftyseg/__init__.py
index 14b391edd5..f5d908a8ff 100644
--- a/nipype/interfaces/niftyseg/__init__.py
+++ b/nipype/interfaces/niftyseg/__init__.py
@@ -10,7 +10,6 @@
 from .em import EM
 from .label_fusion import LabelFusion, CalcTopNCC
 from .lesions import FillLesions
-from .maths import (UnaryMaths, BinaryMaths, BinaryMathsInteger, TupleMaths,
-                    Merge)
+from .maths import UnaryMaths, BinaryMaths, BinaryMathsInteger, TupleMaths, Merge
 from .patchmatch import PatchMatch
 from .stats import UnaryStats, BinaryStats
diff --git a/nipype/interfaces/niftyseg/base.py b/nipype/interfaces/niftyseg/base.py
index cc831aa9b5..65f1f9ff14 100644
--- a/nipype/interfaces/niftyseg/base.py
+++ b/nipype/interfaces/niftyseg/base.py
@@ -23,7 +23,8 @@ class NiftySegCommand(NiftyFitCommand):
     """
     Base support interface for NiftySeg commands.
     """
-    _suffix = '_ns'
+
+    _suffix = "_ns"
     _min_version = None
 
     def __init__(self, **inputs):
@@ -31,4 +32,5 @@ def __init__(self, **inputs):
 
     def get_version(self):
         return super(NiftySegCommand, self).version_from_command(
-            cmd='seg_EM', flag='--version')
+            cmd="seg_EM", flag="--version"
+        )
diff --git a/nipype/interfaces/niftyseg/em.py b/nipype/interfaces/niftyseg/em.py
index e9c749c282..d6fb4d5180 100644
--- a/nipype/interfaces/niftyseg/em.py
+++ b/nipype/interfaces/niftyseg/em.py
@@ -11,119 +11,130 @@
 See the docstrings of the individual classes for examples.
 """
 
-from ..base import (TraitedSpec, File, traits, CommandLineInputSpec,
-                    InputMultiPath)
+from ..base import TraitedSpec, File, traits, CommandLineInputSpec, InputMultiPath
 from .base import NiftySegCommand
 from ..niftyreg.base import get_custom_path
 
 
 class EMInputSpec(CommandLineInputSpec):
     """Input Spec for EM."""
+
     in_file = File(
-        argstr='-in %s',
+        argstr="-in %s",
         exists=True,
         mandatory=True,
-        desc='Input image to segment',
-        position=4)
+        desc="Input image to segment",
+        position=4,
+    )
 
     mask_file = File(
-        argstr='-mask %s',
-        exists=True,
-        desc='Filename of the ROI for label fusion')
+        argstr="-mask %s", exists=True, desc="Filename of the ROI for label fusion"
+    )
 
     # Priors
     no_prior = traits.Int(
-        argstr='-nopriors %s',
+        argstr="-nopriors %s",
         mandatory=True,
-        desc='Number of classes to use without prior',
-        xor=['prior_4D', 'priors'])
+        desc="Number of classes to use without prior",
+        xor=["prior_4D", "priors"],
+    )
 
     prior_4D = File(
-        argstr='-prior4D %s',
+        argstr="-prior4D %s",
         exists=True,
         mandatory=True,
-        desc='4D file containing the priors',
-        xor=['no_prior', 'priors'])
+        desc="4D file containing the priors",
+        xor=["no_prior", "priors"],
+    )
 
     priors = InputMultiPath(
-        argstr='%s',
+        argstr="%s",
         mandatory=True,
-        desc='List of priors filepaths.',
-        xor=['no_prior', 'prior_4D'])
+        desc="List of priors filepaths.",
+        xor=["no_prior", "prior_4D"],
+    )
 
     # iterations
     max_iter = traits.Int(
-        argstr='-max_iter %s',
+        argstr="-max_iter %s",
         default_value=100,
         usedefault=True,
-        desc='Maximum number of iterations')
+        desc="Maximum number of iterations",
+    )
 
     min_iter = traits.Int(
-        argstr='-min_iter %s',
+        argstr="-min_iter %s",
         default_value=0,
         usedefault=True,
-        desc='Minimum number of iterations')
+        desc="Minimum number of iterations",
+    )
 
     # other options
     bc_order_val = traits.Int(
-        argstr='-bc_order %s',
+        argstr="-bc_order %s",
         default_value=3,
         usedefault=True,
-        desc='Polynomial order for the bias field')
+        desc="Polynomial order for the bias field",
+    )
 
     mrf_beta_val = traits.Float(
-        argstr='-mrf_beta %s', desc='Weight of the Markov Random Field')
+        argstr="-mrf_beta %s", desc="Weight of the Markov Random Field"
+    )
 
-    desc = 'Bias field correction will run only if the ratio of improvement \
-is below bc_thresh. (default=0 [OFF])'
+    desc = "Bias field correction will run only if the ratio of improvement \
+is below bc_thresh. (default=0 [OFF])"
 
     bc_thresh_val = traits.Float(
-        argstr='-bc_thresh %s',
-        default_value=0,
-        usedefault=True,
-        desc=desc)
+        argstr="-bc_thresh %s", default_value=0, usedefault=True, desc=desc
+    )
 
-    desc = 'Amount of regularization over the diagonal of the covariance \
-matrix [above 1]'
+    desc = "Amount of regularization over the diagonal of the covariance \
+matrix [above 1]"
 
-    reg_val = traits.Float(argstr='-reg %s', desc=desc)
+    reg_val = traits.Float(argstr="-reg %s", desc=desc)
 
-    desc = 'Outlier detection as in (Van Leemput TMI 2003). is the \
+    desc = "Outlier detection as in (Van Leemput TMI 2003). is the \
 Mahalanobis threshold [recommended between 3 and 7] is a convergence \
-ratio below which the outlier detection is going to be done [recommended 0.01]'
+ratio below which the outlier detection is going to be done [recommended 0.01]"
 
     outlier_val = traits.Tuple(
-        traits.Float(), traits.Float(), argstr='-outlier %s %s', desc=desc)
+        traits.Float(), traits.Float(), argstr="-outlier %s %s", desc=desc
+    )
 
-    desc = 'Relax Priors [relaxation factor: 00 (recommended=2.0)] /only 3D/'
+    desc = "Relax Priors [relaxation factor: 00 (recommended=2.0)] /only 3D/"
 
     relax_priors = traits.Tuple(
-        traits.Float(), traits.Float(), argstr='-rf %s %s', desc=desc)
+        traits.Float(), traits.Float(), argstr="-rf %s %s", desc=desc
+    )
 
     # outputs
     out_file = File(
-        name_source=['in_file'],
-        name_template='%s_em.nii.gz',
-        argstr='-out %s',
-        desc='Output segmentation')
+        name_source=["in_file"],
+        name_template="%s_em.nii.gz",
+        argstr="-out %s",
+        desc="Output segmentation",
+    )
 
     out_bc_file = File(
-        name_source=['in_file'],
-        name_template='%s_bc_em.nii.gz',
-        argstr='-bc_out %s',
-        desc='Output bias corrected image')
+        name_source=["in_file"],
+        name_template="%s_bc_em.nii.gz",
+        argstr="-bc_out %s",
+        desc="Output bias corrected image",
+    )
 
     out_outlier_file = File(
-        name_source=['in_file'],
-        name_template='%s_outlier_em.nii.gz',
-        argstr='-out_outlier %s',
-        desc='Output outlierness image')
+        name_source=["in_file"],
+        name_template="%s_outlier_em.nii.gz",
+        argstr="-out_outlier %s",
+        desc="Output outlierness image",
+    )
 
 
 class EMOutputSpec(TraitedSpec):
     """Output Spec for EM."""
+
     out_file = File(desc="Output segmentation")
     out_bc_file = File(desc="Output bias corrected image")
-    out_outlier_file = File(desc='Output outlierness image')
+    out_outlier_file = File(desc="Output outlierness image")
 
 
 class EM(NiftySegCommand):
@@ -147,15 +158,16 @@ class EM(NiftySegCommand):
     -bc_out im1_bc_em.nii.gz -out im1_em.nii.gz -out_outlier im1_outlier_em.nii.gz'
 
     """
-    _cmd = get_custom_path('seg_EM', env_dir='NIFTYSEGDIR')
-    _suffix = '_em'
+
+    _cmd = get_custom_path("seg_EM", env_dir="NIFTYSEGDIR")
+    _suffix = "_em"
     input_spec = EMInputSpec
     output_spec = EMOutputSpec
 
     def _format_arg(self, opt, spec, val):
         """Convert input to appropriate format for seg_EM."""
-        if opt == 'priors':
+        if opt == "priors":
             _nb_priors = len(self.inputs.priors)
-            return '-priors %d %s' % (_nb_priors, ' '.join(self.inputs.priors))
+            return "-priors %d %s" % (_nb_priors, " ".join(self.inputs.priors))
         else:
             return super(EM, self)._format_arg(opt, spec, val)
diff --git a/nipype/interfaces/niftyseg/label_fusion.py b/nipype/interfaces/niftyseg/label_fusion.py
index 5185b64f68..aa255247d2 100644
--- a/nipype/interfaces/niftyseg/label_fusion.py
+++ b/nipype/interfaces/niftyseg/label_fusion.py
@@ -7,107 +7,118 @@
 import os
 import warnings
 
-from ..base import (TraitedSpec, File, traits, isdefined, CommandLineInputSpec,
-                    NipypeInterfaceError)
+from ..base import (
+    TraitedSpec,
+    File,
+    traits,
+    isdefined,
+    CommandLineInputSpec,
+    NipypeInterfaceError,
+)
 from .base import NiftySegCommand
 from ..niftyreg.base import get_custom_path
 from ...utils.filemanip import load_json, save_json, split_filename
 
 warn = warnings.warn
-warnings.filterwarnings('always', category=UserWarning)
+warnings.filterwarnings("always", category=UserWarning)
 
 
 class LabelFusionInput(CommandLineInputSpec):
     """Input Spec for LabelFusion."""
+
     in_file = File(
-        argstr='-in %s',
+        argstr="-in %s",
         exists=True,
         mandatory=True,
         position=1,
-        desc='Filename of the 4D integer label image.')
+        desc="Filename of the 4D integer label image.",
+    )
 
-    template_file = File(exists=True, desc='Registered templates (4D Image)')
+    template_file = File(exists=True, desc="Registered templates (4D Image)")
 
     file_to_seg = File(
-        exists=True,
-        mandatory=True,
-        desc='Original image to segment (3D Image)')
+        exists=True, mandatory=True, desc="Original image to segment (3D Image)"
+    )
 
     mask_file = File(
-        argstr='-mask %s',
-        exists=True,
-        desc='Filename of the ROI for label fusion')
+        argstr="-mask %s", exists=True, desc="Filename of the ROI for label fusion"
+    )
 
     out_file = File(
-        argstr='-out %s',
-        name_source=['in_file'],
-        name_template='%s',
-        desc='Output consensus segmentation')
+        argstr="-out %s",
+        name_source=["in_file"],
+        name_template="%s",
+        desc="Output consensus segmentation",
+    )
 
     prob_flag = traits.Bool(
-        desc='Probabilistic/Fuzzy segmented image', argstr='-outProb')
+        desc="Probabilistic/Fuzzy segmented image", argstr="-outProb"
+    )
 
-    desc = 'Verbose level [0 = off, 1 = on, 2 = debug] (default = 0)'
-    verbose = traits.Enum('0', '1', '2', desc=desc, argstr='-v %s')
+    desc = "Verbose level [0 = off, 1 = on, 2 = debug] (default = 0)"
+    verbose = traits.Enum("0", "1", "2", desc=desc, argstr="-v %s")
 
-    desc = 'Only consider non-consensus voxels to calculate statistics'
-    unc = traits.Bool(desc=desc, argstr='-unc')
+    desc = "Only consider non-consensus voxels to calculate statistics"
+    unc = traits.Bool(desc=desc, argstr="-unc")
 
     classifier_type = traits.Enum(
-        'STEPS',
-        'STAPLE',
-        'MV',
-        'SBA',
-        argstr='-%s',
+        "STEPS",
+        "STAPLE",
+        "MV",
+        "SBA",
+        argstr="-%s",
         mandatory=True,
         position=2,
-        desc='Type of Classifier Fusion.')
+        desc="Type of Classifier Fusion.",
+    )
 
     desc = "Gaussian kernel size in mm to compute the local similarity"
     kernel_size = traits.Float(desc=desc)
 
-    template_num = traits.Int(desc='Number of labels to use')
+    template_num = traits.Int(desc="Number of labels to use")
 
     # STAPLE and MV options
     sm_ranking = traits.Enum(
-        'ALL',
-        'GNCC',
-        'ROINCC',
-        'LNCC',
-        argstr='-%s',
+        "ALL",
+        "GNCC",
+        "ROINCC",
+        "LNCC",
+        argstr="-%s",
         usedefault=True,
         position=3,
-        desc='Ranking for STAPLE and MV')
+        desc="Ranking for STAPLE and MV",
+    )
 
-    dilation_roi = traits.Int(desc='Dilation of the ROI ( d>=1 )')
+    dilation_roi = traits.Int(desc="Dilation of the ROI ( d>=1 )")
 
     # STAPLE and STEPS options
-    desc = 'Proportion of the label (only for single labels).'
-    proportion = traits.Float(argstr='-prop %s', desc=desc)
+    desc = "Proportion of the label (only for single labels)."
+    proportion = traits.Float(argstr="-prop %s", desc=desc)
 
-    desc = 'Update label proportions at each iteration'
-    prob_update_flag = traits.Bool(desc=desc, argstr='-prop_update')
+    desc = "Update label proportions at each iteration"
+    prob_update_flag = traits.Bool(desc=desc, argstr="-prop_update")
 
-    desc = 'Value of P and Q [ 0 < (P,Q) < 1 ] (default = 0.99 0.99)'
-    set_pq = traits.Tuple(
-        traits.Float, traits.Float, argstr='-setPQ %f %f', desc=desc)
+    desc = "Value of P and Q [ 0 < (P,Q) < 1 ] (default = 0.99 0.99)"
+    set_pq = traits.Tuple(traits.Float, traits.Float, argstr="-setPQ %f %f", desc=desc)
 
     mrf_value = traits.Float(
-        argstr='-MRF_beta %f', desc='MRF prior strength (between 0 and 5)')
+        argstr="-MRF_beta %f", desc="MRF prior strength (between 0 and 5)"
+    )
 
-    desc = 'Maximum number of iterations (default = 15).'
-    max_iter = traits.Int(argstr='-max_iter %d', desc=desc)
+    desc = "Maximum number of iterations (default = 15)."
+    max_iter = traits.Int(argstr="-max_iter %d", desc=desc)
 
-    desc = 'If percent of labels agree, then area is not uncertain.'
-    unc_thresh = traits.Float(argstr='-uncthres %f', desc=desc)
+    desc = "If percent of labels agree, then area is not uncertain."
+    unc_thresh = traits.Float(argstr="-uncthres %f", desc=desc)
 
-    desc = 'Ratio for convergence (default epsilon = 10^-5).'
-    conv = traits.Float(argstr='-conv %f', desc=desc)
+    desc = "Ratio for convergence (default epsilon = 10^-5)."
+    conv = traits.Float(argstr="-conv %f", desc=desc)
 
 
 class LabelFusionOutput(TraitedSpec):
     """Output Spec for LabelFusion."""
-    out_file = File(exists=True, desc='image written after calculations')
+
+    out_file = File(exists=True, desc="image written after calculations")
 
 
 class LabelFusion(NiftySegCommand):
@@ -148,24 +159,31 @@ class LabelFusion(NiftySegCommand):
     'seg_LabFusion -in im1.nii -STEPS 2.000000 2 im2.nii im3.nii -out im1_steps.nii'
 
     """
-    _cmd = get_custom_path('seg_LabFusion', env_dir='NIFTYSEGDIR')
+
+    _cmd = get_custom_path("seg_LabFusion", env_dir="NIFTYSEGDIR")
     input_spec = LabelFusionInput
     output_spec = LabelFusionOutput
-    _suffix = '_label_fused'
+    _suffix = "_label_fused"
 
     def _format_arg(self, opt, spec, val):
         """Convert input to appropriate format for seg_maths."""
         # Remove options if not STAPLE or STEPS as fusion type:
-        if opt in ['proportion', 'prob_update_flag', 'set_pq', 'mrf_value',
-                   'max_iter', 'unc_thresh', 'conv'] and\
-           self.inputs.classifier_type not in ['STAPLE', 'STEPS']:
-            return ''
-
-        if opt == 'sm_ranking':
+        if opt in [
+            "proportion",
+            "prob_update_flag",
+            "set_pq",
+            "mrf_value",
+            "max_iter",
+            "unc_thresh",
+            "conv",
+        ] and self.inputs.classifier_type not in ["STAPLE", "STEPS"]:
+            return ""
+
+        if opt == "sm_ranking":
             return self.get_staple_args(val)
 
         # Return options string if STEPS:
-        if opt == 'classifier_type' and val == 'STEPS':
+        if opt == "classifier_type" and val == "STEPS":
             return self.get_steps_args()
 
         return super(LabelFusion, self)._format_arg(opt, spec, val)
@@ -186,18 +204,20 @@ def get_steps_args(self):
 'classifier_type' is set to 'STEPS'."
             raise NipypeInterfaceError(err)
 
-        return "-STEPS %f %d %s %s" % (self.inputs.kernel_size,
-                                       self.inputs.template_num,
-                                       self.inputs.file_to_seg,
-                                       self.inputs.template_file)
+        return "-STEPS %f %d %s %s" % (
+            self.inputs.kernel_size,
+            self.inputs.template_num,
+            self.inputs.file_to_seg,
+            self.inputs.template_file,
+        )
 
     def get_staple_args(self, ranking):
         classtype = self.inputs.classifier_type
-        if classtype not in ['STAPLE', 'MV']:
+        if classtype not in ["STAPLE", "MV"]:
             return None
 
-        if ranking == 'ALL':
-            return '-ALL'
+        if ranking == "ALL":
+            return "-ALL"
 
         if not isdefined(self.inputs.template_file):
             err = "LabelFusion requires a value for input 'template_file' \
 when 'classifier_type' is set to '%s' and 'sm_ranking' is set to '%s'."
@@ -210,18 +230,21 @@ def get_staple_args(self, ranking):
             raise NipypeInterfaceError(err % (classtype, ranking))
 
-        if ranking == 'GNCC':
+        if ranking == "GNCC":
             if not isdefined(self.inputs.template_num):
                 err = "LabelFusion requires a value for input 'template_num' \
 when 'classifier_type' is set to '%s' and 'sm_ranking' is set to '%s'."
                 raise NipypeInterfaceError(err % (classtype, ranking))
 
-            return "-%s %d %s %s" % (ranking, self.inputs.template_num,
-                                     self.inputs.file_to_seg,
-                                     self.inputs.template_file)
+            return "-%s %d %s %s" % (
+                ranking,
+                self.inputs.template_num,
+                self.inputs.file_to_seg,
+                self.inputs.template_file,
+            )
 
-        elif ranking == 'ROINCC':
+        elif ranking == "ROINCC":
             if not isdefined(self.inputs.dilation_roi):
                 err = "LabelFusion requires a value for input 'dilation_roi' \
 when 'classifier_type' is set to '%s' and 'sm_ranking' is set to '%s'."
@@ -234,61 +257,62 @@ def get_staple_args(self, ranking):
 
                 raise NipypeInterfaceError(err % self.inputs.dilation_roi)
 
-            return "-%s %d %d %s %s" % (ranking, self.inputs.dilation_roi,
-                                        self.inputs.template_num,
-                                        self.inputs.file_to_seg,
-                                        self.inputs.template_file)
-        elif ranking == 'LNCC':
+            return "-%s %d %d %s %s" % (
+                ranking,
+                self.inputs.dilation_roi,
+                self.inputs.template_num,
+                self.inputs.file_to_seg,
+                self.inputs.template_file,
+            )
+        elif ranking == "LNCC":
             if not isdefined(self.inputs.kernel_size):
                 err = "LabelFusion requires a value for input 'kernel_size' \
 when 'classifier_type' is set to '%s' and 'sm_ranking' is set to '%s'."
                 raise NipypeInterfaceError(err % (classtype, ranking))
 
-            return "-%s %f %d %s %s" % (ranking, self.inputs.kernel_size,
-                                        self.inputs.template_num,
-                                        self.inputs.file_to_seg,
-                                        self.inputs.template_file)
+            return "-%s %f %d %s %s" % (
+                ranking,
+                self.inputs.kernel_size,
+                self.inputs.template_num,
+                self.inputs.file_to_seg,
+                self.inputs.template_file,
+            )
 
     def _overload_extension(self, value, name=None):
         path, base, _ = split_filename(value)
         _, _, ext = split_filename(self.inputs.in_file)
         suffix = self.inputs.classifier_type.lower()
-        return os.path.join(path, '{0}_{1}{2}'.format(base, suffix, ext))
+        return os.path.join(path, "{0}_{1}{2}".format(base, suffix, ext))
 
 
 class CalcTopNCCInputSpec(CommandLineInputSpec):
     """Input Spec for CalcTopNCC."""
+
     in_file = File(
-        argstr='-target %s',
-        exists=True,
-        mandatory=True,
-        desc='Target file',
-        position=1)
+        argstr="-target %s", exists=True, mandatory=True, desc="Target file", position=1
+    )
 
     num_templates = traits.Int(
-        argstr='-templates %s',
-        mandatory=True,
-        position=2,
-        desc='Number of Templates')
+        argstr="-templates %s", mandatory=True, position=2, desc="Number of Templates"
+    )
 
     in_templates = traits.List(
-        File(exists=True), argstr="%s", position=3, mandatory=True)
+        File(exists=True), argstr="%s", position=3, mandatory=True
+    )
 
     top_templates = traits.Int(
-        argstr='-n %s',
-        mandatory=True,
-        position=4,
-        desc='Number of Top Templates')
+        argstr="-n %s", mandatory=True, position=4, desc="Number of Top Templates"
+    )
 
     mask_file = File(
-        argstr='-mask %s',
-        exists=True,
-        desc='Filename of the ROI for label fusion')
+        argstr="-mask %s", exists=True, desc="Filename of the ROI for label fusion"
+    )
 
 
 class CalcTopNCCOutputSpec(TraitedSpec):
     """Output Spec for CalcTopNCC."""
+
     out_files = traits.Any(File(exists=True))
 
@@ -307,23 +331,24 @@ class CalcTopNCC(NiftySegCommand):
     'seg_CalcTopNCC -target im1.nii -templates 2 im2.nii im3.nii -n 1'
 
     """
-    _cmd = get_custom_path('seg_CalcTopNCC', env_dir='NIFTYSEGDIR')
-    _suffix = '_topNCC'
+
+    _cmd = get_custom_path("seg_CalcTopNCC", env_dir="NIFTYSEGDIR")
+    _suffix = "_topNCC"
     input_spec = CalcTopNCCInputSpec
     output_spec = CalcTopNCCOutputSpec
 
     def aggregate_outputs(self, runtime=None, needed_outputs=None):
         outputs = self._outputs()
         # local caching for backward compatibility
-        outfile = os.path.join(os.getcwd(), 'CalcTopNCC.json')
+        outfile = os.path.join(os.getcwd(), "CalcTopNCC.json")
         if runtime is None or not runtime.stdout:
             try:
-                out_files = load_json(outfile)['files']
+                out_files = load_json(outfile)["files"]
             except IOError:
                 return self.run().outputs
         else:
             out_files = []
-            for line in runtime.stdout.split('\n'):
+            for line in runtime.stdout.split("\n"):
                 if line:
                     values = line.split()
                     if len(values) > 1:
diff --git a/nipype/interfaces/niftyseg/lesions.py b/nipype/interfaces/niftyseg/lesions.py
index 14d7f23c6b..0d055a55f2 100644
--- a/nipype/interfaces/niftyseg/lesions.py
+++ b/nipype/interfaces/niftyseg/lesions.py
@@ -18,85 +18,88 @@
 from ..niftyreg.base import get_custom_path
 
 warn = warnings.warn
-warnings.filterwarnings('always', category=UserWarning)
+warnings.filterwarnings("always", category=UserWarning)
 
 
 class FillLesionsInputSpec(CommandLineInputSpec):
     """Input Spec for FillLesions."""
+
     # Mandatory input arguments
     in_file = File(
-        argstr='-i %s',
+        argstr="-i %s",
         exists=True,
         mandatory=True,
-        desc='Input image to fill lesions',
-        position=1)
+        desc="Input image to fill lesions",
+        position=1,
+    )
 
     lesion_mask = File(
-        argstr='-l %s',
-        exists=True,
-        mandatory=True,
-        desc='Lesion mask',
-        position=2)
+        argstr="-l %s", exists=True, mandatory=True, desc="Lesion mask", position=2
+    )
 
     # Output file name
     out_file = File(
-        name_source=['in_file'],
-        name_template='%s_lesions_filled.nii.gz',
-        desc='The output filename of the fill lesions results',
-        argstr='-o %s',
-        position=3)
+        name_source=["in_file"],
+        name_template="%s_lesions_filled.nii.gz",
+        desc="The output filename of the fill lesions results",
+        argstr="-o %s",
+        position=3,
+    )
 
     # Optional arguments
     desc = "Dilate the mask times (in voxels, by default 0)"
-    in_dilation = traits.Int(desc=desc, argstr='-dil %d')
+    in_dilation = traits.Int(desc=desc, argstr="-dil %d")
 
-    desc = 'Percentage of minimum number of voxels between patches \
-(by default 0.5).'
+    desc = "Percentage of minimum number of voxels between patches \
+(by default 0.5)."
 
-    match = traits.Float(desc=desc, argstr='-match %f')
+    match = traits.Float(desc=desc, argstr="-match %f")
 
-    desc = 'Minimum percentage of valid voxels in target patch \
-(by default 0).'
+    desc = "Minimum percentage of valid voxels in target patch \
+(by default 0)."
 
-    search = traits.Float(desc=desc, argstr='-search %f')
+    search = traits.Float(desc=desc, argstr="-search %f")
 
-    desc = 'Smoothing by (in minimal 6-neighbourhood voxels \
-(by default 0.1)).'
+    desc = "Smoothing by (in minimal 6-neighbourhood voxels \
+(by default 0.1))."
 
-    smooth = traits.Float(desc=desc, argstr='-smo %f')
+    smooth = traits.Float(desc=desc, argstr="-smo %f")
 
-    desc = 'Search regions size respect biggest patch size (by default 4).'
-    size = traits.Int(desc=desc, argstr='-size %d')
+    desc = "Search regions size respect biggest patch size (by default 4)."
+    size = traits.Int(desc=desc, argstr="-size %d")
 
-    desc = 'Patch cardinality weighting factor (by default 2).'
-    cwf = traits.Float(desc=desc, argstr='-cwf %f')
+    desc = "Patch cardinality weighting factor (by default 2)."
+    cwf = traits.Float(desc=desc, argstr="-cwf %f")
 
-    desc = 'Give a binary mask with the valid search areas.'
-    bin_mask = File(desc=desc, argstr='-mask %s')
+    desc = "Give a binary mask with the valid search areas."
+    bin_mask = File(desc=desc, argstr="-mask %s")
 
     desc = "Guizard et al. (FIN 2015) method, it doesn't include the \
 multiresolution/hierarchical inpainting part, this part needs to be done \
 with some external software such as reg_tools and reg_resample from NiftyReg. \
 By default it uses the method presented in Prados et al. (Neuroimage 2016)."
 
-    other = traits.Bool(desc=desc, argstr='-other')
+    other = traits.Bool(desc=desc, argstr="-other")
 
     use_2d = traits.Bool(
-        desc='Uses 2D patches in the Z axis, by default 3D.', argstr='-2D')
+        desc="Uses 2D patches in the Z axis, by default 3D.", argstr="-2D"
+    )
 
     debug = traits.Bool(
-        desc='Save all intermediate files (by default OFF).', argstr='-debug')
+        desc="Save all intermediate files (by default OFF).", argstr="-debug"
+    )
 
-    desc = 'Set output (char, short, int, uchar, ushort, uint, \
-float, double).'
+    desc = "Set output (char, short, int, uchar, ushort, uint, \
+float, double)."
-    out_datatype = traits.String(desc=desc, argstr='-odt %s')
+    out_datatype = traits.String(desc=desc, argstr="-odt %s")
 
-    verbose = traits.Bool(desc='Verbose (by default OFF).', argstr='-v')
+    verbose = traits.Bool(desc="Verbose (by default OFF).", argstr="-v")
 
 
 class FillLesionsOutputSpec(TraitedSpec):
     """Output Spec for FillLesions."""
+
     out_file = File(desc="Output segmentation")
 
@@ -118,6 +121,7 @@ class FillLesions(NiftySegCommand):
     'seg_FillLesions -i im1.nii -l im2.nii -o im1_lesions_filled.nii.gz'
 
     """
-    _cmd = get_custom_path('seg_FillLesions', env_dir='NIFTYSEGDIR')
+
+    _cmd = get_custom_path("seg_FillLesions", env_dir="NIFTYSEGDIR")
     input_spec = FillLesionsInputSpec
     output_spec = FillLesionsOutputSpec
diff --git a/nipype/interfaces/niftyseg/maths.py b/nipype/interfaces/niftyseg/maths.py
index d4773f86e8..0afea087a0 100644
--- a/nipype/interfaces/niftyseg/maths.py
+++ b/nipype/interfaces/niftyseg/maths.py
@@ -13,8 +13,14 @@
 
 import os
 
-from ..base import (TraitedSpec, File, traits, isdefined, CommandLineInputSpec,
-                    NipypeInterfaceError)
+from ..base import (
+    TraitedSpec,
+    File,
+    traits,
+    isdefined,
+    CommandLineInputSpec,
+    NipypeInterfaceError,
+)
 from .base import NiftySegCommand
 from ..niftyreg.base import get_custom_path
 from ...utils.filemanip import split_filename
@@ -22,36 +28,37 @@
 
 class MathsInput(CommandLineInputSpec):
     """Input Spec for seg_maths interfaces."""
+
     in_file = File(
-        position=2,
-        argstr='%s',
-        exists=True,
-        mandatory=True,
-        desc='image to operate on')
+        position=2, argstr="%s", exists=True, mandatory=True, desc="image to operate on"
+    )
 
     out_file = File(
-        name_source=['in_file'],
-        name_template='%s',
+        name_source=["in_file"],
+        name_template="%s",
         position=-2,
-        argstr='%s',
-        desc='image to write')
+        argstr="%s",
+        desc="image to write",
+    )
 
-    desc = 'datatype to use for output (default uses input type)'
+    desc = "datatype to use for output (default uses input type)"
 
     output_datatype = traits.Enum(
-        'float',
-        'char',
-        'int',
-        'short',
-        'double',
-        'input',
+        "float",
+        "char",
+        "int",
+        "short",
+        "double",
+        "input",
         position=-3,
-        argstr='-odt %s',
-        desc=desc)
+        argstr="-odt %s",
+        desc=desc,
+    )
 
 
 class MathsOutput(TraitedSpec):
     """Output Spec for seg_maths interfaces."""
-    out_file = File(desc='image written after calculations')
+
+    out_file = File(desc="image written after calculations")
 
 
 class MathsCommand(NiftySegCommand):
@@ -71,52 +78,55 @@ class MathsCommand(NiftySegCommand):
     into several 3D images, to estimating the maximum, minimum and average
     over all time-points, etc.
""" - _cmd = get_custom_path('seg_maths', env_dir='NIFTYSEGDIR') + + _cmd = get_custom_path("seg_maths", env_dir="NIFTYSEGDIR") input_spec = MathsInput output_spec = MathsOutput - _suffix = '_maths' + _suffix = "_maths" def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) _, _, ext = split_filename(self.inputs.in_file) suffix = self._suffix - if suffix != '_merged' and isdefined(self.inputs.operation): - suffix = '_' + self.inputs.operation + if suffix != "_merged" and isdefined(self.inputs.operation): + suffix = "_" + self.inputs.operation - return os.path.join(path, '{0}{1}{2}'.format(base, suffix, ext)) + return os.path.join(path, "{0}{1}{2}".format(base, suffix, ext)) class UnaryMathsInput(MathsInput): """Input Spec for seg_maths Unary operations.""" + operation = traits.Enum( - 'sqrt', - 'exp', - 'log', - 'recip', - 'abs', - 'bin', - 'otsu', - 'lconcomp', - 'concomp6', - 'concomp26', - 'fill', - 'euc', - 'tpmax', - 'tmean', - 'tmax', - 'tmin', - 'splitlab', - 'removenan', - 'isnan', - 'subsamp2', - 'scl', - '4to5', - 'range', - argstr='-%s', + "sqrt", + "exp", + "log", + "recip", + "abs", + "bin", + "otsu", + "lconcomp", + "concomp6", + "concomp26", + "fill", + "euc", + "tpmax", + "tmean", + "tmax", + "tmin", + "splitlab", + "removenan", + "isnan", + "subsamp2", + "scl", + "4to5", + "range", + argstr="-%s", position=4, mandatory=True, - desc='operation to perform') + desc="operation to perform", + ) class UnaryMaths(MathsCommand): @@ -218,60 +228,66 @@ class UnaryMaths(MathsCommand): >>> unary_isnan.run() # doctest: +SKIP """ + input_spec = UnaryMathsInput class BinaryMathsInput(MathsInput): """Input Spec for seg_maths Binary operations.""" + operation = traits.Enum( - 'mul', - 'div', - 'add', - 'sub', - 'pow', - 'thr', - 'uthr', - 'smo', - 'edge', - 'sobel3', - 'sobel5', - 'min', - 'smol', - 'geo', - 'llsnorm', - 'masknan', - 'hdr_copy', - 'splitinter', + "mul", + "div", + "add", + "sub", + "pow", + "thr", + "uthr", + "smo", + "edge", + "sobel3", + "sobel5", + "min", + "smol", + "geo", + "llsnorm", + "masknan", + "hdr_copy", + "splitinter", mandatory=True, - argstr='-%s', + argstr="-%s", position=4, - desc='operation to perform') + desc="operation to perform", + ) operand_file = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=5, - xor=['operand_value', 'operand_str'], - desc='second image to perform operation with') + xor=["operand_value", "operand_str"], + desc="second image to perform operation with", + ) operand_value = traits.Float( - argstr='%.8f', + argstr="%.8f", mandatory=True, position=5, - xor=['operand_file', 'operand_str'], - desc='float value to perform operation with') + xor=["operand_file", "operand_str"], + desc="float value to perform operation with", + ) - desc = 'string value to perform operation splitinter' + desc = "string value to perform operation splitinter" operand_str = traits.Enum( - 'x', - 'y', - 'z', - argstr='%s', + "x", + "y", + "z", + argstr="%s", mandatory=True, position=5, - xor=['operand_value', 'operand_file'], - desc=desc) + xor=["operand_value", "operand_file"], + desc=desc, + ) class BinaryMaths(MathsCommand): @@ -368,70 +384,71 @@ class BinaryMaths(MathsCommand): >>> binary_splitinter.run() # doctest: +SKIP """ + input_spec = BinaryMathsInput def _format_arg(self, opt, spec, val): """Convert input to appropriate format for seg_maths.""" - if opt == 'operand_str' and self.inputs.operation != 'splitinter': + if opt == "operand_str" and self.inputs.operation != "splitinter": err = 
'operand_str set but with an operation different than \ "splitinter"' raise NipypeInterfaceError(err) - if opt == 'operation': + if opt == "operation": # Only float - if val in [ - 'pow', 'thr', 'uthr', 'smo', 'edge', 'sobel3', 'sobel5', - 'smol' - ]: + if val in ["pow", "thr", "uthr", "smo", "edge", "sobel3", "sobel5", "smol"]: if not isdefined(self.inputs.operand_value): - err = 'operand_value not set for {0}.'.format(val) + err = "operand_value not set for {0}.".format(val) raise NipypeInterfaceError(err) # only files - elif val in ['min', 'llsnorm', 'masknan', 'hdr_copy']: + elif val in ["min", "llsnorm", "masknan", "hdr_copy"]: if not isdefined(self.inputs.operand_file): - err = 'operand_file not set for {0}.'.format(val) + err = "operand_file not set for {0}.".format(val) raise NipypeInterfaceError(err) # splitinter: - elif val == 'splitinter': + elif val == "splitinter": if not isdefined(self.inputs.operand_str): - err = 'operand_str not set for splitinter.' + err = "operand_str not set for splitinter." raise NipypeInterfaceError(err) - if opt == 'operand_value' and float(val) == 0.0: - return '0' + if opt == "operand_value" and float(val) == 0.0: + return "0" return super(BinaryMaths, self)._format_arg(opt, spec, val) def _overload_extension(self, value, name=None): - if self.inputs.operation == 'hdr_copy': + if self.inputs.operation == "hdr_copy": path, base, _ = split_filename(value) _, base, ext = split_filename(self.inputs.operand_file) suffix = self.inputs.operation - return os.path.join(path, '{0}{1}{2}'.format(base, suffix, ext)) + return os.path.join(path, "{0}{1}{2}".format(base, suffix, ext)) else: return super(BinaryMaths, self)._overload_extension(value, name) class BinaryMathsInputInteger(MathsInput): """Input Spec for seg_maths Binary operations that require integer.""" + operation = traits.Enum( - 'dil', - 'ero', - 'tp', - 'equal', - 'pad', - 'crop', + "dil", + "ero", + "tp", + "equal", + "pad", + "crop", mandatory=True, - argstr='-%s', + argstr="-%s", position=4, - desc='operation to perform') + desc="operation to perform", + ) operand_value = traits.Int( - argstr='%d', + argstr="%d", mandatory=True, position=5, - desc='int value to perform operation with') + desc="int value to perform operation with", + ) class BinaryMathsInteger(MathsCommand): @@ -488,51 +505,50 @@ class BinaryMathsInteger(MathsCommand): >>> binaryi_pad.run() # doctest: +SKIP """ + input_spec = BinaryMathsInputInteger class TupleMathsInput(MathsInput): """Input Spec for seg_maths Tuple operations.""" + operation = traits.Enum( - 'lncc', - 'lssd', - 'lltsnorm', + "lncc", + "lssd", + "lltsnorm", mandatory=True, - argstr='-%s', + argstr="-%s", position=4, - desc='operation to perform') + desc="operation to perform", + ) operand_file1 = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=5, - xor=['operand_value1'], - desc='image to perform operation 1 with') + xor=["operand_value1"], + desc="image to perform operation 1 with", + ) - desc = 'float value to perform operation 1 with' + desc = "float value to perform operation 1 with" operand_value1 = traits.Float( - argstr='%.8f', - mandatory=True, - position=5, - xor=['operand_file1'], - desc=desc) + argstr="%.8f", mandatory=True, position=5, xor=["operand_file1"], desc=desc + ) operand_file2 = File( exists=True, - argstr='%s', + argstr="%s", mandatory=True, position=6, - xor=['operand_value2'], - desc='image to perform operation 2 with') + xor=["operand_value2"], + desc="image to perform operation 2 with", + ) - desc = 'float 
value to perform operation 2 with'
+    desc = "float value to perform operation 2 with"
     operand_value2 = traits.Float(
-        argstr='%.8f',
-        mandatory=True,
-        position=6,
-        xor=['operand_file2'],
-        desc=desc)
+        argstr="%.8f", mandatory=True, position=6, xor=["operand_file2"], desc=desc
+    )


 class TupleMaths(MathsCommand):
@@ -592,17 +608,19 @@ class TupleMaths(MathsCommand):
     im1_lltsnorm.nii'
     >>> tuple_lltsnorm.run()  # doctest: +SKIP

     """
+
     input_spec = TupleMathsInput


 class MergeInput(MathsInput):
     """Input Spec for seg_maths merge operation."""
-    dimension = traits.Int(
-        mandatory=True, desc='Dimension to merge the images.')
-    desc = 'List of images to merge to the working image .'
+    dimension = traits.Int(mandatory=True, desc="Dimension to merge the images.")
+
+    desc = "List of images to merge to the working image."
     merge_files = traits.List(
-        File(exists=True), argstr='%s', mandatory=True, position=4, desc=desc)
+        File(exists=True), argstr="%s", mandatory=True, position=4, desc=desc
+    )


 class Merge(MathsCommand):
@@ -633,13 +651,13 @@ class Merge(MathsCommand):
     'seg_maths im1.nii -merge 2 2 im2.nii im3.nii -odt float im1_merged.nii'

     """
+
     input_spec = MergeInput
-    _suffix = '_merged'
+    _suffix = "_merged"

     def _format_arg(self, opt, spec, val):
         """Convert input to appropriate format for seg_maths."""
-        if opt == 'merge_files':
-            return "-merge %d %d %s" % (len(val), self.inputs.dimension,
-                                        ' '.join(val))
+        if opt == "merge_files":
+            return "-merge %d %d %s" % (len(val), self.inputs.dimension, " ".join(val))

         return super(Merge, self)._format_arg(opt, spec, val)
diff --git a/nipype/interfaces/niftyseg/patchmatch.py b/nipype/interfaces/niftyseg/patchmatch.py
index 5732b1ba17..9dd7ddff5e 100644
--- a/nipype/interfaces/niftyseg/patchmatch.py
+++ b/nipype/interfaces/niftyseg/patchmatch.py
@@ -12,61 +12,65 @@
 from ..niftyreg.base import get_custom_path

 warn = warnings.warn
-warnings.filterwarnings('always', category=UserWarning)
+warnings.filterwarnings("always", category=UserWarning)


 class PatchMatchInputSpec(CommandLineInputSpec):
     """Input Spec for PatchMatch."""
+
     # Mandatory input arguments
     in_file = File(
-        argstr='-i %s',
+        argstr="-i %s",
         exists=True,
         mandatory=True,
-        desc='Input image to segment',
-        position=1)
+        desc="Input image to segment",
+        position=1,
+    )

     mask_file = File(
-        argstr='-m %s',
+        argstr="-m %s",
         exists=True,
         mandatory=True,
-        desc='Input mask for the area where applies PatchMatch',
-        position=2)
+        desc="Input mask for the area where PatchMatch is applied",
+        position=2,
+    )

     database_file = File(
-        argstr='-db %s',
+        argstr="-db %s",
         exists=True,
         mandatory=True,
-        desc='Database with the segmentations',
-        position=3)
+        desc="Database with the segmentations",
+        position=3,
+    )

     # Output file name
     out_file = File(
-        name_source=['in_file'],
-        name_template='%s_pm.nii.gz',
-        desc='The output filename of the patchmatch results',
-        argstr='-o %s',
-        position=4)
+        name_source=["in_file"],
+        name_template="%s_pm.nii.gz",
+        desc="The output filename of the patchmatch results",
+        argstr="-o %s",
+        position=4,
+    )

     # Optional arguments
-    patch_size = traits.Int(desc="Patch size, #voxels", argstr='-size %i')
+    patch_size = traits.Int(desc="Patch size, #voxels", argstr="-size %i")

     desc = "Constrained search area size, number of times bigger than the \
patchsize"
-    cs_size = traits.Int(desc=desc, argstr='-cs %i')
+    cs_size = traits.Int(desc=desc, argstr="-cs %i")

-    match_num = traits.Int(
-        desc="Number of better matching", argstr='-match %i')
+    match_num = traits.Int(desc="Number of 
better matching", argstr="-match %i") - pm_num = traits.Int( - desc="Number of patchmatch executions", argstr='-pm %i') + pm_num = traits.Int(desc="Number of patchmatch executions", argstr="-pm %i") desc = "Number of iterations for the patchmatch algorithm" - it_num = traits.Int(desc=desc, argstr='-it %i') + it_num = traits.Int(desc=desc, argstr="-it %i") class PatchMatchOutputSpec(TraitedSpec): """OutputSpec for PatchMatch.""" + out_file = File(desc="Output segmentation") @@ -99,7 +103,8 @@ class PatchMatch(NiftySegCommand): 'seg_PatchMatch -i im1.nii -m im2.nii -db db.xml -o im1_pm.nii.gz' """ - _cmd = get_custom_path('seg_PatchMatch', env_dir='NIFTYSEGDIR') + + _cmd = get_custom_path("seg_PatchMatch", env_dir="NIFTYSEGDIR") input_spec = PatchMatchInputSpec output_spec = PatchMatchOutputSpec - _suffix = '_pm' + _suffix = "_pm" diff --git a/nipype/interfaces/niftyseg/stats.py b/nipype/interfaces/niftyseg/stats.py index 94c7abd49e..611f293b42 100644 --- a/nipype/interfaces/niftyseg/stats.py +++ b/nipype/interfaces/niftyseg/stats.py @@ -13,27 +13,27 @@ class StatsInput(CommandLineInputSpec): """Input Spec for seg_stats interfaces.""" + in_file = File( - position=2, - argstr='%s', - exists=True, - mandatory=True, - desc='image to operate on') + position=2, argstr="%s", exists=True, mandatory=True, desc="image to operate on" + ) # Constrains mask_file = File( exists=True, position=-2, - argstr='-m %s', - desc='statistics within the masked area') + argstr="-m %s", + desc="statistics within the masked area", + ) - desc = 'Only estimate statistics if voxel is larger than ' - larger_voxel = traits.Float(argstr='-t %f', position=-3, desc=desc) + desc = "Only estimate statistics if voxel is larger than " + larger_voxel = traits.Float(argstr="-t %f", position=-3, desc=desc) class StatsOutput(TraitedSpec): """Output Spec for seg_stats interfaces.""" - output = traits.Array(desc='Output array from seg_stats') + + output = traits.Array(desc="Output array from seg_stats") class StatsCommand(NiftySegCommand): @@ -50,14 +50,15 @@ class StatsCommand(NiftySegCommand): robust to the presence of NaNs, and can be constrained by a mask and/or thresholded at a certain level. 
""" - _cmd = get_custom_path('seg_stats', env_dir='NIFTYSEGDIR') + + _cmd = get_custom_path("seg_stats", env_dir="NIFTYSEGDIR") input_spec = StatsInput output_spec = StatsOutput def _parse_stdout(self, stdout): out = [] for string_line in stdout.split("\n"): - if string_line.startswith('#'): + if string_line.startswith("#"): continue if len(string_line) <= 1: continue @@ -72,34 +73,36 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self.output_spec().get() - outputs['output'] = self.output + outputs["output"] = self.output return outputs class UnaryStatsInput(StatsInput): """Input Spec for seg_stats unary operations.""" + operation = traits.Enum( - 'r', - 'R', - 'a', - 's', - 'v', - 'vl', - 'vp', - 'n', - 'np', - 'e', - 'ne', - 'x', - 'X', - 'c', - 'B', - 'xvox', - 'xdim', - argstr='-%s', + "r", + "R", + "a", + "s", + "v", + "vl", + "vp", + "n", + "np", + "e", + "ne", + "x", + "X", + "c", + "B", + "xvox", + "xdim", + argstr="-%s", position=4, mandatory=True, - desc='operation to perform') + desc="operation to perform", + ) class UnaryStats(StatsCommand): @@ -178,26 +181,29 @@ class UnaryStats(StatsCommand): >>> unary_x.run() # doctest: +SKIP """ + input_spec = UnaryStatsInput class BinaryStatsInput(StatsInput): """Input Spec for seg_stats Binary operations.""" + operation = traits.Enum( - 'p', - 'sa', - 'ss', - 'svp', - 'al', - 'd', - 'ncc', - 'nmi', - 'Vl', - 'Nl', + "p", + "sa", + "ss", + "svp", + "al", + "d", + "ncc", + "nmi", + "Vl", + "Nl", mandatory=True, - argstr='-%s', + argstr="-%s", position=4, - desc='operation to perform') + desc="operation to perform", + ) operand_file = File( exists=True, @@ -205,14 +211,16 @@ class BinaryStatsInput(StatsInput): mandatory=True, position=5, xor=["operand_value"], - desc="second image to perform operation with") + desc="second image to perform operation with", + ) operand_value = traits.Float( - argstr='%.8f', + argstr="%.8f", mandatory=True, position=5, xor=["operand_file"], - desc='value to perform operation with') + desc="value to perform operation with", + ) class BinaryStats(StatsCommand): @@ -280,4 +288,5 @@ class BinaryStats(StatsCommand): >>> binary_nl.run() # doctest: +SKIP """ + input_spec = BinaryStatsInput diff --git a/nipype/interfaces/niftyseg/tests/test_auto_BinaryMaths.py b/nipype/interfaces/niftyseg/tests/test_auto_BinaryMaths.py index d094c52724..ae7bb8a8ef 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_BinaryMaths.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_BinaryMaths.py @@ -4,60 +4,47 @@ def test_BinaryMaths_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), operand_file=dict( - argstr='%s', + argstr="%s", extensions=None, mandatory=True, position=5, - xor=['operand_value', 'operand_str'], + xor=["operand_value", "operand_str"], ), operand_str=dict( - argstr='%s', + argstr="%s", mandatory=True, position=5, - xor=['operand_value', 'operand_file'], + xor=["operand_value", "operand_file"], ), operand_value=dict( - argstr='%.8f', + argstr="%.8f", mandatory=True, position=5, - xor=['operand_file', 'operand_str'], - ), - operation=dict( - argstr='-%s', - mandatory=True, - position=4, + xor=["operand_file", "operand_str"], ), + operation=dict(argstr="-%s", mandatory=True, position=4,), 
out_file=dict( - argstr='%s', + argstr="%s", extensions=None, - name_source=['in_file'], - name_template='%s', + name_source=["in_file"], + name_template="%s", position=-2, ), - output_datatype=dict( - argstr='-odt %s', - position=-3, - ), + output_datatype=dict(argstr="-odt %s", position=-3,), ) inputs = BinaryMaths.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BinaryMaths_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = BinaryMaths.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_BinaryMathsInteger.py b/nipype/interfaces/niftyseg/tests/test_auto_BinaryMathsInteger.py index 7c80638583..195a361f58 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_BinaryMathsInteger.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_BinaryMathsInteger.py @@ -4,46 +4,29 @@ def test_BinaryMathsInteger_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - operand_value=dict( - argstr='%d', - mandatory=True, - position=5, - ), - operation=dict( - argstr='-%s', - mandatory=True, - position=4, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + operand_value=dict(argstr="%d", mandatory=True, position=5,), + operation=dict(argstr="-%s", mandatory=True, position=4,), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, - name_source=['in_file'], - name_template='%s', + name_source=["in_file"], + name_template="%s", position=-2, ), - output_datatype=dict( - argstr='-odt %s', - position=-3, - ), + output_datatype=dict(argstr="-odt %s", position=-3,), ) inputs = BinaryMathsInteger.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BinaryMathsInteger_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = BinaryMathsInteger.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_BinaryStats.py b/nipype/interfaces/niftyseg/tests/test_auto_BinaryStats.py index 184e089335..61ef530418 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_BinaryStats.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_BinaryStats.py @@ -4,52 +4,32 @@ def test_BinaryStats_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - larger_voxel=dict( - argstr='-t %f', - position=-3, - ), - mask_file=dict( - argstr='-m %s', - extensions=None, - position=-2, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + larger_voxel=dict(argstr="-t %f", position=-3,), + mask_file=dict(argstr="-m %s", extensions=None, position=-2,), operand_file=dict( - argstr='%s', + argstr="%s", extensions=None, mandatory=True, position=5, - xor=['operand_value'], + xor=["operand_value"], ), operand_value=dict( - argstr='%.8f', - 
mandatory=True, - position=5, - xor=['operand_file'], - ), - operation=dict( - argstr='-%s', - mandatory=True, - position=4, + argstr="%.8f", mandatory=True, position=5, xor=["operand_file"], ), + operation=dict(argstr="-%s", mandatory=True, position=4,), ) inputs = BinaryStats.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BinaryStats_outputs(): - output_map = dict(output=dict(), ) + output_map = dict(output=dict(),) outputs = BinaryStats.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_CalcTopNCC.py b/nipype/interfaces/niftyseg/tests/test_auto_CalcTopNCC.py index e75455c96f..f1c16859eb 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_CalcTopNCC.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_CalcTopNCC.py @@ -4,44 +4,23 @@ def test_CalcTopNCC_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-target %s', - extensions=None, - mandatory=True, - position=1, - ), - in_templates=dict( - argstr='%s', - mandatory=True, - position=3, - ), - mask_file=dict( - argstr='-mask %s', - extensions=None, - ), - num_templates=dict( - argstr='-templates %s', - mandatory=True, - position=2, - ), - top_templates=dict( - argstr='-n %s', - mandatory=True, - position=4, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-target %s", extensions=None, mandatory=True, position=1,), + in_templates=dict(argstr="%s", mandatory=True, position=3,), + mask_file=dict(argstr="-mask %s", extensions=None,), + num_templates=dict(argstr="-templates %s", mandatory=True, position=2,), + top_templates=dict(argstr="-n %s", mandatory=True, position=4,), ) inputs = CalcTopNCC.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CalcTopNCC_outputs(): - output_map = dict(out_files=dict(), ) + output_map = dict(out_files=dict(),) outputs = CalcTopNCC.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_EM.py b/nipype/interfaces/niftyseg/tests/test_auto_EM.py index eac0fd86a0..ac340e89ff 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_EM.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_EM.py @@ -4,86 +4,59 @@ def test_EM_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bc_order_val=dict( - argstr='-bc_order %s', - usedefault=True, - ), - bc_thresh_val=dict( - argstr='-bc_thresh %s', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - position=4, - ), - mask_file=dict( - argstr='-mask %s', - extensions=None, - ), - max_iter=dict( - argstr='-max_iter %s', - usedefault=True, - ), - min_iter=dict( - argstr='-min_iter %s', - usedefault=True, - ), - mrf_beta_val=dict(argstr='-mrf_beta %s', ), + args=dict(argstr="%s",), + bc_order_val=dict(argstr="-bc_order %s", usedefault=True,), + bc_thresh_val=dict(argstr="-bc_thresh %s", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True, position=4,), + mask_file=dict(argstr="-mask %s", extensions=None,), + max_iter=dict(argstr="-max_iter %s", usedefault=True,), + 
min_iter=dict(argstr="-min_iter %s", usedefault=True,), + mrf_beta_val=dict(argstr="-mrf_beta %s",), no_prior=dict( - argstr='-nopriors %s', - mandatory=True, - xor=['prior_4D', 'priors'], + argstr="-nopriors %s", mandatory=True, xor=["prior_4D", "priors"], ), out_bc_file=dict( - argstr='-bc_out %s', + argstr="-bc_out %s", extensions=None, - name_source=['in_file'], - name_template='%s_bc_em.nii.gz', + name_source=["in_file"], + name_template="%s_bc_em.nii.gz", ), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, - name_source=['in_file'], - name_template='%s_em.nii.gz', + name_source=["in_file"], + name_template="%s_em.nii.gz", ), out_outlier_file=dict( - argstr='-out_outlier %s', + argstr="-out_outlier %s", extensions=None, - name_source=['in_file'], - name_template='%s_outlier_em.nii.gz', + name_source=["in_file"], + name_template="%s_outlier_em.nii.gz", ), - outlier_val=dict(argstr='-outlier %s %s', ), + outlier_val=dict(argstr="-outlier %s %s",), prior_4D=dict( - argstr='-prior4D %s', + argstr="-prior4D %s", extensions=None, mandatory=True, - xor=['no_prior', 'priors'], + xor=["no_prior", "priors"], ), - priors=dict( - argstr='%s', - mandatory=True, - xor=['no_prior', 'prior_4D'], - ), - reg_val=dict(argstr='-reg %s', ), - relax_priors=dict(argstr='-rf %s %s', ), + priors=dict(argstr="%s", mandatory=True, xor=["no_prior", "prior_4D"],), + reg_val=dict(argstr="-reg %s",), + relax_priors=dict(argstr="-rf %s %s",), ) inputs = EM.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EM_outputs(): output_map = dict( - out_bc_file=dict(extensions=None, ), - out_file=dict(extensions=None, ), - out_outlier_file=dict(extensions=None, ), + out_bc_file=dict(extensions=None,), + out_file=dict(extensions=None,), + out_outlier_file=dict(extensions=None,), ) outputs = EM.output_spec() diff --git a/nipype/interfaces/niftyseg/tests/test_auto_FillLesions.py b/nipype/interfaces/niftyseg/tests/test_auto_FillLesions.py index a6d11d735e..0e4c3d65bf 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_FillLesions.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_FillLesions.py @@ -4,53 +4,39 @@ def test_FillLesions_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bin_mask=dict( - argstr='-mask %s', - extensions=None, - ), - cwf=dict(argstr='-cwf %f', ), - debug=dict(argstr='-debug', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_dilation=dict(argstr='-dil %d', ), - in_file=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - position=1, - ), - lesion_mask=dict( - argstr='-l %s', - extensions=None, - mandatory=True, - position=2, - ), - match=dict(argstr='-match %f', ), - other=dict(argstr='-other', ), - out_datatype=dict(argstr='-odt %s', ), + args=dict(argstr="%s",), + bin_mask=dict(argstr="-mask %s", extensions=None,), + cwf=dict(argstr="-cwf %f",), + debug=dict(argstr="-debug",), + environ=dict(nohash=True, usedefault=True,), + in_dilation=dict(argstr="-dil %d",), + in_file=dict(argstr="-i %s", extensions=None, mandatory=True, position=1,), + lesion_mask=dict(argstr="-l %s", extensions=None, mandatory=True, position=2,), + match=dict(argstr="-match %f",), + other=dict(argstr="-other",), + out_datatype=dict(argstr="-odt %s",), out_file=dict( - argstr='-o %s', + argstr="-o %s", extensions=None, - name_source=['in_file'], - name_template='%s_lesions_filled.nii.gz', + name_source=["in_file"], + 
name_template="%s_lesions_filled.nii.gz", position=3, ), - search=dict(argstr='-search %f', ), - size=dict(argstr='-size %d', ), - smooth=dict(argstr='-smo %f', ), - use_2d=dict(argstr='-2D', ), - verbose=dict(argstr='-v', ), + search=dict(argstr="-search %f",), + size=dict(argstr="-size %d",), + smooth=dict(argstr="-smo %f",), + use_2d=dict(argstr="-2D",), + verbose=dict(argstr="-v",), ) inputs = FillLesions.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FillLesions_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = FillLesions.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_LabelFusion.py b/nipype/interfaces/niftyseg/tests/test_auto_LabelFusion.py index 5d8f170899..ba319d3475 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_LabelFusion.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_LabelFusion.py @@ -4,63 +4,43 @@ def test_LabelFusion_inputs(): input_map = dict( - args=dict(argstr='%s', ), - classifier_type=dict( - argstr='-%s', - mandatory=True, - position=2, - ), - conv=dict(argstr='-conv %f', ), + args=dict(argstr="%s",), + classifier_type=dict(argstr="-%s", mandatory=True, position=2,), + conv=dict(argstr="-conv %f",), dilation_roi=dict(), - environ=dict( - nohash=True, - usedefault=True, - ), - file_to_seg=dict( - extensions=None, - mandatory=True, - ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - position=1, - ), + environ=dict(nohash=True, usedefault=True,), + file_to_seg=dict(extensions=None, mandatory=True,), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True, position=1,), kernel_size=dict(), - mask_file=dict( - argstr='-mask %s', - extensions=None, - ), - max_iter=dict(argstr='-max_iter %d', ), - mrf_value=dict(argstr='-MRF_beta %f', ), + mask_file=dict(argstr="-mask %s", extensions=None,), + max_iter=dict(argstr="-max_iter %d",), + mrf_value=dict(argstr="-MRF_beta %f",), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, - name_source=['in_file'], - name_template='%s', - ), - prob_flag=dict(argstr='-outProb', ), - prob_update_flag=dict(argstr='-prop_update', ), - proportion=dict(argstr='-prop %s', ), - set_pq=dict(argstr='-setPQ %f %f', ), - sm_ranking=dict( - argstr='-%s', - position=3, - usedefault=True, + name_source=["in_file"], + name_template="%s", ), - template_file=dict(extensions=None, ), + prob_flag=dict(argstr="-outProb",), + prob_update_flag=dict(argstr="-prop_update",), + proportion=dict(argstr="-prop %s",), + set_pq=dict(argstr="-setPQ %f %f",), + sm_ranking=dict(argstr="-%s", position=3, usedefault=True,), + template_file=dict(extensions=None,), template_num=dict(), - unc=dict(argstr='-unc', ), - unc_thresh=dict(argstr='-uncthres %f', ), - verbose=dict(argstr='-v %s', ), + unc=dict(argstr="-unc",), + unc_thresh=dict(argstr="-uncthres %f",), + verbose=dict(argstr="-v %s",), ) inputs = LabelFusion.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LabelFusion_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = LabelFusion.output_spec() for key, metadata in list(output_map.items()): diff --git 
a/nipype/interfaces/niftyseg/tests/test_auto_MathsCommand.py b/nipype/interfaces/niftyseg/tests/test_auto_MathsCommand.py index 742e12447c..f8f7bcf95d 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_MathsCommand.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_MathsCommand.py @@ -4,36 +4,27 @@ def test_MathsCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, - name_source=['in_file'], - name_template='%s', + name_source=["in_file"], + name_template="%s", position=-2, ), - output_datatype=dict( - argstr='-odt %s', - position=-3, - ), + output_datatype=dict(argstr="-odt %s", position=-3,), ) inputs = MathsCommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MathsCommand_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = MathsCommand.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_Merge.py b/nipype/interfaces/niftyseg/tests/test_auto_Merge.py index 02052f2ada..cfeb8a01d0 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_Merge.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_Merge.py @@ -4,42 +4,29 @@ def test_Merge_inputs(): input_map = dict( - args=dict(argstr='%s', ), - dimension=dict(mandatory=True, ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - merge_files=dict( - argstr='%s', - mandatory=True, - position=4, - ), + args=dict(argstr="%s",), + dimension=dict(mandatory=True,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + merge_files=dict(argstr="%s", mandatory=True, position=4,), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, - name_source=['in_file'], - name_template='%s', + name_source=["in_file"], + name_template="%s", position=-2, ), - output_datatype=dict( - argstr='-odt %s', - position=-3, - ), + output_datatype=dict(argstr="-odt %s", position=-3,), ) inputs = Merge.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Merge_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Merge.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_NiftySegCommand.py b/nipype/interfaces/niftyseg/tests/test_auto_NiftySegCommand.py index c0d36cdfe6..e78d913a4c 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_NiftySegCommand.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_NiftySegCommand.py @@ -4,11 +4,7 @@ def test_NiftySegCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), environ=dict(nohash=True, usedefault=True,), ) inputs = NiftySegCommand.input_spec() diff --git 
a/nipype/interfaces/niftyseg/tests/test_auto_PatchMatch.py b/nipype/interfaces/niftyseg/tests/test_auto_PatchMatch.py index 9206be5647..3832a197f6 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_PatchMatch.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_PatchMatch.py @@ -4,49 +4,35 @@ def test_PatchMatch_inputs(): input_map = dict( - args=dict(argstr='%s', ), - cs_size=dict(argstr='-cs %i', ), + args=dict(argstr="%s",), + cs_size=dict(argstr="-cs %i",), database_file=dict( - argstr='-db %s', - extensions=None, - mandatory=True, - position=3, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-i %s', - extensions=None, - mandatory=True, - position=1, + argstr="-db %s", extensions=None, mandatory=True, position=3, ), - it_num=dict(argstr='-it %i', ), - mask_file=dict( - argstr='-m %s', - extensions=None, - mandatory=True, - position=2, - ), - match_num=dict(argstr='-match %i', ), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-i %s", extensions=None, mandatory=True, position=1,), + it_num=dict(argstr="-it %i",), + mask_file=dict(argstr="-m %s", extensions=None, mandatory=True, position=2,), + match_num=dict(argstr="-match %i",), out_file=dict( - argstr='-o %s', + argstr="-o %s", extensions=None, - name_source=['in_file'], - name_template='%s_pm.nii.gz', + name_source=["in_file"], + name_template="%s_pm.nii.gz", position=4, ), - patch_size=dict(argstr='-size %i', ), - pm_num=dict(argstr='-pm %i', ), + patch_size=dict(argstr="-size %i",), + pm_num=dict(argstr="-pm %i",), ) inputs = PatchMatch.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PatchMatch_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = PatchMatch.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_StatsCommand.py b/nipype/interfaces/niftyseg/tests/test_auto_StatsCommand.py index b4a3e25cdb..b0332f1a46 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_StatsCommand.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_StatsCommand.py @@ -4,34 +4,21 @@ def test_StatsCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - larger_voxel=dict( - argstr='-t %f', - position=-3, - ), - mask_file=dict( - argstr='-m %s', - extensions=None, - position=-2, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + larger_voxel=dict(argstr="-t %f", position=-3,), + mask_file=dict(argstr="-m %s", extensions=None, position=-2,), ) inputs = StatsCommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_StatsCommand_outputs(): - output_map = dict(output=dict(), ) + output_map = dict(output=dict(),) outputs = StatsCommand.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_TupleMaths.py b/nipype/interfaces/niftyseg/tests/test_auto_TupleMaths.py index 30f03e31a1..9fc193b442 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_TupleMaths.py +++ 
b/nipype/interfaces/niftyseg/tests/test_auto_TupleMaths.py @@ -4,67 +4,48 @@ def test_TupleMaths_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), operand_file1=dict( - argstr='%s', + argstr="%s", extensions=None, mandatory=True, position=5, - xor=['operand_value1'], + xor=["operand_value1"], ), operand_file2=dict( - argstr='%s', + argstr="%s", extensions=None, mandatory=True, position=6, - xor=['operand_value2'], + xor=["operand_value2"], ), operand_value1=dict( - argstr='%.8f', - mandatory=True, - position=5, - xor=['operand_file1'], + argstr="%.8f", mandatory=True, position=5, xor=["operand_file1"], ), operand_value2=dict( - argstr='%.8f', - mandatory=True, - position=6, - xor=['operand_file2'], - ), - operation=dict( - argstr='-%s', - mandatory=True, - position=4, + argstr="%.8f", mandatory=True, position=6, xor=["operand_file2"], ), + operation=dict(argstr="-%s", mandatory=True, position=4,), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, - name_source=['in_file'], - name_template='%s', + name_source=["in_file"], + name_template="%s", position=-2, ), - output_datatype=dict( - argstr='-odt %s', - position=-3, - ), + output_datatype=dict(argstr="-odt %s", position=-3,), ) inputs = TupleMaths.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TupleMaths_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = TupleMaths.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_UnaryMaths.py b/nipype/interfaces/niftyseg/tests/test_auto_UnaryMaths.py index 5acd5cb1e3..0409efb5c7 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_UnaryMaths.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_UnaryMaths.py @@ -4,41 +4,28 @@ def test_UnaryMaths_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - operation=dict( - argstr='-%s', - mandatory=True, - position=4, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + operation=dict(argstr="-%s", mandatory=True, position=4,), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, - name_source=['in_file'], - name_template='%s', + name_source=["in_file"], + name_template="%s", position=-2, ), - output_datatype=dict( - argstr='-odt %s', - position=-3, - ), + output_datatype=dict(argstr="-odt %s", position=-3,), ) inputs = UnaryMaths.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_UnaryMaths_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = UnaryMaths.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_auto_UnaryStats.py b/nipype/interfaces/niftyseg/tests/test_auto_UnaryStats.py 
index 9e3d7f81fc..177f044fd1 100644 --- a/nipype/interfaces/niftyseg/tests/test_auto_UnaryStats.py +++ b/nipype/interfaces/niftyseg/tests/test_auto_UnaryStats.py @@ -4,39 +4,22 @@ def test_UnaryStats_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), - larger_voxel=dict( - argstr='-t %f', - position=-3, - ), - mask_file=dict( - argstr='-m %s', - extensions=None, - position=-2, - ), - operation=dict( - argstr='-%s', - mandatory=True, - position=4, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,), + larger_voxel=dict(argstr="-t %f", position=-3,), + mask_file=dict(argstr="-m %s", extensions=None, position=-2,), + operation=dict(argstr="-%s", mandatory=True, position=4,), ) inputs = UnaryStats.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_UnaryStats_outputs(): - output_map = dict(output=dict(), ) + output_map = dict(output=dict(),) outputs = UnaryStats.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/niftyseg/tests/test_em_interfaces.py b/nipype/interfaces/niftyseg/tests/test_em_interfaces.py index f4c56da2fe..5615f3e61c 100644 --- a/nipype/interfaces/niftyseg/tests/test_em_interfaces.py +++ b/nipype/interfaces/niftyseg/tests/test_em_interfaces.py @@ -9,15 +9,14 @@ from .. import EM -@pytest.mark.skipif( - no_nifty_tool(cmd='seg_EM'), reason="niftyseg is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="seg_EM"), reason="niftyseg is not installed") def test_seg_em(): # Create a node object seg_em = EM() # Check if the command is properly defined - cmd = get_custom_path('seg_EM', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_EM", env_dir="NIFTYSEGDIR") assert seg_em.cmd == cmd # test raising error with mandatory args absent @@ -25,19 +24,19 @@ def test_seg_em(): seg_em.run() # Assign some input data - in_file = example_data('im1.nii') + in_file = example_data("im1.nii") seg_em.inputs.in_file = in_file seg_em.inputs.no_prior = 4 - cmd_tmp = '{cmd} -in {in_file} -nopriors 4 -bc_out {bc_out} -out \ -{out_file} -out_outlier {out_outlier}' + cmd_tmp = "{cmd} -in {in_file} -nopriors 4 -bc_out {bc_out} -out \ +{out_file} -out_outlier {out_outlier}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, - out_file='im1_em.nii.gz', - bc_out='im1_bc_em.nii.gz', - out_outlier='im1_outlier_em.nii.gz', + out_file="im1_em.nii.gz", + bc_out="im1_bc_em.nii.gz", + out_outlier="im1_outlier_em.nii.gz", ) assert seg_em.cmdline == expected_cmd diff --git a/nipype/interfaces/niftyseg/tests/test_extra_PatchMatch.py b/nipype/interfaces/niftyseg/tests/test_extra_PatchMatch.py index 9fe82ac544..18156e37f1 100644 --- a/nipype/interfaces/niftyseg/tests/test_extra_PatchMatch.py +++ b/nipype/interfaces/niftyseg/tests/test_extra_PatchMatch.py @@ -10,14 +10,15 @@ @pytest.mark.skipif( - no_nifty_tool(cmd='seg_PatchMatch'), reason="niftyseg is not installed") + no_nifty_tool(cmd="seg_PatchMatch"), reason="niftyseg is not installed" +) def test_seg_patchmatch(): # Create a node object seg_patchmatch = PatchMatch() # Check if the command is properly defined - cmd = get_custom_path('seg_PatchMatch', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_PatchMatch", env_dir="NIFTYSEGDIR") assert 
seg_patchmatch.cmd == cmd # test raising error with mandatory args absent @@ -25,20 +26,20 @@ def test_seg_patchmatch(): seg_patchmatch.run() # Assign some input data - in_file = example_data('im1.nii') - mask_file = example_data('im2.nii') - db_file = example_data('db.xml') + in_file = example_data("im1.nii") + mask_file = example_data("im2.nii") + db_file = example_data("db.xml") seg_patchmatch.inputs.in_file = in_file seg_patchmatch.inputs.mask_file = mask_file seg_patchmatch.inputs.database_file = db_file - cmd_tmp = '{cmd} -i {in_file} -m {mask_file} -db {db} -o {out_file}' + cmd_tmp = "{cmd} -i {in_file} -m {mask_file} -db {db} -o {out_file}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, mask_file=mask_file, db=db_file, - out_file='im1_pm.nii.gz', + out_file="im1_pm.nii.gz", ) assert seg_patchmatch.cmdline == expected_cmd diff --git a/nipype/interfaces/niftyseg/tests/test_label_fusion.py b/nipype/interfaces/niftyseg/tests/test_label_fusion.py index fdc186d6c2..0a469a774e 100644 --- a/nipype/interfaces/niftyseg/tests/test_label_fusion.py +++ b/nipype/interfaces/niftyseg/tests/test_label_fusion.py @@ -10,14 +10,15 @@ @pytest.mark.skipif( - no_nifty_tool(cmd='seg_LabFusion'), reason="niftyseg is not installed") + no_nifty_tool(cmd="seg_LabFusion"), reason="niftyseg is not installed" +) def test_seg_lab_fusion(): """ Test interfaces for seg_labfusion""" # Create a node object steps = LabelFusion() # Check if the command is properly defined - cmd = get_custom_path('seg_LabFusion', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_LabFusion", env_dir="NIFTYSEGDIR") assert steps.cmd == cmd # test raising error with mandatory args absent @@ -25,86 +26,84 @@ def test_seg_lab_fusion(): steps.run() # Assign some input data - in_file = example_data('im1.nii') - file_to_seg = example_data('im2.nii') - template_file = example_data('im3.nii') + in_file = example_data("im1.nii") + file_to_seg = example_data("im2.nii") + template_file = example_data("im3.nii") steps.inputs.in_file = in_file steps.inputs.kernel_size = 2.0 steps.inputs.file_to_seg = file_to_seg steps.inputs.template_file = template_file steps.inputs.template_num = 2 - steps.inputs.classifier_type = 'STEPS' + steps.inputs.classifier_type = "STEPS" - cmd_tmp = '{cmd} -in {in_file} -STEPS 2.000000 2 {file_to_seg} \ -{template_file} -out {out_file}' + cmd_tmp = "{cmd} -in {in_file} -STEPS 2.000000 2 {file_to_seg} \ +{template_file} -out {out_file}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, file_to_seg=file_to_seg, template_file=template_file, - out_file='im1_steps.nii', + out_file="im1_steps.nii", ) assert steps.cmdline == expected_cmd # Staple - staple = LabelFusion( - kernel_size=2.0, template_num=2, classifier_type='STAPLE') - in_file = example_data('im1.nii') - file_to_seg = example_data('im2.nii') - template_file = example_data('im3.nii') + staple = LabelFusion(kernel_size=2.0, template_num=2, classifier_type="STAPLE") + in_file = example_data("im1.nii") + file_to_seg = example_data("im2.nii") + template_file = example_data("im3.nii") staple.inputs.in_file = in_file staple.inputs.file_to_seg = file_to_seg staple.inputs.template_file = template_file - cmd_tmp = '{cmd} -in {in_file} -STAPLE -ALL -out {out_file}' + cmd_tmp = "{cmd} -in {in_file} -STAPLE -ALL -out {out_file}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, file_to_seg=file_to_seg, template_file=template_file, - out_file='im1_staple.nii', + out_file="im1_staple.nii", ) assert staple.cmdline == expected_cmd # Assign some input data 
mv_node = LabelFusion( - template_num=2, - classifier_type='MV', - sm_ranking='ROINCC', - dilation_roi=2) - in_file = example_data('im1.nii') - file_to_seg = example_data('im2.nii') - template_file = example_data('im3.nii') + template_num=2, classifier_type="MV", sm_ranking="ROINCC", dilation_roi=2 + ) + in_file = example_data("im1.nii") + file_to_seg = example_data("im2.nii") + template_file = example_data("im3.nii") mv_node.inputs.in_file = in_file mv_node.inputs.file_to_seg = file_to_seg mv_node.inputs.template_file = template_file - cmd_tmp = '{cmd} -in {in_file} -MV -ROINCC 2 2 {file_to_seg} \ -{template_file} -out {out_file}' + cmd_tmp = "{cmd} -in {in_file} -MV -ROINCC 2 2 {file_to_seg} \ +{template_file} -out {out_file}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, file_to_seg=file_to_seg, template_file=template_file, - out_file='im1_mv.nii', + out_file="im1_mv.nii", ) assert mv_node.cmdline == expected_cmd @pytest.mark.skipif( - no_nifty_tool(cmd='seg_CalcTopNCC'), reason="niftyseg is not installed") + no_nifty_tool(cmd="seg_CalcTopNCC"), reason="niftyseg is not installed" +) def test_seg_calctopncc(): """ Test interfaces for seg_CalctoNCC""" # Create a node object calctopncc = CalcTopNCC() # Check if the command is properly defined - cmd = get_custom_path('seg_CalcTopNCC', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_CalcTopNCC", env_dir="NIFTYSEGDIR") assert calctopncc.cmd == cmd # test raising error with mandatory args absent @@ -112,20 +111,15 @@ def test_seg_calctopncc(): calctopncc.run() # Assign some input data - in_file = example_data('im1.nii') - file1 = example_data('im2.nii') - file2 = example_data('im3.nii') + in_file = example_data("im1.nii") + file1 = example_data("im2.nii") + file2 = example_data("im3.nii") calctopncc.inputs.in_file = in_file calctopncc.inputs.num_templates = 2 calctopncc.inputs.in_templates = [file1, file2] calctopncc.inputs.top_templates = 1 - cmd_tmp = '{cmd} -target {in_file} -templates 2 {file1} {file2} -n 1' - expected_cmd = cmd_tmp.format( - cmd=cmd, - in_file=in_file, - file1=file1, - file2=file2, - ) + cmd_tmp = "{cmd} -target {in_file} -templates 2 {file1} {file2} -n 1" + expected_cmd = cmd_tmp.format(cmd=cmd, in_file=in_file, file1=file1, file2=file2,) assert calctopncc.cmdline == expected_cmd diff --git a/nipype/interfaces/niftyseg/tests/test_lesions.py b/nipype/interfaces/niftyseg/tests/test_lesions.py index 24b7e82cd7..d46b380cc2 100644 --- a/nipype/interfaces/niftyseg/tests/test_lesions.py +++ b/nipype/interfaces/niftyseg/tests/test_lesions.py @@ -10,14 +10,15 @@ @pytest.mark.skipif( - no_nifty_tool(cmd='seg_FillLesions'), reason="niftyseg is not installed") + no_nifty_tool(cmd="seg_FillLesions"), reason="niftyseg is not installed" +) def test_seg_filllesions(): # Create a node object seg_fill = FillLesions() # Check if the command is properly defined - cmd = get_custom_path('seg_FillLesions', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_FillLesions", env_dir="NIFTYSEGDIR") assert seg_fill.cmd == cmd # test raising error with mandatory args absent @@ -25,16 +26,16 @@ def test_seg_filllesions(): seg_fill.run() # Assign some input data - in_file = example_data('im1.nii') - lesion_mask = example_data('im2.nii') + in_file = example_data("im1.nii") + lesion_mask = example_data("im2.nii") seg_fill.inputs.in_file = in_file seg_fill.inputs.lesion_mask = lesion_mask - expected_cmd = '{cmd} -i {in_file} -l {lesion_mask} -o {out_file}'.format( + expected_cmd = "{cmd} -i {in_file} -l {lesion_mask} -o {out_file}".format( 
cmd=cmd, in_file=in_file, lesion_mask=lesion_mask, - out_file='im1_lesions_filled.nii.gz', + out_file="im1_lesions_filled.nii.gz", ) assert seg_fill.cmdline == expected_cmd diff --git a/nipype/interfaces/niftyseg/tests/test_maths.py b/nipype/interfaces/niftyseg/tests/test_maths.py index 0680a8a481..84740b7447 100644 --- a/nipype/interfaces/niftyseg/tests/test_maths.py +++ b/nipype/interfaces/niftyseg/tests/test_maths.py @@ -6,18 +6,17 @@ from ....testing import example_data from ...niftyreg import get_custom_path from ...niftyreg.tests.test_regutils import no_nifty_tool -from .. import (UnaryMaths, BinaryMaths, BinaryMathsInteger, TupleMaths, Merge) +from .. import UnaryMaths, BinaryMaths, BinaryMathsInteger, TupleMaths, Merge -@pytest.mark.skipif( - no_nifty_tool(cmd='seg_maths'), reason="niftyseg is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="seg_maths"), reason="niftyseg is not installed") def test_unary_maths(): # Create a node object unarym = UnaryMaths() # Check if the command is properly defined - cmd = get_custom_path('seg_maths', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_maths", env_dir="NIFTYSEGDIR") assert unarym.cmd == cmd # test raising error with mandatory args absent @@ -25,26 +24,26 @@ def test_unary_maths(): unarym.run() # Assign some input data - in_file = example_data('im1.nii') + in_file = example_data("im1.nii") unarym.inputs.in_file = in_file - unarym.inputs.operation = 'otsu' - unarym.inputs.output_datatype = 'float' + unarym.inputs.operation = "otsu" + unarym.inputs.output_datatype = "float" - expected_cmd = '{cmd} {in_file} -otsu -odt float {out_file}'.format( - cmd=cmd, in_file=in_file, out_file='im1_otsu.nii') + expected_cmd = "{cmd} {in_file} -otsu -odt float {out_file}".format( + cmd=cmd, in_file=in_file, out_file="im1_otsu.nii" + ) assert unarym.cmdline == expected_cmd -@pytest.mark.skipif( - no_nifty_tool(cmd='seg_maths'), reason="niftyseg is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="seg_maths"), reason="niftyseg is not installed") def test_binary_maths(): # Create a node object binarym = BinaryMaths() # Check if the command is properly defined - cmd = get_custom_path('seg_maths', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_maths", env_dir="NIFTYSEGDIR") assert binarym.cmd == cmd # test raising error with mandatory args absent @@ -52,28 +51,26 @@ def test_binary_maths(): binarym.run() # Assign some input data - in_file = example_data('im1.nii') + in_file = example_data("im1.nii") binarym.inputs.in_file = in_file binarym.inputs.operand_value = 2.0 - binarym.inputs.operation = 'sub' - binarym.inputs.output_datatype = 'float' + binarym.inputs.operation = "sub" + binarym.inputs.output_datatype = "float" - cmd_tmp = '{cmd} {in_file} -sub 2.00000000 -odt float {out_file}' - expected_cmd = cmd_tmp.format( - cmd=cmd, in_file=in_file, out_file='im1_sub.nii') + cmd_tmp = "{cmd} {in_file} -sub 2.00000000 -odt float {out_file}" + expected_cmd = cmd_tmp.format(cmd=cmd, in_file=in_file, out_file="im1_sub.nii") assert binarym.cmdline == expected_cmd -@pytest.mark.skipif( - no_nifty_tool(cmd='seg_maths'), reason="niftyseg is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="seg_maths"), reason="niftyseg is not installed") def test_int_binary_maths(): # Create a node object ibinarym = BinaryMathsInteger() # Check if the command is properly defined - cmd = get_custom_path('seg_maths', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_maths", env_dir="NIFTYSEGDIR") assert ibinarym.cmd == cmd # test raising error with mandatory 
args absent @@ -81,27 +78,27 @@ def test_int_binary_maths(): ibinarym.run() # Assign some input data - in_file = example_data('im1.nii') + in_file = example_data("im1.nii") ibinarym.inputs.in_file = in_file ibinarym.inputs.operand_value = 2 - ibinarym.inputs.operation = 'dil' - ibinarym.inputs.output_datatype = 'float' + ibinarym.inputs.operation = "dil" + ibinarym.inputs.output_datatype = "float" - expected_cmd = '{cmd} {in_file} -dil 2 -odt float {out_file}'.format( - cmd=cmd, in_file=in_file, out_file='im1_dil.nii') + expected_cmd = "{cmd} {in_file} -dil 2 -odt float {out_file}".format( + cmd=cmd, in_file=in_file, out_file="im1_dil.nii" + ) assert ibinarym.cmdline == expected_cmd -@pytest.mark.skipif( - no_nifty_tool(cmd='seg_maths'), reason="niftyseg is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="seg_maths"), reason="niftyseg is not installed") def test_tuple_maths(): # Create a node object tuplem = TupleMaths() # Check if the command is properly defined - cmd = get_custom_path('seg_maths', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_maths", env_dir="NIFTYSEGDIR") assert tuplem.cmd == cmd # test raising error with mandatory args absent @@ -109,30 +106,30 @@ def test_tuple_maths(): tuplem.run() # Assign some input data - in_file = example_data('im1.nii') - op_file = example_data('im2.nii') + in_file = example_data("im1.nii") + op_file = example_data("im2.nii") tuplem.inputs.in_file = in_file - tuplem.inputs.operation = 'lncc' + tuplem.inputs.operation = "lncc" tuplem.inputs.operand_file1 = op_file tuplem.inputs.operand_value2 = 2.0 - tuplem.inputs.output_datatype = 'float' + tuplem.inputs.output_datatype = "float" - cmd_tmp = '{cmd} {in_file} -lncc {op} 2.00000000 -odt float {out_file}' + cmd_tmp = "{cmd} {in_file} -lncc {op} 2.00000000 -odt float {out_file}" expected_cmd = cmd_tmp.format( - cmd=cmd, in_file=in_file, op=op_file, out_file='im1_lncc.nii') + cmd=cmd, in_file=in_file, op=op_file, out_file="im1_lncc.nii" + ) assert tuplem.cmdline == expected_cmd -@pytest.mark.skipif( - no_nifty_tool(cmd='seg_maths'), reason="niftyseg is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="seg_maths"), reason="niftyseg is not installed") def test_merge(): # Create a node object merge = Merge() # Check if the command is properly defined - cmd = get_custom_path('seg_maths', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_maths", env_dir="NIFTYSEGDIR") assert merge.cmd == cmd # test raising error with mandatory args absent @@ -140,20 +137,17 @@ def test_merge(): merge.run() # Assign some input data - in_file = example_data('im1.nii') - file1 = example_data('im2.nii') - file2 = example_data('im3.nii') + in_file = example_data("im1.nii") + file1 = example_data("im2.nii") + file2 = example_data("im3.nii") merge.inputs.in_file = in_file merge.inputs.merge_files = [file1, file2] merge.inputs.dimension = 2 - merge.inputs.output_datatype = 'float' + merge.inputs.output_datatype = "float" - cmd_tmp = '{cmd} {in_file} -merge 2 2 {f1} {f2} -odt float {out_file}' + cmd_tmp = "{cmd} {in_file} -merge 2 2 {f1} {f2} -odt float {out_file}" expected_cmd = cmd_tmp.format( - cmd=cmd, - in_file=in_file, - f1=file1, - f2=file2, - out_file='im1_merged.nii') + cmd=cmd, in_file=in_file, f1=file1, f2=file2, out_file="im1_merged.nii" + ) assert merge.cmdline == expected_cmd diff --git a/nipype/interfaces/niftyseg/tests/test_stats.py b/nipype/interfaces/niftyseg/tests/test_stats.py index 91c234e98d..cfeefe404a 100644 --- a/nipype/interfaces/niftyseg/tests/test_stats.py +++ 
b/nipype/interfaces/niftyseg/tests/test_stats.py @@ -9,15 +9,14 @@ from .. import UnaryStats, BinaryStats -@pytest.mark.skipif( - no_nifty_tool(cmd='seg_stats'), reason="niftyseg is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="seg_stats"), reason="niftyseg is not installed") def test_unary_stats(): """ Test for the seg_stats interfaces """ # Create a node object unarys = UnaryStats() # Check if the command is properly defined - cmd = get_custom_path('seg_stats', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_stats", env_dir="NIFTYSEGDIR") assert unarys.cmd == cmd # test raising error with mandatory args absent @@ -25,24 +24,23 @@ def test_unary_stats(): unarys.run() # Assign some input data - in_file = example_data('im1.nii') + in_file = example_data("im1.nii") unarys.inputs.in_file = in_file - unarys.inputs.operation = 'a' + unarys.inputs.operation = "a" - expected_cmd = '{cmd} {in_file} -a'.format(cmd=cmd, in_file=in_file) + expected_cmd = "{cmd} {in_file} -a".format(cmd=cmd, in_file=in_file) assert unarys.cmdline == expected_cmd -@pytest.mark.skipif( - no_nifty_tool(cmd='seg_stats'), reason="niftyseg is not installed") +@pytest.mark.skipif(no_nifty_tool(cmd="seg_stats"), reason="niftyseg is not installed") def test_binary_stats(): """ Test for the seg_stats interfaces """ # Create a node object binarys = BinaryStats() # Check if the command is properly defined - cmd = get_custom_path('seg_stats', env_dir='NIFTYSEGDIR') + cmd = get_custom_path("seg_stats", env_dir="NIFTYSEGDIR") assert binarys.cmd == cmd # test raising error with mandatory args absent @@ -50,12 +48,11 @@ def test_binary_stats(): binarys.run() # Assign some input data - in_file = example_data('im1.nii') + in_file = example_data("im1.nii") binarys.inputs.in_file = in_file binarys.inputs.operand_value = 2 - binarys.inputs.operation = 'sa' + binarys.inputs.operation = "sa" - expected_cmd = '{cmd} {in_file} -sa 2.00000000'.format( - cmd=cmd, in_file=in_file) + expected_cmd = "{cmd} {in_file} -sa 2.00000000".format(cmd=cmd, in_file=in_file) assert binarys.cmdline == expected_cmd diff --git a/nipype/interfaces/nilearn.py b/nipype/interfaces/nilearn.py index 06bfdf899f..38ecb84a3a 100644 --- a/nipype/interfaces/nilearn.py +++ b/nipype/interfaces/nilearn.py @@ -1,78 +1,88 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -''' +""" Algorithms to compute statistics on :abbr:`fMRI (functional MRI)` -''' +""" import os import numpy as np import nibabel as nb -from ..interfaces.base import (traits, TraitedSpec, LibraryBaseInterface, - SimpleInterface, BaseInterfaceInputSpec, File, - InputMultiPath) +from ..interfaces.base import ( + traits, + TraitedSpec, + LibraryBaseInterface, + SimpleInterface, + BaseInterfaceInputSpec, + File, + InputMultiPath, +) class NilearnBaseInterface(LibraryBaseInterface): - _pkg = 'nilearn' + _pkg = "nilearn" class SignalExtractionInputSpec(BaseInterfaceInputSpec): - in_file = File(exists=True, mandatory=True, desc='4-D fMRI nii file') + in_file = File(exists=True, mandatory=True, desc="4-D fMRI nii file") label_files = InputMultiPath( File(exists=True), mandatory=True, - desc='a 3-D label image, with 0 denoting ' - 'background, or a list of 3-D probability ' - 'maps (one per label) or the equivalent 4D ' - 'file.') + desc="a 3-D label image, with 0 denoting " + "background, or a list of 3-D probability " + "maps (one per label) or the equivalent 4D " + "file.", + ) class_labels = traits.List( 
mandatory=True, - desc='Human-readable labels for each segment ' - 'in the label file, in order. The length of ' - 'class_labels must be equal to the number of ' - 'segments (background excluded). This list ' - 'corresponds to the class labels in label_file ' - 'in ascending order') + desc="Human-readable labels for each segment " + "in the label file, in order. The length of " + "class_labels must be equal to the number of " + "segments (background excluded). This list " + "corresponds to the class labels in label_file " + "in ascending order", + ) out_file = File( - 'signals.tsv', + "signals.tsv", usedefault=True, exists=False, - desc='The name of the file to output to. ' - 'signals.tsv by default') + desc="The name of the file to output to. " "signals.tsv by default", + ) incl_shared_variance = traits.Bool( True, usedefault=True, - desc='By default ' - '(True), returns simple time series calculated from each ' - 'region independently (e.g., for noise regression). If ' - 'False, returns unique signals for each region, discarding ' - 'shared variance (e.g., for connectivity. Only has effect ' - 'with 4D probability maps.') + desc="By default " + "(True), returns simple time series calculated from each " + "region independently (e.g., for noise regression). If " + "False, returns unique signals for each region, discarding " + "shared variance (e.g., for connectivity). Only has effect " + "with 4D probability maps.", + ) include_global = traits.Bool( False, usedefault=True, - desc='If True, include an extra column ' + desc="If True, include an extra column " 'labeled "GlobalSignal", with values calculated from the entire brain ' - '(instead of just regions).') + "(instead of just regions).", + ) detrend = traits.Bool( - False, - usedefault=True, - desc='If True, perform detrending using nilearn.') + False, usedefault=True, desc="If True, perform detrending using nilearn."
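# --- Editor's aside (not part of the diff): a hedged usage sketch for
# --- SignalExtraction, following the doctest convention this file already
# --- uses; all file names below are hypothetical placeholders.
# >>> se = SignalExtraction()
# >>> se.inputs.in_file = "functional.nii"  # 4-D fMRI series
# >>> se.inputs.label_files = "segmentation0.nii.gz"  # 3-D label image
# >>> se.inputs.class_labels = ["CSF", "GrayMatter", "WhiteMatter"]
# >>> se.inputs.include_global = True  # adds a leading "GlobalSignal" column
# >>> se.inputs.detrend = True
# >>> se.run()  # doctest: +SKIP  -- writes signals.tsv, one column per label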
+ ) class SignalExtractionOutputSpec(TraitedSpec): out_file = File( exists=True, - desc='tsv file containing the computed ' - 'signals, with as many columns as there are labels and as ' - 'many rows as there are timepoints in in_file, plus a ' - 'header row with values from class_labels') + desc="tsv file containing the computed " + "signals, with as many columns as there are labels and as " + "many rows as there are timepoints in in_file, plus a " + "header row with values from class_labels", + ) class SignalExtraction(NilearnBaseInterface, SimpleInterface): - ''' + """ Extracts signals over tissue classes or brain regions >>> seinterface = SignalExtraction() @@ -83,7 +93,8 @@ class SignalExtraction(NilearnBaseInterface, SimpleInterface): >>> seinterface.inputs.class_labels = segments >>> seinterface.inputs.detrend = True >>> seinterface.inputs.include_global = True - ''' + """ + input_spec = SignalExtractionInputSpec output_spec = SignalExtractionOutputSpec @@ -95,20 +106,17 @@ def _run_interface(self, runtime): signals.append(masker.fit_transform(self.inputs.in_file)) region_signals = np.hstack(signals) - output = np.vstack((self.inputs.class_labels, - region_signals.astype(str))) + output = np.vstack((self.inputs.class_labels, region_signals.astype(str))) # save output - self._results['out_file'] = os.path.join(runtime.cwd, - self.inputs.out_file) - np.savetxt( - self._results['out_file'], output, fmt=b'%s', delimiter='\t') + self._results["out_file"] = os.path.join(runtime.cwd, self.inputs.out_file) + np.savetxt(self._results["out_file"], output, fmt=b"%s", delimiter="\t") return runtime def _process_inputs(self): - ''' validate and process inputs into useful form. + """ validate and process inputs into useful form. Returns a list of nilearn maskers and the list of corresponding label - names.''' + names.""" import nilearn.input_data as nl import nilearn.image as nli @@ -124,34 +132,41 @@ def _process_inputs(self): if self.inputs.incl_shared_variance: # independent computation for img in nli.iter_img(label_data): maskers.append( - nl.NiftiMapsMasker( - self._4d(img.get_data(), img.affine))) + nl.NiftiMapsMasker(self._4d(img.get_data(), img.affine)) + ) else: # one computation fitting all maskers.append(nl.NiftiMapsMasker(label_data)) # check label list size if not np.isclose(int(n_labels), n_labels): raise ValueError( - 'The label files {} contain invalid value {}. Check input.' - .format(self.inputs.label_files, n_labels)) + "The label files {} contain invalid value {}. 
Check input.".format( + self.inputs.label_files, n_labels + ) + ) if len(self.inputs.class_labels) != n_labels: - raise ValueError('The length of class_labels {} does not ' - 'match the number of regions {} found in ' - 'label_files {}'.format(self.inputs.class_labels, - n_labels, - self.inputs.label_files)) + raise ValueError( + "The length of class_labels {} does not " + "match the number of regions {} found in " + "label_files {}".format( + self.inputs.class_labels, n_labels, self.inputs.label_files + ) + ) if self.inputs.include_global: global_label_data = label_data.get_data().sum( - axis=3) # sum across all regions - global_label_data = np.rint(global_label_data).astype(int).clip( - 0, 1) # binarize + axis=3 + ) # sum across all regions + global_label_data = ( + np.rint(global_label_data).astype(int).clip(0, 1) + ) # binarize global_label_data = self._4d(global_label_data, label_data.affine) global_masker = nl.NiftiLabelsMasker( - global_label_data, detrend=self.inputs.detrend) + global_label_data, detrend=self.inputs.detrend + ) maskers.insert(0, global_masker) - self.inputs.class_labels.insert(0, 'GlobalSignal') + self.inputs.class_labels.insert(0, "GlobalSignal") for masker in maskers: masker.set_params(detrend=self.inputs.detrend) @@ -159,6 +174,6 @@ def _process_inputs(self): return maskers def _4d(self, array, affine): - ''' takes a 3-dimensional numpy array and an affine, - returns the equivalent 4th dimensional nifti file ''' + """ takes a 3-dimensional numpy array and an affine, + returns the equivalent 4th dimensional nifti file """ return nb.Nifti1Image(array[:, :, :, np.newaxis], affine) diff --git a/nipype/interfaces/nipy/base.py b/nipype/interfaces/nipy/base.py index 077499c52a..0991730e81 100644 --- a/nipype/interfaces/nipy/base.py +++ b/nipype/interfaces/nipy/base.py @@ -11,10 +11,10 @@ # Remove in 2.0 have_nipy = True try: - package_check('nipy') + package_check("nipy") except ImportError: have_nipy = False class NipyBaseInterface(LibraryBaseInterface): - _pkg = 'nipy' + _pkg = "nipy" diff --git a/nipype/interfaces/nipy/model.py b/nipype/interfaces/nipy/model.py index 66d1bbb6a9..c6287dac28 100644 --- a/nipype/interfaces/nipy/model.py +++ b/nipype/interfaces/nipy/model.py @@ -4,8 +4,14 @@ from ...utils import NUMPY_MMAP from .base import NipyBaseInterface -from ..base import (TraitedSpec, traits, File, OutputMultiPath, - BaseInterfaceInputSpec, isdefined) +from ..base import ( + TraitedSpec, + traits, + File, + OutputMultiPath, + BaseInterfaceInputSpec, + isdefined, +) class FitGLMInputSpec(BaseInterfaceInputSpec): @@ -13,49 +19,61 @@ class FitGLMInputSpec(BaseInterfaceInputSpec): minlen=1, maxlen=1, mandatory=True, - desc=('Session specific information generated by' - ' ``modelgen.SpecifyModel``, FitGLM does ' - 'not support multiple runs uless they are ' - 'concatenated (see SpecifyModel options)')) + desc=( + "Session specific information generated by" + " ``modelgen.SpecifyModel``, FitGLM does " + "not support multiple runs uless they are " + "concatenated (see SpecifyModel options)" + ), + ) hrf_model = traits.Enum( - 'Canonical', - 'Canonical With Derivative', - 'FIR', - desc=("that specifies the hemodynamic reponse " - "function it can be 'Canonical', 'Canonical " - "With Derivative' or 'FIR'"), - usedefault=True) + "Canonical", + "Canonical With Derivative", + "FIR", + desc=( + "that specifies the hemodynamic reponse " + "function it can be 'Canonical', 'Canonical " + "With Derivative' or 'FIR'" + ), + usedefault=True, + ) drift_model = traits.Enum( "Cosine", 
"Polynomial", "Blank", - desc=("string that specifies the desired drift " - "model, to be chosen among 'Polynomial', " - "'Cosine', 'Blank'"), - usedefault=True) + desc=( + "string that specifies the desired drift " + "model, to be chosen among 'Polynomial', " + "'Cosine', 'Blank'" + ), + usedefault=True, + ) TR = traits.Float(mandatory=True) model = traits.Enum( "ar1", "spherical", - desc=("autoregressive mode is available only for the " - "kalman method"), - usedefault=True) + desc=("autoregressive mode is available only for the " "kalman method"), + usedefault=True, + ) method = traits.Enum( "kalman", "ols", - desc=("method to fit the model, ols or kalma; kalman " - "is more time consuming but it supports " - "autoregressive model"), - usedefault=True) + desc=( + "method to fit the model, ols or kalma; kalman " + "is more time consuming but it supports " + "autoregressive model" + ), + usedefault=True, + ) mask = File( exists=True, - desc=("restrict the fitting only to the region defined " - "by this mask")) + desc=("restrict the fitting only to the region defined " "by this mask"), + ) normalize_design_matrix = traits.Bool( False, - desc=("normalize (zscore) the " - "regressors before fitting"), - usedefault=True) + desc=("normalize (zscore) the " "regressors before fitting"), + usedefault=True, + ) save_residuals = traits.Bool(False, usedefault=True) plot_design_matrix = traits.Bool(False, usedefault=True) @@ -73,9 +91,10 @@ class FitGLMOutputSpec(TraitedSpec): class FitGLM(NipyBaseInterface): - ''' + """ Fit GLM model based on the specified design. Supports only single or concatenated runs. - ''' + """ + input_spec = FitGLMInputSpec output_spec = FitGLMOutputSpec @@ -84,6 +103,7 @@ def _run_interface(self, runtime): import numpy as np import nipy.modalities.fmri.glm as GLM import nipy.modalities.fmri.design_matrix as dm + try: BlockParadigm = dm.BlockParadigm except AttributeError: @@ -91,7 +111,7 @@ def _run_interface(self, runtime): session_info = self.inputs.session_info - functional_runs = self.inputs.session_info[0]['scans'] + functional_runs = self.inputs.session_info[0]["scans"] if isinstance(functional_runs, (str, bytes)): functional_runs = [functional_runs] nii = nb.load(functional_runs[0]) @@ -115,21 +135,22 @@ def _run_interface(self, runtime): nscans = timeseries.shape[1] - if 'hpf' in list(session_info[0].keys()): - hpf = session_info[0]['hpf'] + if "hpf" in list(session_info[0].keys()): + hpf = session_info[0]["hpf"] drift_model = self.inputs.drift_model else: hpf = 0 drift_model = "Blank" reg_names = [] - for reg in session_info[0]['regress']: - reg_names.append(reg['name']) + for reg in session_info[0]["regress"]: + reg_names.append(reg["name"]) reg_vals = np.zeros((nscans, len(reg_names))) for i in range(len(reg_names)): - reg_vals[:, i] = np.array( - session_info[0]['regress'][i]['val']).reshape(1, -1) + reg_vals[:, i] = np.array(session_info[0]["regress"][i]["val"]).reshape( + 1, -1 + ) frametimes = np.linspace(0, (nscans - 1) * self.inputs.TR, nscans) @@ -137,17 +158,16 @@ def _run_interface(self, runtime): onsets = [] duration = [] - for i, cond in enumerate(session_info[0]['cond']): - onsets += cond['onset'] - conditions += [cond['name']] * len(cond['onset']) - if len(cond['duration']) == 1: - duration += cond['duration'] * len(cond['onset']) + for i, cond in enumerate(session_info[0]["cond"]): + onsets += cond["onset"] + conditions += [cond["name"]] * len(cond["onset"]) + if len(cond["duration"]) == 1: + duration += cond["duration"] * len(cond["onset"]) else: - 
duration += cond['duration'] + duration += cond["duration"] if conditions: - paradigm = BlockParadigm( - con_id=conditions, onset=onsets, duration=duration) + paradigm = BlockParadigm(con_id=conditions, onset=onsets, duration=duration) else: paradigm = None design_matrix, self._reg_names = dm.dmtx_light( @@ -157,15 +177,17 @@ def _run_interface(self, runtime): hfcut=hpf, hrf_model=self.inputs.hrf_model, add_regs=reg_vals, - add_reg_names=reg_names) + add_reg_names=reg_names, + ) if self.inputs.normalize_design_matrix: for i in range(len(self._reg_names) - 1): - design_matrix[:, i] = (( - design_matrix[:, i] - design_matrix[:, i].mean()) / - design_matrix[:, i].std()) + design_matrix[:, i] = ( + design_matrix[:, i] - design_matrix[:, i].mean() + ) / design_matrix[:, i].std() if self.inputs.plot_design_matrix: import pylab + pylab.pcolor(design_matrix) pylab.savefig("design_matrix.pdf") pylab.close() @@ -176,10 +198,11 @@ def _run_interface(self, runtime): timeseries.T, design_matrix, method=self.inputs.method, - model=self.inputs.model) + model=self.inputs.model, + ) self._beta_file = os.path.abspath("beta.nii") - beta = np.zeros(mask.shape + (glm.beta.shape[0], )) + beta = np.zeros(mask.shape + (glm.beta.shape[0],)) beta[mask, :] = glm.beta.T nb.save(nb.Nifti1Image(beta, nii.affine), self._beta_file) @@ -190,11 +213,10 @@ def _run_interface(self, runtime): if self.inputs.save_residuals: explained = np.dot(design_matrix, glm.beta) - residuals = np.zeros(mask.shape + (nscans, )) + residuals = np.zeros(mask.shape + (nscans,)) residuals[mask, :] = timeseries - explained.T self._residuals_file = os.path.abspath("residuals.nii") - nb.save( - nb.Nifti1Image(residuals, nii.affine), self._residuals_file) + nb.save(nb.Nifti1Image(residuals, nii.affine), self._residuals_file) self._nvbeta = glm.nvbeta self._dof = glm.dof @@ -229,33 +251,53 @@ def _list_outputs(self): class EstimateContrastInputSpec(BaseInterfaceInputSpec): contrasts = traits.List( traits.Either( - traits.Tuple(traits.Str, traits.Enum('T'), traits.List(traits.Str), - traits.List(traits.Float)), - traits.Tuple(traits.Str, traits.Enum('T'), traits.List(traits.Str), - traits.List(traits.Float), traits.List(traits.Float)), - traits.Tuple(traits.Str, traits.Enum('F'), - traits.List( - traits.Either( - traits.Tuple(traits.Str, traits.Enum('T'), - traits.List(traits.Str), - traits.List(traits.Float)), - traits.Tuple(traits.Str, traits.Enum('T'), - traits.List(traits.Str), - traits.List(traits.Float), - traits.List(traits.Float)))))), + traits.Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + ), + traits.Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + traits.List(traits.Float), + ), + traits.Tuple( + traits.Str, + traits.Enum("F"), + traits.List( + traits.Either( + traits.Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + ), + traits.Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + traits.List(traits.Float), + ), + ) + ), + ), + ), desc="""List of contrasts with each contrast being a list of the form: [('name', 'stat', [condition list], [weight list], [session list])]. if session list is None or not provided, all sessions are used. 
For F contrasts, the condition list should contain previously defined T-contrasts.""", - mandatory=True) + mandatory=True, + ) beta = File( - exists=True, - desc="beta coefficients of the fitted model", - mandatory=True) + exists=True, desc="beta coefficients of the fitted model", mandatory=True + ) nvbeta = traits.Any(mandatory=True) - s2 = File( - exists=True, desc="squared variance of the residuals", mandatory=True) + s2 = File(exists=True, desc="squared variance of the residuals", mandatory=True) dof = traits.Any(desc="degrees of freedom", mandatory=True) constants = traits.Any(mandatory=True) axis = traits.Any(mandatory=True) @@ -270,9 +312,10 @@ class EstimateContrastOutputSpec(TraitedSpec): class EstimateContrast(NipyBaseInterface): - ''' + """ Estimate contrast of a fitted model. - ''' + """ + input_spec = EstimateContrastInputSpec output_spec = EstimateContrastOutputSpec diff --git a/nipype/interfaces/nipy/preprocess.py b/nipype/interfaces/nipy/preprocess.py index 0f6ee5031d..3da52fbd04 100644 --- a/nipype/interfaces/nipy/preprocess.py +++ b/nipype/interfaces/nipy/preprocess.py @@ -8,19 +8,30 @@ from ...utils.filemanip import split_filename, fname_presuffix from .base import NipyBaseInterface, have_nipy -from ..base import (TraitedSpec, traits, BaseInterfaceInputSpec, - isdefined, File, InputMultiPath, OutputMultiPath) +from ..base import ( + TraitedSpec, + traits, + BaseInterfaceInputSpec, + isdefined, + File, + InputMultiPath, + OutputMultiPath, +) class ComputeMaskInputSpec(BaseInterfaceInputSpec): mean_volume = File( exists=True, mandatory=True, - desc="mean EPI image, used to compute the threshold for the mask") + desc="mean EPI image, used to compute the threshold for the mask", + ) reference_volume = File( exists=True, - desc=("reference volume used to compute the mask. " - "If none is give, the mean volume is used.")) + desc=( + "reference volume used to compute the mask. " + "If none is given, the mean volume is used."
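# --- Editor's aside (not part of the diff): a hedged ComputeMask sketch in
# --- the same doctest style; the file name is hypothetical.
# >>> cm = ComputeMask()
# >>> cm.inputs.mean_volume = "mean_epi.nii"  # sets the histogram threshold
# >>> cm.inputs.cc = True  # keep only the largest connected component
# >>> cm.run()  # doctest: +SKIP  -- saves <basename>_mask.<ext>, per _run_interface below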
+ ), + ) m = traits.Float(desc="lower fraction of the histogram to be discarded") M = traits.Float(desc="upper fraction of the histogram to be discarded") cc = traits.Bool(desc="Keep only the largest connected component") @@ -36,14 +47,16 @@ class ComputeMask(NipyBaseInterface): def _run_interface(self, runtime): from nipy.labs.mask import compute_mask + args = {} for key in [ - k for k, _ in list(self.inputs.items()) - if k not in BaseInterfaceInputSpec().trait_names() + k + for k, _ in list(self.inputs.items()) + if k not in BaseInterfaceInputSpec().trait_names() ]: value = getattr(self.inputs, key) if isdefined(value): - if key in ['mean_volume', 'reference_volume']: + if key in ["mean_volume", "reference_volume"]: nii = nb.load(value, mmap=NUMPY_MMAP) value = nii.get_data() args[key] = value @@ -53,7 +66,8 @@ def _run_interface(self, runtime): self._brain_mask_path = os.path.abspath("%s_mask.%s" % (name, ext)) nb.save( nb.Nifti1Image(brain_mask.astype(np.uint8), nii.affine), - self._brain_mask_path) + self._brain_mask_path, + ) return runtime @@ -65,34 +79,42 @@ def _list_outputs(self): class SpaceTimeRealignerInputSpec(BaseInterfaceInputSpec): in_file = InputMultiPath( - File(exists=True), - mandatory=True, - min_ver='0.4.0.dev', - desc="File to realign") - tr = traits.Float(desc="TR in seconds", requires=['slice_times']) + File(exists=True), mandatory=True, min_ver="0.4.0.dev", desc="File to realign" + ) + tr = traits.Float(desc="TR in seconds", requires=["slice_times"]) slice_times = traits.Either( traits.List(traits.Float()), - traits.Enum('asc_alt_2', 'asc_alt_2_1', 'asc_alt_half', - 'asc_alt_siemens', 'ascending', 'desc_alt_2', - 'desc_alt_half', 'descending'), - desc=('Actual slice acquisition times.')) + traits.Enum( + "asc_alt_2", + "asc_alt_2_1", + "asc_alt_half", + "asc_alt_siemens", + "ascending", + "desc_alt_2", + "desc_alt_half", + "descending", + ), + desc=("Actual slice acquisition times."), + ) slice_info = traits.Either( traits.Int, traits.List(min_len=2, max_len=2), - desc=('Single integer or length 2 sequence ' - 'If int, the axis in `images` that is the ' - 'slice axis. In a 4D image, this will ' - 'often be axis = 2. If a 2 sequence, then' - ' elements are ``(slice_axis, ' - 'slice_direction)``, where ``slice_axis`` ' - 'is the slice axis in the image as above, ' - 'and ``slice_direction`` is 1 if the ' - 'slices were acquired slice 0 first, slice' - ' -1 last, or -1 if acquired slice -1 ' - 'first, slice 0 last. If `slice_info` is ' - 'an int, assume ' - '``slice_direction`` == 1.'), - requires=['slice_times'], + desc=( + "Single integer or length 2 sequence " + "If int, the axis in `images` that is the " + "slice axis. In a 4D image, this will " + "often be axis = 2. If a 2 sequence, then" + " elements are ``(slice_axis, " + "slice_direction)``, where ``slice_axis`` " + "is the slice axis in the image as above, " + "and ``slice_direction`` is 1 if the " + "slices were acquired slice 0 first, slice" + " -1 last, or -1 if acquired slice -1 " + "first, slice 0 last. If `slice_info` is " + "an int, assume " + "``slice_direction`` == 1." + ), + requires=["slice_times"], ) @@ -100,8 +122,8 @@ class SpaceTimeRealignerOutputSpec(TraitedSpec): out_file = OutputMultiPath(File(exists=True), desc="Realigned files") par_file = OutputMultiPath( File(exists=True), - desc=("Motion parameter files. Angles are not " - "euler angles")) + desc=("Motion parameter files. 
Angles are not " "euler angles"), + ) class SpaceTimeRealigner(NipyBaseInterface): @@ -141,18 +163,20 @@ class SpaceTimeRealigner(NipyBaseInterface): input_spec = SpaceTimeRealignerInputSpec output_spec = SpaceTimeRealignerOutputSpec - keywords = ['slice timing', 'motion correction'] + keywords = ["slice timing", "motion correction"] def _run_interface(self, runtime): from nipy import save_image, load_image + all_ims = [load_image(fname) for fname in self.inputs.in_file] if not isdefined(self.inputs.slice_times): - from nipy.algorithms.registration.groupwise_registration import \ - SpaceRealign + from nipy.algorithms.registration.groupwise_registration import SpaceRealign + R = SpaceRealign(all_ims) else: from nipy.algorithms.registration import SpaceTimeRealign + R = SpaceTimeRealign( all_ims, tr=self.inputs.tr, @@ -168,24 +192,25 @@ def _run_interface(self, runtime): for j, corr in enumerate(corr_run): self._out_file_path.append( - os.path.abspath('corr_%s.nii.gz' % - (split_filename(self.inputs.in_file[j])[1]))) + os.path.abspath( + "corr_%s.nii.gz" % (split_filename(self.inputs.in_file[j])[1]) + ) + ) save_image(corr, self._out_file_path[j]) self._par_file_path.append( - os.path.abspath('%s.par' % - (os.path.split(self.inputs.in_file[j])[1]))) - mfile = open(self._par_file_path[j], 'w') + os.path.abspath("%s.par" % (os.path.split(self.inputs.in_file[j])[1])) + ) + mfile = open(self._par_file_path[j], "w") motion = R._transforms[j] # nipy does not encode euler angles. return in original form of # translation followed by rotation vector see: # http://en.wikipedia.org/wiki/Rodrigues'_rotation_formula for i, mo in enumerate(motion): params = [ - '%.10f' % item - for item in np.hstack((mo.translation, mo.rotation)) + "%.10f" % item for item in np.hstack((mo.translation, mo.rotation)) ] - string = ' '.join(params) + '\n' + string = " ".join(params) + "\n" mfile.write(string) mfile.close() @@ -193,23 +218,23 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = self._out_file_path - outputs['par_file'] = self._par_file_path + outputs["out_file"] = self._out_file_path + outputs["par_file"] = self._par_file_path return outputs class TrimInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc="EPI image to trim") - begin_index = traits.Int(0, usedefault=True, desc='first volume') + begin_index = traits.Int(0, usedefault=True, desc="first volume") end_index = traits.Int( - 0, - usedefault=True, - desc='last volume indexed as in python (and 0 for last)') - out_file = File(desc='output filename') + 0, usedefault=True, desc="last volume indexed as in python (and 0 for last)" + ) + out_file = File(desc="output filename") suffix = traits.Str( - '_trim', + "_trim", usedefault=True, - desc='suffix for out_file to use if no out_file provided') + desc="suffix for out_file to use if no out_file provided", + ) class TrimOutputSpec(TraitedSpec): @@ -233,7 +258,7 @@ class Trim(NipyBaseInterface): output_spec = TrimOutputSpec def _run_interface(self, runtime): - out_file = self._list_outputs()['out_file'] + out_file = self._list_outputs()["out_file"] nii = nb.load(self.inputs.in_file) if self.inputs.end_index == 0: s = slice(self.inputs.begin_index, nii.shape[3]) @@ -245,11 +270,10 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_file - if not isdefined(outputs['out_file']): - outputs['out_file'] = fname_presuffix( - 
self.inputs.in_file, - newpath=os.getcwd(), - suffix=self.inputs.suffix) - outputs['out_file'] = os.path.abspath(outputs['out_file']) + outputs["out_file"] = self.inputs.out_file + if not isdefined(outputs["out_file"]): + outputs["out_file"] = fname_presuffix( + self.inputs.in_file, newpath=os.getcwd(), suffix=self.inputs.suffix + ) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs diff --git a/nipype/interfaces/nipy/tests/test_auto_ComputeMask.py b/nipype/interfaces/nipy/tests/test_auto_ComputeMask.py index 943b3f43d4..aa34d55caf 100644 --- a/nipype/interfaces/nipy/tests/test_auto_ComputeMask.py +++ b/nipype/interfaces/nipy/tests/test_auto_ComputeMask.py @@ -7,19 +7,18 @@ def test_ComputeMask_inputs(): M=dict(), cc=dict(), m=dict(), - mean_volume=dict( - extensions=None, - mandatory=True, - ), - reference_volume=dict(extensions=None, ), + mean_volume=dict(extensions=None, mandatory=True,), + reference_volume=dict(extensions=None,), ) inputs = ComputeMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ComputeMask_outputs(): - output_map = dict(brain_mask=dict(extensions=None, ), ) + output_map = dict(brain_mask=dict(extensions=None,),) outputs = ComputeMask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/nipy/tests/test_auto_EstimateContrast.py b/nipype/interfaces/nipy/tests/test_auto_EstimateContrast.py index 3f9d725fac..c89423bc74 100644 --- a/nipype/interfaces/nipy/tests/test_auto_EstimateContrast.py +++ b/nipype/interfaces/nipy/tests/test_auto_EstimateContrast.py @@ -4,33 +4,25 @@ def test_EstimateContrast_inputs(): input_map = dict( - axis=dict(mandatory=True, ), - beta=dict( - extensions=None, - mandatory=True, - ), - constants=dict(mandatory=True, ), - contrasts=dict(mandatory=True, ), - dof=dict(mandatory=True, ), - mask=dict(extensions=None, ), - nvbeta=dict(mandatory=True, ), - reg_names=dict(mandatory=True, ), - s2=dict( - extensions=None, - mandatory=True, - ), + axis=dict(mandatory=True,), + beta=dict(extensions=None, mandatory=True,), + constants=dict(mandatory=True,), + contrasts=dict(mandatory=True,), + dof=dict(mandatory=True,), + mask=dict(extensions=None,), + nvbeta=dict(mandatory=True,), + reg_names=dict(mandatory=True,), + s2=dict(extensions=None, mandatory=True,), ) inputs = EstimateContrast.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EstimateContrast_outputs(): - output_map = dict( - p_maps=dict(), - stat_maps=dict(), - z_maps=dict(), - ) + output_map = dict(p_maps=dict(), stat_maps=dict(), z_maps=dict(),) outputs = EstimateContrast.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/nipy/tests/test_auto_FitGLM.py b/nipype/interfaces/nipy/tests/test_auto_FitGLM.py index d453b98a25..7aa96870c7 100644 --- a/nipype/interfaces/nipy/tests/test_auto_FitGLM.py +++ b/nipype/interfaces/nipy/tests/test_auto_FitGLM.py @@ -4,33 +4,35 @@ def test_FitGLM_inputs(): input_map = dict( - TR=dict(mandatory=True, ), - drift_model=dict(usedefault=True, ), - hrf_model=dict(usedefault=True, ), - mask=dict(extensions=None, ), - method=dict(usedefault=True, ), - model=dict(usedefault=True, ), - normalize_design_matrix=dict(usedefault=True, ), - plot_design_matrix=dict(usedefault=True, ), - save_residuals=dict(usedefault=True, ), - 
session_info=dict(mandatory=True, ), + TR=dict(mandatory=True,), + drift_model=dict(usedefault=True,), + hrf_model=dict(usedefault=True,), + mask=dict(extensions=None,), + method=dict(usedefault=True,), + model=dict(usedefault=True,), + normalize_design_matrix=dict(usedefault=True,), + plot_design_matrix=dict(usedefault=True,), + save_residuals=dict(usedefault=True,), + session_info=dict(mandatory=True,), ) inputs = FitGLM.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FitGLM_outputs(): output_map = dict( - a=dict(extensions=None, ), + a=dict(extensions=None,), axis=dict(), - beta=dict(extensions=None, ), + beta=dict(extensions=None,), constants=dict(), dof=dict(), nvbeta=dict(), reg_names=dict(), - residuals=dict(extensions=None, ), - s2=dict(extensions=None, ), + residuals=dict(extensions=None,), + s2=dict(extensions=None,), ) outputs = FitGLM.output_spec() diff --git a/nipype/interfaces/nipy/tests/test_auto_Similarity.py b/nipype/interfaces/nipy/tests/test_auto_Similarity.py index e09754d382..ac0b3c853a 100644 --- a/nipype/interfaces/nipy/tests/test_auto_Similarity.py +++ b/nipype/interfaces/nipy/tests/test_auto_Similarity.py @@ -4,25 +4,21 @@ def test_Similarity_inputs(): input_map = dict( - mask1=dict(extensions=None, ), - mask2=dict(extensions=None, ), - metric=dict(usedefault=True, ), - volume1=dict( - extensions=None, - mandatory=True, - ), - volume2=dict( - extensions=None, - mandatory=True, - ), + mask1=dict(extensions=None,), + mask2=dict(extensions=None,), + metric=dict(usedefault=True,), + volume1=dict(extensions=None, mandatory=True,), + volume2=dict(extensions=None, mandatory=True,), ) inputs = Similarity.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Similarity_outputs(): - output_map = dict(similarity=dict(), ) + output_map = dict(similarity=dict(),) outputs = Similarity.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/nipy/tests/test_auto_SpaceTimeRealigner.py b/nipype/interfaces/nipy/tests/test_auto_SpaceTimeRealigner.py index c511737c70..2025d62498 100644 --- a/nipype/interfaces/nipy/tests/test_auto_SpaceTimeRealigner.py +++ b/nipype/interfaces/nipy/tests/test_auto_SpaceTimeRealigner.py @@ -4,24 +4,20 @@ def test_SpaceTimeRealigner_inputs(): input_map = dict( - in_file=dict( - mandatory=True, - min_ver='0.4.0.dev', - ), - slice_info=dict(requires=['slice_times'], ), + in_file=dict(mandatory=True, min_ver="0.4.0.dev",), + slice_info=dict(requires=["slice_times"],), slice_times=dict(), - tr=dict(requires=['slice_times'], ), + tr=dict(requires=["slice_times"],), ) inputs = SpaceTimeRealigner.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SpaceTimeRealigner_outputs(): - output_map = dict( - out_file=dict(), - par_file=dict(), - ) + output_map = dict(out_file=dict(), par_file=dict(),) outputs = SpaceTimeRealigner.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/nipy/tests/test_auto_Trim.py b/nipype/interfaces/nipy/tests/test_auto_Trim.py index ff7e66ab97..4b33c8b4b2 100644 --- a/nipype/interfaces/nipy/tests/test_auto_Trim.py +++ b/nipype/interfaces/nipy/tests/test_auto_Trim.py @@ -4,22 +4,21 @@ def test_Trim_inputs(): 
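# --- Editor's aside (not part of the diff): a hedged Trim sketch matching the
# --- interface reformatted above; the input name is hypothetical.
# >>> trim = Trim()
# >>> trim.inputs.in_file = "functional.nii"
# >>> trim.inputs.begin_index = 3  # drop the first three volumes
# >>> trim.run()  # doctest: +SKIP  -- default output name uses the "_trim" suffix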
input_map = dict( - begin_index=dict(usedefault=True, ), - end_index=dict(usedefault=True, ), - in_file=dict( - extensions=None, - mandatory=True, - ), - out_file=dict(extensions=None, ), - suffix=dict(usedefault=True, ), + begin_index=dict(usedefault=True,), + end_index=dict(usedefault=True,), + in_file=dict(extensions=None, mandatory=True,), + out_file=dict(extensions=None,), + suffix=dict(usedefault=True,), ) inputs = Trim.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Trim_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Trim.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/nipy/utils.py b/nipype/interfaces/nipy/utils.py index 22856c01d6..23be89b4f5 100644 --- a/nipype/interfaces/nipy/utils.py +++ b/nipype/interfaces/nipy/utils.py @@ -4,8 +4,7 @@ import nibabel as nb from .base import NipyBaseInterface, have_nipy -from ..base import (TraitedSpec, traits, BaseInterfaceInputSpec, - File, isdefined) +from ..base import TraitedSpec, traits, BaseInterfaceInputSpec, File, isdefined class SimilarityInputSpec(BaseInterfaceInputSpec): @@ -14,7 +13,7 @@ class SimilarityInputSpec(BaseInterfaceInputSpec): mask1 = File(exists=True, desc="3D volume") mask2 = File(exists=True, desc="3D volume") metric = traits.Either( - traits.Enum('cc', 'cr', 'crl1', 'mi', 'nmi', 'slr'), + traits.Enum("cc", "cr", "crl1", "mi", "nmi", "slr"), traits.Callable(), desc="""str or callable Cost-function for assessing image similarity. If a string, @@ -24,7 +23,8 @@ class SimilarityInputSpec(BaseInterfaceInputSpec): supervised log-likelihood ratio. If a callable, it should take a two-dimensional array representing the image joint histogram as an input and return a float.""", - usedefault=True) + usedefault=True, + ) class SimilarityOutputSpec(TraitedSpec): @@ -55,13 +55,19 @@ class Similarity(NipyBaseInterface): output_spec = SimilarityOutputSpec def __init__(self, **inputs): - warnings.warn(("This interface is deprecated since 0.10.0." - " Please use nipype.algorithms.metrics.Similarity"), - DeprecationWarning) + warnings.warn( + ( + "This interface is deprecated since 0.10.0." 
+ " Please use nipype.algorithms.metrics.Similarity" + ), + DeprecationWarning, + ) super(Similarity, self).__init__(**inputs) def _run_interface(self, runtime): - from nipy.algorithms.registration.histogram_registration import HistogramRegistration + from nipy.algorithms.registration.histogram_registration import ( + HistogramRegistration, + ) from nipy.algorithms.registration.affine import Affine vol1_nii = nb.load(self.inputs.volume1) @@ -82,12 +88,13 @@ def _run_interface(self, runtime): to_img=vol2_nii, similarity=self.inputs.metric, from_mask=mask1, - to_mask=mask2) + to_mask=mask2, + ) self._similarity = histreg.eval(Affine()) return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['similarity'] = self._similarity + outputs["similarity"] = self._similarity return outputs diff --git a/nipype/interfaces/nitime/__init__.py b/nipype/interfaces/nitime/__init__.py index 656d601b3e..f237859eb6 100644 --- a/nipype/interfaces/nitime/__init__.py +++ b/nipype/interfaces/nitime/__init__.py @@ -2,5 +2,8 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from .analysis import (CoherenceAnalyzerInputSpec, CoherenceAnalyzerOutputSpec, - CoherenceAnalyzer) +from .analysis import ( + CoherenceAnalyzerInputSpec, + CoherenceAnalyzerOutputSpec, + CoherenceAnalyzer, +) diff --git a/nipype/interfaces/nitime/analysis.py b/nipype/interfaces/nitime/analysis.py index 7f393efadf..93787c1964 100644 --- a/nipype/interfaces/nitime/analysis.py +++ b/nipype/interfaces/nitime/analysis.py @@ -16,12 +16,18 @@ from ...utils.misc import package_check from ...utils.filemanip import fname_presuffix from .base import NitimeBaseInterface -from ..base import (TraitedSpec, File, Undefined, traits, - isdefined, BaseInterfaceInputSpec) +from ..base import ( + TraitedSpec, + File, + Undefined, + traits, + isdefined, + BaseInterfaceInputSpec, +) have_nitime = True try: - package_check('nitime') + package_check("nitime") except ImportError: have_nitime = False @@ -30,83 +36,96 @@ class CoherenceAnalyzerInputSpec(BaseInterfaceInputSpec): # Input either csv file, or time-series object and use _xor_inputs to # discriminate - _xor_inputs = ('in_file', 'in_TS') + _xor_inputs = ("in_file", "in_TS") in_file = File( - desc=('csv file with ROIs on the columns and ' - 'time-points on the rows. ROI names at the top row'), + desc=( + "csv file with ROIs on the columns and " + "time-points on the rows. ROI names at the top row" + ), exists=True, - requires=('TR', )) + requires=("TR",), + ) # If you gave just a file name, you need to specify the sampling_rate: TR = traits.Float( - desc=('The TR used to collect the data' - 'in your csv file ')) + desc=("The TR used to collect the data" "in your csv file ") + ) - in_TS = traits.Any(desc='a nitime TimeSeries object') + in_TS = traits.Any(desc="a nitime TimeSeries object") NFFT = traits.Range( low=32, value=64, usedefault=True, - desc=('This is the size of the window used for ' - 'the spectral estimation. Use values between ' - '32 and the number of samples in your time-series.' - '(Defaults to 64.)')) + desc=( + "This is the size of the window used for " + "the spectral estimation. Use values between " + "32 and the number of samples in your time-series." 
+ "(Defaults to 64.)" + ), + ) n_overlap = traits.Range( low=0, value=0, usedefault=True, - desc=('The number of samples which overlap' - 'between subsequent windows.(Defaults to 0)')) + desc=( + "The number of samples which overlap" + "between subsequent windows.(Defaults to 0)" + ), + ) frequency_range = traits.List( value=[0.02, 0.15], usedefault=True, minlen=2, maxlen=2, - desc=('The range of frequencies over' - 'which the analysis will average.' - '[low,high] (Default [0.02,0.15]')) + desc=( + "The range of frequencies over" + "which the analysis will average." + "[low,high] (Default [0.02,0.15]" + ), + ) output_csv_file = File( - desc= - 'File to write outputs (coherence,time-delay) with file-names: file_name_ {coherence,timedelay}' + desc="File to write outputs (coherence,time-delay) with file-names: file_name_ {coherence,timedelay}" ) output_figure_file = File( - desc= - 'File to write output figures (coherence,time-delay) with file-names: file_name_{coherence,timedelay}. Possible formats: .png,.svg,.pdf,.jpg,...' + desc="File to write output figures (coherence,time-delay) with file-names: file_name_{coherence,timedelay}. Possible formats: .png,.svg,.pdf,.jpg,..." ) figure_type = traits.Enum( - 'matrix', - 'network', + "matrix", + "network", usedefault=True, - desc=("The type of plot to generate, where " - "'matrix' denotes a matrix image and" - "'network' denotes a graph representation." - " Default: 'matrix'")) + desc=( + "The type of plot to generate, where " + "'matrix' denotes a matrix image and" + "'network' denotes a graph representation." + " Default: 'matrix'" + ), + ) class CoherenceAnalyzerOutputSpec(TraitedSpec): coherence_array = traits.Array( - desc=('The pairwise coherence values' - 'between the ROIs')) + desc=("The pairwise coherence values" "between the ROIs") + ) timedelay_array = traits.Array( - desc=('The pairwise time delays between the' - 'ROIs (in seconds)')) + desc=("The pairwise time delays between the" "ROIs (in seconds)") + ) coherence_csv = File( - desc=('A csv file containing the pairwise ' - 'coherence values')) + desc=("A csv file containing the pairwise " "coherence values") + ) timedelay_csv = File( - desc=('A csv file containing the pairwise ' - 'time delay values')) + desc=("A csv file containing the pairwise " "time delay values") + ) - coherence_fig = File(desc=('Figure representing coherence values')) - timedelay_fig = File(desc=('Figure representing coherence values')) + coherence_fig = File(desc=("Figure representing coherence values")) + timedelay_fig = File(desc=("Figure representing coherence values")) class CoherenceAnalyzer(NitimeBaseInterface): @@ -132,20 +151,21 @@ def _read_csv(self): "First row of in_file should contain ROI names as strings of characters" ) - roi_names = open(self.inputs.in_file).readline().replace( - '\"', '').strip('\n').split(',') + roi_names = ( + open(self.inputs.in_file).readline().replace('"', "").strip("\n").split(",") + ) # Transpose, so that the time is the last dimension: - data = np.loadtxt(self.inputs.in_file, skiprows=1, delimiter=',').T + data = np.loadtxt(self.inputs.in_file, skiprows=1, delimiter=",").T return data, roi_names def _csv2ts(self): """ Read data from the in_file and generate a nitime TimeSeries object""" from nitime.timeseries import TimeSeries + data, roi_names = self._read_csv() - TS = TimeSeries( - data=data, sampling_interval=self.inputs.TR, time_unit='s') + TS = TimeSeries(data=data, sampling_interval=self.inputs.TR, time_unit="s") TS.metadata = dict(ROIs=roi_names) @@ -154,6 +174,7 
@@ def _csv2ts(self): # Rewrite _run_interface, but not run def _run_interface(self, runtime): import nitime.analysis as nta + lb, ub = self.inputs.frequency_range if self.inputs.in_TS is Undefined: @@ -165,21 +186,24 @@ def _run_interface(self, runtime): TS = self.inputs.in_TS # deal with creating or storing ROI names: - if 'ROIs' not in TS.metadata: - self.ROIs = ['roi_%d' % x for x, _ in enumerate(TS.data)] + if "ROIs" not in TS.metadata: + self.ROIs = ["roi_%d" % x for x, _ in enumerate(TS.data)] else: - self.ROIs = TS.metadata['ROIs'] + self.ROIs = TS.metadata["ROIs"] A = nta.CoherenceAnalyzer( TS, method=dict( - this_method='welch', + this_method="welch", NFFT=self.inputs.NFFT, - n_overlap=self.inputs.n_overlap)) + n_overlap=self.inputs.n_overlap, + ), + ) freq_idx = np.where( - (A.frequencies > self.inputs.frequency_range[0]) * - (A.frequencies < self.inputs.frequency_range[1]))[0] + (A.frequencies > self.inputs.frequency_range[0]) + * (A.frequencies < self.inputs.frequency_range[1]) + )[0] # Get the coherence matrix from the analyzer, averaging on the last # (frequency) dimension: (roi X roi array) @@ -198,29 +222,31 @@ def _list_outputs(self): # file name + path) # Always defined (the arrays): - outputs['coherence_array'] = self.coherence - outputs['timedelay_array'] = self.delay + outputs["coherence_array"] = self.coherence + outputs["timedelay_array"] = self.delay # Conditional - if isdefined(self.inputs.output_csv_file) and hasattr( - self, 'coherence'): + if isdefined(self.inputs.output_csv_file) and hasattr(self, "coherence"): # we need to make a function that we call here that writes the # coherence values to this file "coherence_csv" and makes the # time_delay csv file?? self._make_output_files() - outputs['coherence_csv'] = fname_presuffix( - self.inputs.output_csv_file, suffix='_coherence') + outputs["coherence_csv"] = fname_presuffix( + self.inputs.output_csv_file, suffix="_coherence" + ) - outputs['timedelay_csv'] = fname_presuffix( - self.inputs.output_csv_file, suffix='_delay') + outputs["timedelay_csv"] = fname_presuffix( + self.inputs.output_csv_file, suffix="_delay" + ) - if isdefined(self.inputs.output_figure_file) and hasattr( - self, 'coherence'): + if isdefined(self.inputs.output_figure_file) and hasattr(self, "coherence"): self._make_output_figures() - outputs['coherence_fig'] = fname_presuffix( - self.inputs.output_figure_file, suffix='_coherence') - outputs['timedelay_fig'] = fname_presuffix( - self.inputs.output_figure_file, suffix='_delay') + outputs["coherence_fig"] = fname_presuffix( + self.inputs.output_figure_file, suffix="_coherence" + ) + outputs["timedelay_fig"] = fname_presuffix( + self.inputs.output_figure_file, suffix="_delay" + ) return outputs @@ -228,18 +254,19 @@ def _make_output_files(self): """ Generate the output csv files. 
""" - for this in zip([self.coherence, self.delay], ['coherence', 'delay']): + for this in zip([self.coherence, self.delay], ["coherence", "delay"]): tmp_f = tempfile.mkstemp()[1] - np.savetxt(tmp_f, this[0], delimiter=',') + np.savetxt(tmp_f, this[0], delimiter=",") fid = open( - fname_presuffix( - self.inputs.output_csv_file, suffix='_%s' % this[1]), 'w+') + fname_presuffix(self.inputs.output_csv_file, suffix="_%s" % this[1]), + "w+", + ) # this writes ROIs as header line - fid.write(',' + ','.join(self.ROIs) + '\n') + fid.write("," + ",".join(self.ROIs) + "\n") # this writes ROI and data to a line for r, line in zip(self.ROIs, open(tmp_f)): - fid.write('%s,%s' % (r, line)) + fid.write("%s,%s" % (r, line)) fid.close() def _make_output_figures(self): @@ -249,31 +276,32 @@ def _make_output_figures(self): """ import nitime.viz as viz - if self.inputs.figure_type == 'matrix': + + if self.inputs.figure_type == "matrix": fig_coh = viz.drawmatrix_channels( - self.coherence, channel_names=self.ROIs, color_anchor=0) + self.coherence, channel_names=self.ROIs, color_anchor=0 + ) fig_coh.savefig( - fname_presuffix( - self.inputs.output_figure_file, suffix='_coherence')) + fname_presuffix(self.inputs.output_figure_file, suffix="_coherence") + ) fig_dt = viz.drawmatrix_channels( - self.delay, channel_names=self.ROIs, color_anchor=0) + self.delay, channel_names=self.ROIs, color_anchor=0 + ) fig_dt.savefig( - fname_presuffix( - self.inputs.output_figure_file, suffix='_delay')) + fname_presuffix(self.inputs.output_figure_file, suffix="_delay") + ) else: - fig_coh = viz.drawgraph_channels( - self.coherence, channel_names=self.ROIs) + fig_coh = viz.drawgraph_channels(self.coherence, channel_names=self.ROIs) fig_coh.savefig( - fname_presuffix( - self.inputs.output_figure_file, suffix='_coherence')) + fname_presuffix(self.inputs.output_figure_file, suffix="_coherence") + ) - fig_dt = viz.drawgraph_channels( - self.delay, channel_names=self.ROIs) + fig_dt = viz.drawgraph_channels(self.delay, channel_names=self.ROIs) fig_dt.savefig( - fname_presuffix( - self.inputs.output_figure_file, suffix='_delay')) + fname_presuffix(self.inputs.output_figure_file, suffix="_delay") + ) diff --git a/nipype/interfaces/nitime/base.py b/nipype/interfaces/nitime/base.py index d9139f2c71..fb31cafc75 100644 --- a/nipype/interfaces/nitime/base.py +++ b/nipype/interfaces/nitime/base.py @@ -7,4 +7,4 @@ class NitimeBaseInterface(LibraryBaseInterface): - _pkg = 'nitime' + _pkg = "nitime" diff --git a/nipype/interfaces/nitime/tests/test_auto_CoherenceAnalyzer.py b/nipype/interfaces/nitime/tests/test_auto_CoherenceAnalyzer.py index 0af36046ac..d188c27800 100644 --- a/nipype/interfaces/nitime/tests/test_auto_CoherenceAnalyzer.py +++ b/nipype/interfaces/nitime/tests/test_auto_CoherenceAnalyzer.py @@ -4,32 +4,31 @@ def test_CoherenceAnalyzer_inputs(): input_map = dict( - NFFT=dict(usedefault=True, ), + NFFT=dict(usedefault=True,), TR=dict(), - figure_type=dict(usedefault=True, ), - frequency_range=dict(usedefault=True, ), + figure_type=dict(usedefault=True,), + frequency_range=dict(usedefault=True,), in_TS=dict(), - in_file=dict( - extensions=None, - requires=('TR', ), - ), - n_overlap=dict(usedefault=True, ), - output_csv_file=dict(extensions=None, ), - output_figure_file=dict(extensions=None, ), + in_file=dict(extensions=None, requires=("TR",),), + n_overlap=dict(usedefault=True,), + output_csv_file=dict(extensions=None,), + output_figure_file=dict(extensions=None,), ) inputs = CoherenceAnalyzer.input_spec() for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CoherenceAnalyzer_outputs(): output_map = dict( coherence_array=dict(), - coherence_csv=dict(extensions=None, ), - coherence_fig=dict(extensions=None, ), + coherence_csv=dict(extensions=None,), + coherence_fig=dict(extensions=None,), timedelay_array=dict(), - timedelay_csv=dict(extensions=None, ), - timedelay_fig=dict(extensions=None, ), + timedelay_csv=dict(extensions=None,), + timedelay_fig=dict(extensions=None,), ) outputs = CoherenceAnalyzer.output_spec() diff --git a/nipype/interfaces/nitime/tests/test_nitime.py b/nipype/interfaces/nitime/tests/test_nitime.py index daea1a177b..507e1856ae 100644 --- a/nipype/interfaces/nitime/tests/test_nitime.py +++ b/nipype/interfaces/nitime/tests/test_nitime.py @@ -11,7 +11,7 @@ import nipype.interfaces.nitime as nitime no_nitime = not nitime.analysis.have_nitime -display_available = 'DISPLAY' in os.environ and os.environ['DISPLAY'] +display_available = "DISPLAY" in os.environ and os.environ["DISPLAY"] @pytest.mark.skipif(no_nitime, reason="nitime is not installed") @@ -20,14 +20,14 @@ def test_read_csv(): time-series object """ CA = nitime.CoherenceAnalyzer() CA.inputs.TR = 1.89 # bogus value just to pass traits test - CA.inputs.in_file = example_data('fmri_timeseries_nolabels.csv') + CA.inputs.in_file = example_data("fmri_timeseries_nolabels.csv") with pytest.raises(ValueError): CA._read_csv() - CA.inputs.in_file = example_data('fmri_timeseries.csv') + CA.inputs.in_file = example_data("fmri_timeseries.csv") data, roi_names = CA._read_csv() assert data[0][0] == 10125.9 - assert roi_names[0] == 'WM' + assert roi_names[0] == "WM" @pytest.mark.skipif(no_nitime, reason="nitime is not installed") @@ -40,11 +40,11 @@ def test_coherence_analysis(tmpdir): # This is the nipype interface analysis: CA = nitime.CoherenceAnalyzer() CA.inputs.TR = 1.89 - CA.inputs.in_file = example_data('fmri_timeseries.csv') + CA.inputs.in_file = example_data("fmri_timeseries.csv") if display_available: - tmp_png = tempfile.mkstemp(suffix='.png')[1] + tmp_png = tempfile.mkstemp(suffix=".png")[1] CA.inputs.output_figure_file = tmp_png - tmp_csv = tempfile.mkstemp(suffix='.csv')[1] + tmp_csv = tempfile.mkstemp(suffix=".csv")[1] CA.inputs.output_csv_file = tmp_csv o = CA.run() @@ -52,7 +52,7 @@ def test_coherence_analysis(tmpdir): # This is the nitime analysis: TR = 1.89 - data_rec = np.recfromcsv(example_data('fmri_timeseries.csv')) + data_rec = np.recfromcsv(example_data("fmri_timeseries.csv")) roi_names = np.array(data_rec.dtype.names) n_samples = data_rec.shape[0] data = np.zeros((len(roi_names), n_samples)) @@ -64,16 +64,18 @@ def test_coherence_analysis(tmpdir): assert (CA._csv2ts().data == T.data).all() - T.metadata['roi'] = roi_names + T.metadata["roi"] = roi_names C = nta.CoherenceAnalyzer( T, method=dict( - this_method='welch', - NFFT=CA.inputs.NFFT, - n_overlap=CA.inputs.n_overlap)) - - freq_idx = np.where((C.frequencies > CA.inputs.frequency_range[0]) * - (C.frequencies < CA.inputs.frequency_range[1]))[0] + this_method="welch", NFFT=CA.inputs.NFFT, n_overlap=CA.inputs.n_overlap + ), + ) + + freq_idx = np.where( + (C.frequencies > CA.inputs.frequency_range[0]) + * (C.frequencies < CA.inputs.frequency_range[1]) + )[0] # Extract the coherence and average across these frequency bands: # Averaging is done on the last dimension diff --git a/nipype/interfaces/petpvc.py b/nipype/interfaces/petpvc.py index ffa892e7e0..c59b7f2777 100644 --- 
a/nipype/interfaces/petpvc.py +++ b/nipype/interfaces/petpvc.py @@ -4,57 +4,88 @@ import os -from .base import TraitedSpec, CommandLineInputSpec, CommandLine, File, isdefined, traits +from .base import ( + TraitedSpec, + CommandLineInputSpec, + CommandLine, + File, + isdefined, + traits, +) from ..utils.filemanip import fname_presuffix from ..external.due import BibTeX pvc_methods = [ - 'GTM', 'IY', 'IY+RL', 'IY+VC', 'LABBE', 'LABBE+MTC', 'LABBE+MTC+RL', - 'LABBE+MTC+VC', 'LABBE+RBV', 'LABBE+RBV+RL', 'LABBE+RBV+VC', 'MG', 'MG+RL', - 'MG+VC', 'MTC', 'MTC+RL', 'MTC+VC', 'RBV', 'RBV+RL', 'RBV+VC', 'RL', 'VC' + "GTM", + "IY", + "IY+RL", + "IY+VC", + "LABBE", + "LABBE+MTC", + "LABBE+MTC+RL", + "LABBE+MTC+VC", + "LABBE+RBV", + "LABBE+RBV+RL", + "LABBE+RBV+VC", + "MG", + "MG+RL", + "MG+VC", + "MTC", + "MTC+RL", + "MTC+VC", + "RBV", + "RBV+RL", + "RBV+VC", + "RL", + "VC", ] class PETPVCInputSpec(CommandLineInputSpec): - in_file = File( - desc="PET image file", exists=True, mandatory=True, argstr="-i %s") - out_file = File( - desc="Output file", genfile=True, hash_files=False, argstr="-o %s") + in_file = File(desc="PET image file", exists=True, mandatory=True, argstr="-i %s") + out_file = File(desc="Output file", genfile=True, hash_files=False, argstr="-o %s") mask_file = File( - desc="Mask image file", exists=True, mandatory=True, argstr="-m %s") + desc="Mask image file", exists=True, mandatory=True, argstr="-m %s" + ) pvc = traits.Enum( - pvc_methods, desc="Desired PVC method", mandatory=True, argstr="-p %s") + pvc_methods, desc="Desired PVC method", mandatory=True, argstr="-p %s" + ) fwhm_x = traits.Float( desc="The full-width at half maximum in mm along x-axis", mandatory=True, - argstr="-x %.4f") + argstr="-x %.4f", + ) fwhm_y = traits.Float( desc="The full-width at half maximum in mm along y-axis", mandatory=True, - argstr="-y %.4f") + argstr="-y %.4f", + ) fwhm_z = traits.Float( desc="The full-width at half maximum in mm along z-axis", mandatory=True, - argstr="-z %.4f") + argstr="-z %.4f", + ) debug = traits.Bool( desc="Prints debug information", usedefault=True, default_value=False, - argstr="-d") + argstr="-d", + ) n_iter = traits.Int( - desc="Number of iterations", default_value=10, usedefault=True, - argstr="-n %d") + desc="Number of iterations", default_value=10, usedefault=True, argstr="-n %d" + ) n_deconv = traits.Int( desc="Number of deconvolution iterations", default_value=10, usedefault=True, - argstr="-k %d") + argstr="-k %d", + ) alpha = traits.Float( - desc="Alpha value", default_value=1.5, usedefault=True, - argstr="-a %.4f") + desc="Alpha value", default_value=1.5, usedefault=True, argstr="-a %.4f" + ) stop_crit = traits.Float( - desc="Stopping criterion", default_value=0.01, usedefault=True, - argstr="-s %.4f") + desc="Stopping criterion", default_value=0.01, usedefault=True, argstr="-s %.4f" + ) class PETPVCOutputSpec(TraitedSpec): @@ -145,48 +176,48 @@ class PETPVC(CommandLine): >>> pvc.inputs.fwhm_z = 2.0 >>> outs = pvc.run() #doctest: +SKIP """ + input_spec = PETPVCInputSpec output_spec = PETPVCOutputSpec - _cmd = 'petpvc' - - references_ = [{ - 'entry': - BibTeX( - "@article{0031-9155-61-22-7975," - "author={Benjamin A Thomas and Vesna Cuplov and Alexandre Bousse and " - "Adriana Mendes and Kris Thielemans and Brian F Hutton and Kjell Erlandsson}," - "title={PETPVC: a toolbox for performing partial volume correction " - "techniques in positron emission tomography}," - "journal={Physics in Medicine and Biology}," - "volume={61}," - "number={22}," - "pages={7975}," - 
"url={http://stacks.iop.org/0031-9155/61/i=22/a=7975}," - "doi={https://doi.org/10.1088/0031-9155/61/22/7975}," - "year={2016}," - "}"), - 'description': - 'PETPVC software implementation publication', - 'tags': ['implementation'], - }] + _cmd = "petpvc" + + references_ = [ + { + "entry": BibTeX( + "@article{0031-9155-61-22-7975," + "author={Benjamin A Thomas and Vesna Cuplov and Alexandre Bousse and " + "Adriana Mendes and Kris Thielemans and Brian F Hutton and Kjell Erlandsson}," + "title={PETPVC: a toolbox for performing partial volume correction " + "techniques in positron emission tomography}," + "journal={Physics in Medicine and Biology}," + "volume={61}," + "number={22}," + "pages={7975}," + "url={http://stacks.iop.org/0031-9155/61/i=22/a=7975}," + "doi={https://doi.org/10.1088/0031-9155/61/22/7975}," + "year={2016}," + "}" + ), + "description": "PETPVC software implementation publication", + "tags": ["implementation"], + } + ] def _list_outputs(self): outputs = self.output_spec().get() - outputs['out_file'] = self.inputs.out_file - if not isdefined(outputs['out_file']): + outputs["out_file"] = self.inputs.out_file + if not isdefined(outputs["out_file"]): method_name = self.inputs.pvc.lower() - outputs['out_file'] = self._gen_fname( - self.inputs.in_file, suffix='_{}_pvc'.format(method_name)) + outputs["out_file"] = self._gen_fname( + self.inputs.in_file, suffix="_{}_pvc".format(method_name) + ) - outputs['out_file'] = os.path.abspath(outputs['out_file']) + outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs - def _gen_fname(self, - basename, - cwd=None, - suffix=None, - change_ext=True, - ext='.nii.gz'): + def _gen_fname( + self, basename, cwd=None, suffix=None, change_ext=True, ext=".nii.gz" + ): """Generate a filename based on the given parameters. The filename will take the form: cwd/basename. @@ -211,24 +242,23 @@ def _gen_fname(self, New filename based on given parameters. """ - if basename == '': - msg = 'Unable to generate filename for command %s. ' % self.cmd - msg += 'basename is not set!' + if basename == "": + msg = "Unable to generate filename for command %s. " % self.cmd + msg += "basename is not set!" 
raise ValueError(msg) if cwd is None: cwd = os.getcwd() if change_ext: if suffix: - suffix = ''.join((suffix, ext)) + suffix = "".join((suffix, ext)) else: suffix = ext if suffix is None: - suffix = '' - fname = fname_presuffix( - basename, suffix=suffix, use_ext=False, newpath=cwd) + suffix = "" + fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) return fname def _gen_filename(self, name): - if name == 'out_file': - return self._list_outputs()['out_file'] + if name == "out_file": + return self._list_outputs()["out_file"] return None diff --git a/nipype/interfaces/quickshear.py b/nipype/interfaces/quickshear.py index 7508c0a356..7725abfeb5 100644 --- a/nipype/interfaces/quickshear.py +++ b/nipype/interfaces/quickshear.py @@ -10,27 +10,26 @@ class QuickshearInputSpec(CommandLineInputSpec): in_file = File( exists=True, position=1, - argstr='%s', + argstr="%s", mandatory=True, - desc="neuroimage to deface") + desc="neuroimage to deface", + ) mask_file = File( - exists=True, - position=2, - argstr='%s', - desc="brain mask", - mandatory=True) + exists=True, position=2, argstr="%s", desc="brain mask", mandatory=True + ) out_file = File( name_template="%s_defaced", - name_source='in_file', + name_source="in_file", position=3, - argstr='%s', + argstr="%s", desc="defaced output image", - keep_extension=True) + keep_extension=True, + ) buff = traits.Int( position=4, - argstr='%d', - desc='buffer size (in voxels) between shearing ' - 'plane and the brain') + argstr="%d", + desc="buffer size (in voxels) between shearing " "plane and the brain", + ) class QuickshearOutputSpec(TraitedSpec): @@ -72,19 +71,23 @@ class Quickshear(CommandLine): >>> inputnode.inputs.in_file = 'T1.nii' >>> res = deface_wf.run() # doctest: +SKIP """ - _cmd = 'quickshear' + + _cmd = "quickshear" input_spec = QuickshearInputSpec output_spec = QuickshearOutputSpec - references_ = [{ - 'entry': - BibTeX('@inproceedings{Schimke2011,' - 'address = {San Francisco},' - 'author = {Schimke, Nakeisha and Hale, John},' - 'booktitle = {Proceedings of the 2nd USENIX Conference on ' - 'Health Security and Privacy},' - 'title = {{Quickshear Defacing for Neuroimages}},' - 'year = {2011},' - 'month = sep}'), - 'tags': ['implementation'], - }] + references_ = [ + { + "entry": BibTeX( + "@inproceedings{Schimke2011," + "address = {San Francisco}," + "author = {Schimke, Nakeisha and Hale, John}," + "booktitle = {Proceedings of the 2nd USENIX Conference on " + "Health Security and Privacy}," + "title = {{Quickshear Defacing for Neuroimages}}," + "year = {2011}," + "month = sep}" + ), + "tags": ["implementation"], + } + ] diff --git a/nipype/interfaces/semtools/brains/__init__.py b/nipype/interfaces/semtools/brains/__init__.py index a8bd05a1be..dd369fb168 100644 --- a/nipype/interfaces/semtools/brains/__init__.py +++ b/nipype/interfaces/semtools/brains/__init__.py @@ -1,5 +1,8 @@ # -*- coding: utf-8 -*- from .segmentation import SimilarityIndex, BRAINSTalairach, BRAINSTalairachMask -from .utilities import (HistogramMatchingFilter, GenerateEdgeMapImage, - GeneratePurePlugMask) +from .utilities import ( + HistogramMatchingFilter, + GenerateEdgeMapImage, + GeneratePurePlugMask, +) from .classify import BRAINSPosteriorToContinuousClass diff --git a/nipype/interfaces/semtools/brains/classify.py b/nipype/interfaces/semtools/brains/classify.py index 89bb74f039..f59b53183e 100644 --- a/nipype/interfaces/semtools/brains/classify.py +++ b/nipype/interfaces/semtools/brains/classify.py @@ -5,49 +5,63 @@ import os -from ...base import 
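The Quickshear spec being reformatted here wires out_file to in_file through name_template="%s_defaced" with keep_extension=True, so a defaced output name is generated automatically. A hedged usage sketch, assuming T1.nii and brain_mask.nii exist in the working directory (both inputs are declared with exists=True, so assignment fails otherwise):

    # Usage sketch only; file names are placeholders and must exist on disk.
    from nipype.interfaces.quickshear import Quickshear

    qs = Quickshear(in_file="T1.nii", mask_file="brain_mask.nii", buff=50)
    # Positional argstrs order the command: quickshear <in> <mask> <out> <buff>
    print(qs.cmdline)  # e.g. quickshear T1.nii brain_mask.nii T1_defaced.nii 50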
(CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class BRAINSPosteriorToContinuousClassInputSpec(CommandLineInputSpec): inputWhiteVolume = File( desc="White Matter Posterior Volume", exists=True, - argstr="--inputWhiteVolume %s") + argstr="--inputWhiteVolume %s", + ) inputBasalGmVolume = File( desc="Basal Grey Matter Posterior Volume", exists=True, - argstr="--inputBasalGmVolume %s") + argstr="--inputBasalGmVolume %s", + ) inputSurfaceGmVolume = File( desc="Surface Grey Matter Posterior Volume", exists=True, - argstr="--inputSurfaceGmVolume %s") + argstr="--inputSurfaceGmVolume %s", + ) inputCsfVolume = File( - desc="CSF Posterior Volume", exists=True, argstr="--inputCsfVolume %s") + desc="CSF Posterior Volume", exists=True, argstr="--inputCsfVolume %s" + ) inputVbVolume = File( - desc="Venous Blood Posterior Volume", - exists=True, - argstr="--inputVbVolume %s") + desc="Venous Blood Posterior Volume", exists=True, argstr="--inputVbVolume %s" + ) inputCrblGmVolume = File( desc="Cerebellum Grey Matter Posterior Volume", exists=True, - argstr="--inputCrblGmVolume %s") + argstr="--inputCrblGmVolume %s", + ) inputCrblWmVolume = File( desc="Cerebellum White Matter Posterior Volume", exists=True, - argstr="--inputCrblWmVolume %s") + argstr="--inputCrblWmVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Continuous Tissue Classified Image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class BRAINSPosteriorToContinuousClassOutputSpec(TraitedSpec): - outputVolume = File( - desc="Output Continuous Tissue Classified Image", exists=True) + outputVolume = File(desc="Output Continuous Tissue Classified Image", exists=True) class BRAINSPosteriorToContinuousClass(SEMLikeCommandLine): @@ -72,5 +86,5 @@ class BRAINSPosteriorToContinuousClass(SEMLikeCommandLine): input_spec = BRAINSPosteriorToContinuousClassInputSpec output_spec = BRAINSPosteriorToContinuousClassOutputSpec _cmd = " BRAINSPosteriorToContinuousClass " - _outputs_filenames = {'outputVolume': 'outputVolume'} + _outputs_filenames = {"outputVolume": "outputVolume"} _redirect_x = False diff --git a/nipype/interfaces/semtools/brains/segmentation.py b/nipype/interfaces/semtools/brains/segmentation.py index fae5e4f1a2..5b0a901277 100644 --- a/nipype/interfaces/semtools/brains/segmentation.py +++ b/nipype/interfaces/semtools/brains/segmentation.py @@ -5,28 +5,38 @@ import os -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class SimilarityIndexInputSpec(CommandLineInputSpec): outputCSVFilename = File( - desc="output CSV Filename", - exists=True, - argstr="--outputCSVFilename %s") + desc="output CSV Filename", exists=True, argstr="--outputCSVFilename %s" + ) ANNContinuousVolume = File( desc="ANN Continuous volume to be compared to the manual volume", exists=True, - argstr="--ANNContinuousVolume %s") + argstr="--ANNContinuousVolume %s", + ) inputManualVolume = File( desc="input manual(reference) volume", exists=True, - 
argstr="--inputManualVolume %s") + argstr="--inputManualVolume %s", + ) thresholdInterval = traits.Float( - desc= - "Threshold interval to compute similarity index between zero and one", - argstr="--thresholdInterval %f") + desc="Threshold interval to compute similarity index between zero and one", + argstr="--thresholdInterval %f", + ) class SimilarityIndexOutputSpec(TraitedSpec): @@ -57,46 +67,47 @@ class SimilarityIndex(SEMLikeCommandLine): class BRAINSTalairachInputSpec(CommandLineInputSpec): AC = InputMultiPath( - traits.Float, desc="Location of AC Point ", sep=",", argstr="--AC %s") + traits.Float, desc="Location of AC Point ", sep=",", argstr="--AC %s" + ) ACisIndex = traits.Bool(desc="AC Point is Index", argstr="--ACisIndex ") PC = InputMultiPath( - traits.Float, desc="Location of PC Point ", sep=",", argstr="--PC %s") + traits.Float, desc="Location of PC Point ", sep=",", argstr="--PC %s" + ) PCisIndex = traits.Bool(desc="PC Point is Index", argstr="--PCisIndex ") SLA = InputMultiPath( - traits.Float, - desc="Location of SLA Point ", - sep=",", - argstr="--SLA %s") + traits.Float, desc="Location of SLA Point ", sep=",", argstr="--SLA %s" + ) SLAisIndex = traits.Bool(desc="SLA Point is Index", argstr="--SLAisIndex ") IRP = InputMultiPath( - traits.Float, - desc="Location of IRP Point ", - sep=",", - argstr="--IRP %s") + traits.Float, desc="Location of IRP Point ", sep=",", argstr="--IRP %s" + ) IRPisIndex = traits.Bool(desc="IRP Point is Index", argstr="--IRPisIndex ") inputVolume = File( desc="Input image used to define physical space of images", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) outputBox = traits.Either( traits.Bool, File(), hash_files=False, desc="Name of the resulting Talairach Bounding Box file", - argstr="--outputBox %s") + argstr="--outputBox %s", + ) outputGrid = traits.Either( traits.Bool, File(), hash_files=False, desc="Name of the resulting Talairach Grid file", - argstr="--outputGrid %s") + argstr="--outputGrid %s", + ) class BRAINSTalairachOutputSpec(TraitedSpec): outputBox = File( - desc="Name of the resulting Talairach Bounding Box file", exists=True) - outputGrid = File( - desc="Name of the resulting Talairach Grid file", exists=True) + desc="Name of the resulting Talairach Bounding Box file", exists=True + ) + outputGrid = File(desc="Name of the resulting Talairach Grid file", exists=True) class BRAINSTalairach(SEMLikeCommandLine): @@ -121,7 +132,7 @@ class BRAINSTalairach(SEMLikeCommandLine): input_spec = BRAINSTalairachInputSpec output_spec = BRAINSTalairachOutputSpec _cmd = " BRAINSTalairach " - _outputs_filenames = {'outputGrid': 'outputGrid', 'outputBox': 'outputBox'} + _outputs_filenames = {"outputGrid": "outputGrid", "outputBox": "outputBox"} _redirect_x = False @@ -129,34 +140,39 @@ class BRAINSTalairachMaskInputSpec(CommandLineInputSpec): inputVolume = File( desc="Input image used to define physical space of resulting mask", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) talairachParameters = File( desc="Name of the Talairach parameter file.", exists=True, - argstr="--talairachParameters %s") + argstr="--talairachParameters %s", + ) talairachBox = File( - desc="Name of the Talairach box file.", - exists=True, - argstr="--talairachBox %s") + desc="Name of the Talairach box file.", exists=True, argstr="--talairachBox %s" + ) hemisphereMode = traits.Enum( "left", "right", "both", desc="Mode for box creation: left, right, both", - argstr="--hemisphereMode %s") + argstr="--hemisphereMode %s", 
+ ) expand = traits.Bool( - desc="Expand exterior box to include surface CSF", argstr="--expand ") + desc="Expand exterior box to include surface CSF", argstr="--expand " + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output filename for the resulting binary image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class BRAINSTalairachMaskOutputSpec(TraitedSpec): outputVolume = File( - desc="Output filename for the resulting binary image", exists=True) + desc="Output filename for the resulting binary image", exists=True + ) class BRAINSTalairachMask(SEMLikeCommandLine): @@ -181,5 +197,5 @@ class BRAINSTalairachMask(SEMLikeCommandLine): input_spec = BRAINSTalairachMaskInputSpec output_spec = BRAINSTalairachMaskOutputSpec _cmd = " BRAINSTalairachMask " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False diff --git a/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSPosteriorToContinuousClass.py b/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSPosteriorToContinuousClass.py index a3478e0af0..fd22f39bba 100644 --- a/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSPosteriorToContinuousClass.py +++ b/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSPosteriorToContinuousClass.py @@ -4,51 +4,26 @@ def test_BRAINSPosteriorToContinuousClass_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputBasalGmVolume=dict( - argstr='--inputBasalGmVolume %s', - extensions=None, - ), - inputCrblGmVolume=dict( - argstr='--inputCrblGmVolume %s', - extensions=None, - ), - inputCrblWmVolume=dict( - argstr='--inputCrblWmVolume %s', - extensions=None, - ), - inputCsfVolume=dict( - argstr='--inputCsfVolume %s', - extensions=None, - ), - inputSurfaceGmVolume=dict( - argstr='--inputSurfaceGmVolume %s', - extensions=None, - ), - inputVbVolume=dict( - argstr='--inputVbVolume %s', - extensions=None, - ), - inputWhiteVolume=dict( - argstr='--inputWhiteVolume %s', - extensions=None, - ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputBasalGmVolume=dict(argstr="--inputBasalGmVolume %s", extensions=None,), + inputCrblGmVolume=dict(argstr="--inputCrblGmVolume %s", extensions=None,), + inputCrblWmVolume=dict(argstr="--inputCrblWmVolume %s", extensions=None,), + inputCsfVolume=dict(argstr="--inputCsfVolume %s", extensions=None,), + inputSurfaceGmVolume=dict(argstr="--inputSurfaceGmVolume %s", extensions=None,), + inputVbVolume=dict(argstr="--inputVbVolume %s", extensions=None,), + inputWhiteVolume=dict(argstr="--inputWhiteVolume %s", extensions=None,), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), ) inputs = BRAINSPosteriorToContinuousClass.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSPosteriorToContinuousClass_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = BRAINSPosteriorToContinuousClass.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSTalairach.py b/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSTalairach.py index 398330e5f9..0999af73d0 100644 --- 
a/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSTalairach.py +++ b/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSTalairach.py @@ -4,53 +4,30 @@ def test_BRAINSTalairach_inputs(): input_map = dict( - AC=dict( - argstr='--AC %s', - sep=',', - ), - ACisIndex=dict(argstr='--ACisIndex ', ), - IRP=dict( - argstr='--IRP %s', - sep=',', - ), - IRPisIndex=dict(argstr='--IRPisIndex ', ), - PC=dict( - argstr='--PC %s', - sep=',', - ), - PCisIndex=dict(argstr='--PCisIndex ', ), - SLA=dict( - argstr='--SLA %s', - sep=',', - ), - SLAisIndex=dict(argstr='--SLAisIndex ', ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - outputBox=dict( - argstr='--outputBox %s', - hash_files=False, - ), - outputGrid=dict( - argstr='--outputGrid %s', - hash_files=False, - ), + AC=dict(argstr="--AC %s", sep=",",), + ACisIndex=dict(argstr="--ACisIndex ",), + IRP=dict(argstr="--IRP %s", sep=",",), + IRPisIndex=dict(argstr="--IRPisIndex ",), + PC=dict(argstr="--PC %s", sep=",",), + PCisIndex=dict(argstr="--PCisIndex ",), + SLA=dict(argstr="--SLA %s", sep=",",), + SLAisIndex=dict(argstr="--SLAisIndex ",), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + outputBox=dict(argstr="--outputBox %s", hash_files=False,), + outputGrid=dict(argstr="--outputGrid %s", hash_files=False,), ) inputs = BRAINSTalairach.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSTalairach_outputs(): output_map = dict( - outputBox=dict(extensions=None, ), - outputGrid=dict(extensions=None, ), + outputBox=dict(extensions=None,), outputGrid=dict(extensions=None,), ) outputs = BRAINSTalairach.output_spec() diff --git a/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSTalairachMask.py b/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSTalairachMask.py index 97f353ad96..959733ce42 100644 --- a/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSTalairachMask.py +++ b/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSTalairachMask.py @@ -4,37 +4,24 @@ def test_BRAINSTalairachMask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - expand=dict(argstr='--expand ', ), - hemisphereMode=dict(argstr='--hemisphereMode %s', ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - talairachBox=dict( - argstr='--talairachBox %s', - extensions=None, - ), - talairachParameters=dict( - argstr='--talairachParameters %s', - extensions=None, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + expand=dict(argstr="--expand ",), + hemisphereMode=dict(argstr="--hemisphereMode %s",), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + talairachBox=dict(argstr="--talairachBox %s", extensions=None,), + talairachParameters=dict(argstr="--talairachParameters %s", extensions=None,), ) inputs = BRAINSTalairachMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSTalairachMask_outputs(): - output_map = 
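These test_auto_* modules all follow the same contract: the recorded input_map/output_map metadata must match the live trait metadata on the spec classes. The check below mirrors the loops in the tests above, with a deliberately tiny expected map:

    # Same assertion pattern as the auto-generated tests; the expected
    # metadata here is a hand-picked subset, not the full input_map.
    from nipype.interfaces.semtools.brains.segmentation import BRAINSTalairachMask

    expected = {
        "inputVolume": dict(argstr="--inputVolume %s", extensions=None),
        "outputVolume": dict(argstr="--outputVolume %s", hash_files=False),
    }
    inputs = BRAINSTalairachMask.input_spec()
    for key, metadata in expected.items():
        for metakey, value in metadata.items():
            assert getattr(inputs.traits()[key], metakey) == value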
dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = BRAINSTalairachMask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/brains/tests/test_auto_GenerateEdgeMapImage.py b/nipype/interfaces/semtools/brains/tests/test_auto_GenerateEdgeMapImage.py index 7c5256cea4..766c9c7a2b 100644 --- a/nipype/interfaces/semtools/brains/tests/test_auto_GenerateEdgeMapImage.py +++ b/nipype/interfaces/semtools/brains/tests/test_auto_GenerateEdgeMapImage.py @@ -4,39 +4,31 @@ def test_GenerateEdgeMapImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputMRVolumes=dict(argstr='--inputMRVolumes %s...', ), - inputMask=dict( - argstr='--inputMask %s', - extensions=None, - ), - lowerPercentileMatching=dict(argstr='--lowerPercentileMatching %f', ), - maximumOutputRange=dict(argstr='--maximumOutputRange %d', ), - minimumOutputRange=dict(argstr='--minimumOutputRange %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputEdgeMap=dict( - argstr='--outputEdgeMap %s', - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputMRVolumes=dict(argstr="--inputMRVolumes %s...",), + inputMask=dict(argstr="--inputMask %s", extensions=None,), + lowerPercentileMatching=dict(argstr="--lowerPercentileMatching %f",), + maximumOutputRange=dict(argstr="--maximumOutputRange %d",), + minimumOutputRange=dict(argstr="--minimumOutputRange %d",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputEdgeMap=dict(argstr="--outputEdgeMap %s", hash_files=False,), outputMaximumGradientImage=dict( - argstr='--outputMaximumGradientImage %s', - hash_files=False, + argstr="--outputMaximumGradientImage %s", hash_files=False, ), - upperPercentileMatching=dict(argstr='--upperPercentileMatching %f', ), + upperPercentileMatching=dict(argstr="--upperPercentileMatching %f",), ) inputs = GenerateEdgeMapImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GenerateEdgeMapImage_outputs(): output_map = dict( - outputEdgeMap=dict(extensions=None, ), - outputMaximumGradientImage=dict(extensions=None, ), + outputEdgeMap=dict(extensions=None,), + outputMaximumGradientImage=dict(extensions=None,), ) outputs = GenerateEdgeMapImage.output_spec() diff --git a/nipype/interfaces/semtools/brains/tests/test_auto_GeneratePurePlugMask.py b/nipype/interfaces/semtools/brains/tests/test_auto_GeneratePurePlugMask.py index 2cc68391bd..5cba5f42d9 100644 --- a/nipype/interfaces/semtools/brains/tests/test_auto_GeneratePurePlugMask.py +++ b/nipype/interfaces/semtools/brains/tests/test_auto_GeneratePurePlugMask.py @@ -4,29 +4,22 @@ def test_GeneratePurePlugMask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputImageModalities=dict(argstr='--inputImageModalities %s...', ), - numberOfSubSamples=dict( - argstr='--numberOfSubSamples %s', - sep=',', - ), - outputMaskFile=dict( - argstr='--outputMaskFile %s', - hash_files=False, - ), - threshold=dict(argstr='--threshold %f', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputImageModalities=dict(argstr="--inputImageModalities %s...",), + numberOfSubSamples=dict(argstr="--numberOfSubSamples %s", sep=",",), + outputMaskFile=dict(argstr="--outputMaskFile %s", 
hash_files=False,), + threshold=dict(argstr="--threshold %f",), ) inputs = GeneratePurePlugMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GeneratePurePlugMask_outputs(): - output_map = dict(outputMaskFile=dict(extensions=None, ), ) + output_map = dict(outputMaskFile=dict(extensions=None,),) outputs = GeneratePurePlugMask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/brains/tests/test_auto_HistogramMatchingFilter.py b/nipype/interfaces/semtools/brains/tests/test_auto_HistogramMatchingFilter.py index a257b52384..746857c627 100644 --- a/nipype/interfaces/semtools/brains/tests/test_auto_HistogramMatchingFilter.py +++ b/nipype/interfaces/semtools/brains/tests/test_auto_HistogramMatchingFilter.py @@ -4,44 +4,30 @@ def test_HistogramMatchingFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - histogramAlgorithm=dict(argstr='--histogramAlgorithm %s', ), - inputBinaryVolume=dict( - argstr='--inputBinaryVolume %s', - extensions=None, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - numberOfHistogramBins=dict(argstr='--numberOfHistogramBins %d', ), - numberOfMatchPoints=dict(argstr='--numberOfMatchPoints %d', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + histogramAlgorithm=dict(argstr="--histogramAlgorithm %s",), + inputBinaryVolume=dict(argstr="--inputBinaryVolume %s", extensions=None,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + numberOfHistogramBins=dict(argstr="--numberOfHistogramBins %d",), + numberOfMatchPoints=dict(argstr="--numberOfMatchPoints %d",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), referenceBinaryVolume=dict( - argstr='--referenceBinaryVolume %s', - extensions=None, + argstr="--referenceBinaryVolume %s", extensions=None, ), - referenceVolume=dict( - argstr='--referenceVolume %s', - extensions=None, - ), - verbose=dict(argstr='--verbose ', ), - writeHistogram=dict(argstr='--writeHistogram %s', ), + referenceVolume=dict(argstr="--referenceVolume %s", extensions=None,), + verbose=dict(argstr="--verbose ",), + writeHistogram=dict(argstr="--writeHistogram %s",), ) inputs = HistogramMatchingFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_HistogramMatchingFilter_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = HistogramMatchingFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/brains/tests/test_auto_SimilarityIndex.py b/nipype/interfaces/semtools/brains/tests/test_auto_SimilarityIndex.py index 8a09941f16..348ecf67d0 100644 --- a/nipype/interfaces/semtools/brains/tests/test_auto_SimilarityIndex.py +++ b/nipype/interfaces/semtools/brains/tests/test_auto_SimilarityIndex.py @@ -4,30 +4,20 @@ def test_SimilarityIndex_inputs(): input_map = dict( - ANNContinuousVolume=dict( - argstr='--ANNContinuousVolume %s', - extensions=None, - ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputManualVolume=dict( - argstr='--inputManualVolume %s', - 
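The metadata being reshuffled in these dicts drives command-line rendering: argstr is %-formatted with the trait value, sep joins multi-value inputs (e.g. numberOfSubSamples above), and flag traits render their bare argstr. A simplified sketch of that rule, not nipype's actual _format_arg:

    # Hedged approximation of nipype's argument formatting.
    def format_arg(argstr, value, sep=None):
        if isinstance(value, (list, tuple)):
            return argstr % sep.join(str(v) for v in value)
        if isinstance(value, bool):
            return argstr if value else ""   # flag-style argstr like "--expand "
        return argstr % value

    print(format_arg("--numberOfSubSamples %s", [3, 3, 3], sep=","))
    # --numberOfSubSamples 3,3,3
    print(format_arg("--threshold %f", 0.2))
    # --threshold 0.200000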
extensions=None, - ), - outputCSVFilename=dict( - argstr='--outputCSVFilename %s', - extensions=None, - ), - thresholdInterval=dict(argstr='--thresholdInterval %f', ), + ANNContinuousVolume=dict(argstr="--ANNContinuousVolume %s", extensions=None,), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputManualVolume=dict(argstr="--inputManualVolume %s", extensions=None,), + outputCSVFilename=dict(argstr="--outputCSVFilename %s", extensions=None,), + thresholdInterval=dict(argstr="--thresholdInterval %f",), ) inputs = SimilarityIndex.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SimilarityIndex_outputs(): output_map = dict() outputs = SimilarityIndex.output_spec() diff --git a/nipype/interfaces/semtools/brains/utilities.py b/nipype/interfaces/semtools/brains/utilities.py index d794c9c587..7c6351eaab 100644 --- a/nipype/interfaces/semtools/brains/utilities.py +++ b/nipype/interfaces/semtools/brains/utilities.py @@ -5,47 +5,62 @@ import os -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class HistogramMatchingFilterInputSpec(CommandLineInputSpec): inputVolume = File( desc="The Input image to be computed for statistics", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) referenceVolume = File( desc="The Input image to be computed for statistics", exists=True, - argstr="--referenceVolume %s") + argstr="--referenceVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Image File Name", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) referenceBinaryVolume = File( - desc="referenceBinaryVolume", - exists=True, - argstr="--referenceBinaryVolume %s") + desc="referenceBinaryVolume", exists=True, argstr="--referenceBinaryVolume %s" + ) inputBinaryVolume = File( - desc="inputBinaryVolume", exists=True, argstr="--inputBinaryVolume %s") + desc="inputBinaryVolume", exists=True, argstr="--inputBinaryVolume %s" + ) numberOfMatchPoints = traits.Int( - desc=" number of histogram matching points", - argstr="--numberOfMatchPoints %d") + desc=" number of histogram matching points", argstr="--numberOfMatchPoints %d" + ) numberOfHistogramBins = traits.Int( - desc=" number of histogram bin", argstr="--numberOfHistogramBins %d") + desc=" number of histogram bin", argstr="--numberOfHistogramBins %d" + ) writeHistogram = traits.Str( - desc= - " decide if histogram data would be written with prefixe of the file name", - argstr="--writeHistogram %s") + desc=" decide if histogram data would be written with prefixe of the file name", + argstr="--writeHistogram %s", + ) histogramAlgorithm = traits.Enum( "OtsuHistogramMatching", desc=" histogram algrithm selection", - argstr="--histogramAlgorithm %s") + argstr="--histogramAlgorithm %s", + ) verbose = traits.Bool( - desc=" verbose mode running for debbuging", argstr="--verbose ") + desc=" verbose mode running for debbuging", argstr="--verbose " + ) class HistogramMatchingFilterOutputSpec(TraitedSpec): @@ -68,58 +83,62 @@ class HistogramMatchingFilter(SEMLikeCommandLine): input_spec = HistogramMatchingFilterInputSpec output_spec = 
HistogramMatchingFilterOutputSpec _cmd = " HistogramMatchingFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class GenerateEdgeMapImageInputSpec(CommandLineInputSpec): inputMRVolumes = InputMultiPath( File(exists=True), - desc= - "List of input structural MR volumes to create the maximum edgemap", - argstr="--inputMRVolumes %s...") + desc="List of input structural MR volumes to create the maximum edgemap", + argstr="--inputMRVolumes %s...", + ) inputMask = File( - desc= - "Input mask file name. If set, image histogram percentiles will be calculated within the mask", + desc="Input mask file name. If set, image histogram percentiles will be calculated within the mask", exists=True, - argstr="--inputMask %s") + argstr="--inputMask %s", + ) minimumOutputRange = traits.Int( - desc= - "Map lower quantile and below to minimum output range. It should be a small number greater than zero. Default is 1", - argstr="--minimumOutputRange %d") + desc="Map lower quantile and below to minimum output range. It should be a small number greater than zero. Default is 1", + argstr="--minimumOutputRange %d", + ) maximumOutputRange = traits.Int( - desc= - "Map upper quantile and above to maximum output range. Default is 255 that is the maximum range of unsigned char", - argstr="--maximumOutputRange %d") + desc="Map upper quantile and above to maximum output range. Default is 255 that is the maximum range of unsigned char", + argstr="--maximumOutputRange %d", + ) lowerPercentileMatching = traits.Float( - desc= - "Map lower quantile and below to minOutputRange. It should be a value between zero and one", - argstr="--lowerPercentileMatching %f") + desc="Map lower quantile and below to minOutputRange. It should be a value between zero and one", + argstr="--lowerPercentileMatching %f", + ) upperPercentileMatching = traits.Float( - desc= - "Map upper quantile and above to maxOutputRange. It should be a value between zero and one", - argstr="--upperPercentileMatching %f") + desc="Map upper quantile and above to maxOutputRange. 
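HistogramMatchingFilter above wraps a BRAINS tool, but the underlying operation — remapping one image's intensities so its histogram follows a reference — is easy to demonstrate with scikit-image, if available. A conceptual sketch on synthetic arrays; it does not reproduce the tool's Otsu-based variant or its binary-mask handling:

    # Conceptual analogue only, not the BRAINS implementation.
    import numpy as np
    from skimage.exposure import match_histograms

    rng = np.random.default_rng(0)
    moving = rng.gamma(2.0, 2.0, size=(64, 64))        # skewed intensities
    reference = rng.normal(100.0, 15.0, size=(64, 64)) # target distribution
    matched = match_histograms(moving, reference)
    print(matched.mean(), reference.mean())            # now comparable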
It should be a value between zero and one", + argstr="--upperPercentileMatching %f", + ) outputEdgeMap = traits.Either( traits.Bool, File(), hash_files=False, desc="output edgemap file name", - argstr="--outputEdgeMap %s") + argstr="--outputEdgeMap %s", + ) outputMaximumGradientImage = traits.Either( traits.Bool, File(), hash_files=False, desc="output gradient image file name", - argstr="--outputMaximumGradientImage %s") + argstr="--outputMaximumGradientImage %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class GenerateEdgeMapImageOutputSpec(TraitedSpec): outputEdgeMap = File(desc="(required) output file name", exists=True) outputMaximumGradientImage = File( - desc="output gradient image file name", exists=True) + desc="output gradient image file name", exists=True + ) class GenerateEdgeMapImage(SEMLikeCommandLine): @@ -139,8 +158,8 @@ class GenerateEdgeMapImage(SEMLikeCommandLine): output_spec = GenerateEdgeMapImageOutputSpec _cmd = " GenerateEdgeMapImage " _outputs_filenames = { - 'outputEdgeMap': 'outputEdgeMap', - 'outputMaximumGradientImage': 'outputMaximumGradientImage' + "outputEdgeMap": "outputEdgeMap", + "outputMaximumGradientImage": "outputMaximumGradientImage", } _redirect_x = False @@ -149,27 +168,28 @@ class GeneratePurePlugMaskInputSpec(CommandLineInputSpec): inputImageModalities = InputMultiPath( File(exists=True), desc="List of input image file names to create pure plugs mask", - argstr="--inputImageModalities %s...") + argstr="--inputImageModalities %s...", + ) threshold = traits.Float( - desc="threshold value to define class membership", - argstr="--threshold %f") + desc="threshold value to define class membership", argstr="--threshold %f" + ) numberOfSubSamples = InputMultiPath( traits.Int, - desc= - "Number of continous index samples taken at each direction of lattice space for each plug volume", + desc="Number of continous index samples taken at each direction of lattice space for each plug volume", sep=",", - argstr="--numberOfSubSamples %s") + argstr="--numberOfSubSamples %s", + ) outputMaskFile = traits.Either( traits.Bool, File(), hash_files=False, desc="Output binary mask file name", - argstr="--outputMaskFile %s") + argstr="--outputMaskFile %s", + ) class GeneratePurePlugMaskOutputSpec(TraitedSpec): - outputMaskFile = File( - desc="(required) Output binary mask file name", exists=True) + outputMaskFile = File(desc="(required) Output binary mask file name", exists=True) class GeneratePurePlugMask(SEMLikeCommandLine): @@ -188,5 +208,5 @@ class GeneratePurePlugMask(SEMLikeCommandLine): input_spec = GeneratePurePlugMaskInputSpec output_spec = GeneratePurePlugMaskOutputSpec _cmd = " GeneratePurePlugMask " - _outputs_filenames = {'outputMaskFile': 'outputMaskFile'} + _outputs_filenames = {"outputMaskFile": "outputMaskFile"} _redirect_x = False diff --git a/nipype/interfaces/semtools/converters.py b/nipype/interfaces/semtools/converters.py index de638935e5..f3c1d432f5 100644 --- a/nipype/interfaces/semtools/converters.py +++ b/nipype/interfaces/semtools/converters.py @@ -5,23 +5,35 @@ import os -from ..base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ..base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class 
DWISimpleCompareInputSpec(CommandLineInputSpec): inputVolume1 = File( desc="First input volume (.nhdr or .nrrd)", exists=True, - argstr="--inputVolume1 %s") + argstr="--inputVolume1 %s", + ) inputVolume2 = File( desc="Second input volume (.nhdr or .nrrd)", exists=True, - argstr="--inputVolume2 %s") + argstr="--inputVolume2 %s", + ) checkDWIData = traits.Bool( desc="check for existence of DWI data, and if present, compare it", - argstr="--checkDWIData ") + argstr="--checkDWIData ", + ) class DWISimpleCompareOutputSpec(TraitedSpec): @@ -58,11 +70,13 @@ class DWICompareInputSpec(CommandLineInputSpec): inputVolume1 = File( desc="First input volume (.nhdr or .nrrd)", exists=True, - argstr="--inputVolume1 %s") + argstr="--inputVolume1 %s", + ) inputVolume2 = File( desc="Second input volume (.nhdr or .nrrd)", exists=True, - argstr="--inputVolume2 %s") + argstr="--inputVolume2 %s", + ) class DWICompareOutputSpec(TraitedSpec): diff --git a/nipype/interfaces/semtools/diffusion/__init__.py b/nipype/interfaces/semtools/diffusion/__init__.py index f9f414d087..28044fe337 100644 --- a/nipype/interfaces/semtools/diffusion/__init__.py +++ b/nipype/interfaces/semtools/diffusion/__init__.py @@ -2,13 +2,29 @@ from .diffusion import dtiaverage, dtiestim, dtiprocess, DWIConvert from .tractography import * from .gtract import ( - gtractTransformToDisplacementField, gtractInvertBSplineTransform, - gtractConcatDwi, gtractAverageBvalues, gtractCoregBvalues, - gtractResampleAnisotropy, gtractResampleCodeImage, - gtractCopyImageOrientation, gtractCreateGuideFiber, gtractAnisotropyMap, - gtractClipAnisotropy, gtractResampleB0, gtractInvertRigidTransform, - gtractImageConformity, compareTractInclusion, gtractFastMarchingTracking, - gtractInvertDisplacementField, gtractCoRegAnatomy, - gtractResampleDWIInPlace, gtractCostFastMarching, gtractFiberTracking, - extractNrrdVectorIndex, gtractResampleFibers, gtractTensor) + gtractTransformToDisplacementField, + gtractInvertBSplineTransform, + gtractConcatDwi, + gtractAverageBvalues, + gtractCoregBvalues, + gtractResampleAnisotropy, + gtractResampleCodeImage, + gtractCopyImageOrientation, + gtractCreateGuideFiber, + gtractAnisotropyMap, + gtractClipAnisotropy, + gtractResampleB0, + gtractInvertRigidTransform, + gtractImageConformity, + compareTractInclusion, + gtractFastMarchingTracking, + gtractInvertDisplacementField, + gtractCoRegAnatomy, + gtractResampleDWIInPlace, + gtractCostFastMarching, + gtractFiberTracking, + extractNrrdVectorIndex, + gtractResampleFibers, + gtractTensor, +) from .maxcurvature import maxcurvature diff --git a/nipype/interfaces/semtools/diffusion/diffusion.py b/nipype/interfaces/semtools/diffusion/diffusion.py index af943a04fb..90377b8ee0 100644 --- a/nipype/interfaces/semtools/diffusion/diffusion.py +++ b/nipype/interfaces/semtools/diffusion/diffusion.py @@ -5,26 +5,37 @@ import os -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class dtiaverageInputSpec(CommandLineInputSpec): inputs = InputMultiPath( File(exists=True), desc="List of all the tensor fields to be averaged", - argstr="--inputs %s...") + argstr="--inputs %s...", + ) tensor_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Averaged tensor volume", - argstr="--tensor_output %s") 
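dtiaverage, whose spec begins here, reduces a list of tensor volumes to one averaged volume. A rough nibabel sketch of a plain voxelwise mean, with hypothetical file names; the real tool reads NRRD tensor data and may average in a more principled tensor space than this element-wise version:

    # Hedged sketch of the averaging step only; inputs are hypothetical.
    import nibabel as nib
    import numpy as np

    inputs = ["dti_subj1.nii", "dti_subj2.nii"]   # placeholder tensor images
    imgs = [nib.load(f) for f in inputs]
    data = np.mean([img.get_fdata() for img in imgs], axis=0)
    nib.save(nib.Nifti1Image(data, imgs[0].affine), "tensor_output.nii")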
+ argstr="--tensor_output %s", + ) DTI_double = traits.Bool( - desc= - "Tensor components are saved as doubles (cannot be visualized in Slicer)", - argstr="--DTI_double ") + desc="Tensor components are saved as doubles (cannot be visualized in Slicer)", + argstr="--DTI_double ", + ) verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") @@ -57,112 +68,112 @@ class dtiaverage(SEMLikeCommandLine): input_spec = dtiaverageInputSpec output_spec = dtiaverageOutputSpec _cmd = " dtiaverage " - _outputs_filenames = {'tensor_output': 'tensor_output.nii'} + _outputs_filenames = {"tensor_output": "tensor_output.nii"} _redirect_x = False class dtiestimInputSpec(CommandLineInputSpec): dwi_image = File( - desc="DWI image volume (required)", - exists=True, - argstr="--dwi_image %s") + desc="DWI image volume (required)", exists=True, argstr="--dwi_image %s" + ) tensor_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Tensor OutputImage", - argstr="--tensor_output %s") + argstr="--tensor_output %s", + ) B0 = traits.Either( traits.Bool, File(), hash_files=False, desc="Baseline image, average of all baseline images", - argstr="--B0 %s") + argstr="--B0 %s", + ) idwi = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "idwi output image. Image with isotropic diffusion-weighted information = geometric mean of diffusion images", - argstr="--idwi %s") + desc="idwi output image. Image with isotropic diffusion-weighted information = geometric mean of diffusion images", + argstr="--idwi %s", + ) B0_mask_output = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "B0 mask used for the estimation. B0 thresholded either with the -t option value or the automatic OTSU value", - argstr="--B0_mask_output %s") + desc="B0 mask used for the estimation. B0 thresholded either with the -t option value or the automatic OTSU value", + argstr="--B0_mask_output %s", + ) brain_mask = File( - desc= - "Brain mask. Image where for every voxel == 0 the tensors are not estimated. Be aware that in addition a threshold based masking will be performed by default. If such an additional threshold masking is NOT desired, then use option -t 0.", + desc="Brain mask. Image where for every voxel == 0 the tensors are not estimated. Be aware that in addition a threshold based masking will be performed by default. If such an additional threshold masking is NOT desired, then use option -t 0.", exists=True, - argstr="--brain_mask %s") + argstr="--brain_mask %s", + ) bad_region_mask = File( - desc= - "Bad region mask. Image where for every voxel > 0 the tensors are not estimated", + desc="Bad region mask. Image where for every voxel > 0 the tensors are not estimated", exists=True, - argstr="--bad_region_mask %s") + argstr="--bad_region_mask %s", + ) method = traits.Enum( "lls", "wls", "nls", "ml", - desc= - "Esitmation method (lls:linear least squares, wls:weighted least squares, nls:non-linear least squares, ml:maximum likelihood)", - argstr="--method %s") + desc="Esitmation method (lls:linear least squares, wls:weighted least squares, nls:non-linear least squares, ml:maximum likelihood)", + argstr="--method %s", + ) correction = traits.Enum( "none", "zero", "abs", "nearest", - desc= - "Correct the tensors if computed tensor is not semi-definite positive", - argstr="--correction %s") + desc="Correct the tensors if computed tensor is not semi-definite positive", + argstr="--correction %s", + ) threshold = traits.Int( - desc= - "Baseline threshold for estimation. 
If not specified calculated using an OTSU threshold on the baseline image.", - argstr="--threshold %d") + desc="Baseline threshold for estimation. If not specified calculated using an OTSU threshold on the baseline image.", + argstr="--threshold %d", + ) weight_iterations = traits.Int( - desc= - "Number of iterations to recaluate weightings from tensor estimate", - argstr="--weight_iterations %d") + desc="Number of iterations to recaluate weightings from tensor estimate", + argstr="--weight_iterations %d", + ) step = traits.Float( - desc="Gradient descent step size (for nls and ml methods)", - argstr="--step %f") + desc="Gradient descent step size (for nls and ml methods)", argstr="--step %f" + ) sigma = traits.Float(argstr="--sigma %f") DTI_double = traits.Bool( - desc= - "Tensor components are saved as doubles (cannot be visualized in Slicer)", - argstr="--DTI_double ") + desc="Tensor components are saved as doubles (cannot be visualized in Slicer)", + argstr="--DTI_double ", + ) verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") defaultTensor = InputMultiPath( traits.Float, - desc= - "Default tensor used if estimated tensor is below a given threshold", + desc="Default tensor used if estimated tensor is below a given threshold", sep=",", - argstr="--defaultTensor %s") + argstr="--defaultTensor %s", + ) shiftNeg = traits.Bool( - desc= - "Shift eigenvalues so all are positive (accounts for bad tensors related to noise or acquisition error). This is the same option as the one available in DWIToDTIEstimation in Slicer (but instead of just adding the minimum eigenvalue to all the eigenvalues if it is smaller than 0, we use a coefficient to have stictly positive eigenvalues", - argstr="--shiftNeg ") + desc="Shift eigenvalues so all are positive (accounts for bad tensors related to noise or acquisition error). This is the same option as the one available in DWIToDTIEstimation in Slicer (but instead of just adding the minimum eigenvalue to all the eigenvalues if it is smaller than 0, we use a coefficient to have stictly positive eigenvalues", + argstr="--shiftNeg ", + ) shiftNegCoeff = traits.Float( - desc= - "Shift eigenvalues so all are positive (accounts for bad tensors related to noise or acquisition error). Instead of just adding the minimum eigenvalue to all the eigenvalues if it is smaller than 0, we use a coefficient to have stictly positive eigenvalues. Coefficient must be between 1.0 and 1.001 (included).", - argstr="--shiftNegCoeff %f") + desc="Shift eigenvalues so all are positive (accounts for bad tensors related to noise or acquisition error). Instead of just adding the minimum eigenvalue to all the eigenvalues if it is smaller than 0, we use a coefficient to have stictly positive eigenvalues. Coefficient must be between 1.0 and 1.001 (included).", + argstr="--shiftNegCoeff %f", + ) class dtiestimOutputSpec(TraitedSpec): tensor_output = File(desc="Tensor OutputImage", exists=True) - B0 = File( - desc="Baseline image, average of all baseline images", exists=True) + B0 = File(desc="Baseline image, average of all baseline images", exists=True) idwi = File( - desc= - "idwi output image. Image with isotropic diffusion-weighted information = geometric mean of diffusion images", - exists=True) + desc="idwi output image. Image with isotropic diffusion-weighted information = geometric mean of diffusion images", + exists=True, + ) B0_mask_output = File( - desc= - "B0 mask used for the estimation. 
B0 thresholded either with the -t option value or the automatic OTSU value", - exists=True) + desc="B0 mask used for the estimation. B0 thresholded either with the -t option value or the automatic OTSU value", + exists=True, + ) class dtiestim(SEMLikeCommandLine): @@ -212,165 +223,177 @@ class dtiestim(SEMLikeCommandLine): output_spec = dtiestimOutputSpec _cmd = " dtiestim " _outputs_filenames = { - 'B0': 'B0.nii', - 'idwi': 'idwi.nii', - 'tensor_output': 'tensor_output.nii', - 'B0_mask_output': 'B0_mask_output.nii' + "B0": "B0.nii", + "idwi": "idwi.nii", + "tensor_output": "tensor_output.nii", + "B0_mask_output": "B0_mask_output.nii", } _redirect_x = False class dtiprocessInputSpec(CommandLineInputSpec): - dti_image = File( - desc="DTI tensor volume", exists=True, argstr="--dti_image %s") + dti_image = File(desc="DTI tensor volume", exists=True, argstr="--dti_image %s") fa_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Fractional Anisotropy output file", - argstr="--fa_output %s") + argstr="--fa_output %s", + ) md_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Mean Diffusivity output file", - argstr="--md_output %s") + argstr="--md_output %s", + ) sigma = traits.Float(desc="Scale of gradients", argstr="--sigma %f") fa_gradient_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Fractional Anisotropy Gradient output file", - argstr="--fa_gradient_output %s") + argstr="--fa_gradient_output %s", + ) fa_gradmag_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Fractional Anisotropy Gradient Magnitude output file", - argstr="--fa_gradmag_output %s") + argstr="--fa_gradmag_output %s", + ) color_fa_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Color Fractional Anisotropy output file", - argstr="--color_fa_output %s") + argstr="--color_fa_output %s", + ) principal_eigenvector_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Principal Eigenvectors Output", - argstr="--principal_eigenvector_output %s") + argstr="--principal_eigenvector_output %s", + ) negative_eigenvector_output = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Negative Eigenvectors Output: create a binary image where if any of the eigen value is below zero, the voxel is set to 1, otherwise 0.", - argstr="--negative_eigenvector_output %s") + desc="Negative Eigenvectors Output: create a binary image where if any of the eigen value is below zero, the voxel is set to 1, otherwise 0.", + argstr="--negative_eigenvector_output %s", + ) frobenius_norm_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Frobenius Norm Output", - argstr="--frobenius_norm_output %s") + argstr="--frobenius_norm_output %s", + ) lambda1_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Axial Diffusivity - Lambda 1 (largest eigenvalue) output", - argstr="--lambda1_output %s") + argstr="--lambda1_output %s", + ) lambda2_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Lambda 2 (middle eigenvalue) output", - argstr="--lambda2_output %s") + argstr="--lambda2_output %s", + ) lambda3_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Lambda 3 (smallest eigenvalue) output", - argstr="--lambda3_output %s") + argstr="--lambda3_output %s", + ) RD_output = traits.Either( traits.Bool, File(), hash_files=False, desc="RD (Radial Diffusivity 1/2*(lambda2+lambda3)) output", - argstr="--RD_output %s") + argstr="--RD_output %s", + ) rot_output = traits.Either( 
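dtiestim's method trait names "lls" (linear least squares) as one estimator: take logs of the Stejskal-Tanner signal equation and solve ln S = ln S0 - b g^T D g for the six tensor components. A self-contained numpy sketch on one noiseless synthetic voxel with the minimal 6-direction scheme; the gradient table and values are made up:

    # Hedged single-voxel sketch of the "lls" fit; real code first masks
    # voxels by the B0 threshold described above.
    import numpy as np

    bvals = np.array([0.0, 1000, 1000, 1000, 1000, 1000, 1000])
    bvecs = np.array([[0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1],
                      [1, 1, 0], [1, 0, 1], [0, 1, 1]], dtype=float)
    bvecs[1:] /= np.linalg.norm(bvecs[1:], axis=1, keepdims=True)

    D_true = np.diag([1.7e-3, 0.4e-3, 0.4e-3])    # axial-ish synthetic tensor
    signal = 1000 * np.exp(-bvals * np.einsum("ij,jk,ik->i", bvecs, D_true, bvecs))

    gx, gy, gz = bvecs.T
    A = np.column_stack([np.ones_like(bvals),
                         -bvals * gx**2, -bvals * gy**2, -bvals * gz**2,
                         -2 * bvals * gx * gy, -2 * bvals * gx * gz,
                         -2 * bvals * gy * gz])
    coef, *_ = np.linalg.lstsq(A, np.log(signal), rcond=None)
    lnS0, Dxx, Dyy, Dzz, Dxy, Dxz, Dyz = coef
    print(Dxx, Dyy, Dzz)   # recovers the diagonal of D_true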
traits.Bool, File(), hash_files=False, desc="Rotated tensor output file. Must also specify the dof file.", - argstr="--rot_output %s") + argstr="--rot_output %s", + ) affineitk_file = File( desc="Transformation file for affine transformation. ITK format.", exists=True, - argstr="--affineitk_file %s") + argstr="--affineitk_file %s", + ) dof_file = File( - desc= - "Transformation file for affine transformation. This can be ITK format (or the outdated RView).", + desc="Transformation file for affine transformation. This can be ITK format (or the outdated RView).", exists=True, - argstr="--dof_file %s") + argstr="--dof_file %s", + ) newdof_file = File( - desc= - "Transformation file for affine transformation. RView NEW format. (txt file output of dof2mat)", + desc="Transformation file for affine transformation. RView NEW format. (txt file output of dof2mat)", exists=True, - argstr="--newdof_file %s") + argstr="--newdof_file %s", + ) mask = File( - desc= - "Mask tensors. Specify --outmask if you want to save the masked tensor field, otherwise the mask is applied just for the current processing ", + desc="Mask tensors. Specify --outmask if you want to save the masked tensor field, otherwise the mask is applied just for the current processing ", exists=True, - argstr="--mask %s") + argstr="--mask %s", + ) outmask = traits.Either( traits.Bool, File(), hash_files=False, desc="Name of the masked tensor field.", - argstr="--outmask %s") + argstr="--outmask %s", + ) hField = traits.Bool( - desc= - "forward and inverse transformations are h-fields instead of displacement fields", - argstr="--hField ") + desc="forward and inverse transformations are h-fields instead of displacement fields", + argstr="--hField ", + ) forward = File( - desc= - "Forward transformation. Assumed to be a deformation field in world coordinates, unless the --h-field option is specified.", + desc="Forward transformation. Assumed to be a deformation field in world coordinates, unless the --h-field option is specified.", exists=True, - argstr="--forward %s") + argstr="--forward %s", + ) deformation_output = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Warped tensor field based on a deformation field. This option requires the --forward,-F transformation to be specified.", - argstr="--deformation_output %s") + desc="Warped tensor field based on a deformation field. This option requires the --forward,-F transformation to be specified.", + argstr="--deformation_output %s", + ) interpolation = traits.Enum( "nearestneighbor", "linear", "cubic", desc="Interpolation type (nearestneighbor, linear, cubic)", - argstr="--interpolation %s") + argstr="--interpolation %s", + ) reorientation = traits.Enum( - "fs", - "ppd", - desc="Reorientation type (fs, ppd)", - argstr="--reorientation %s") + "fs", "ppd", desc="Reorientation type (fs, ppd)", argstr="--reorientation %s" + ) correction = traits.Enum( "none", "zero", "abs", "nearest", - desc= - "Correct the tensors if computed tensor is not semi-definite positive", - argstr="--correction %s") + desc="Correct the tensors if computed tensor is not semi-definite positive", + argstr="--correction %s", + ) scalar_float = traits.Bool( - desc= - "Write scalar [FA,MD] as unscaled float (with their actual values, otherwise scaled by 10 000). Also causes FA to be unscaled [0..1].", - argstr="--scalar_float ") + desc="Write scalar [FA,MD] as unscaled float (with their actual values, otherwise scaled by 10 000). 
Also causes FA to be unscaled [0..1].", + argstr="--scalar_float ", + ) DTI_double = traits.Bool( - desc= - "Tensor components are saved as doubles (cannot be visualized in Slicer)", - argstr="--DTI_double ") + desc="Tensor components are saved as doubles (cannot be visualized in Slicer)", + argstr="--DTI_double ", + ) verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") @@ -378,37 +401,36 @@ class dtiprocessOutputSpec(TraitedSpec): fa_output = File(desc="Fractional Anisotropy output file", exists=True) md_output = File(desc="Mean Diffusivity output file", exists=True) fa_gradient_output = File( - desc="Fractional Anisotropy Gradient output file", exists=True) + desc="Fractional Anisotropy Gradient output file", exists=True + ) fa_gradmag_output = File( - desc="Fractional Anisotropy Gradient Magnitude output file", - exists=True) - color_fa_output = File( - desc="Color Fractional Anisotropy output file", exists=True) + desc="Fractional Anisotropy Gradient Magnitude output file", exists=True + ) + color_fa_output = File(desc="Color Fractional Anisotropy output file", exists=True) principal_eigenvector_output = File( - desc="Principal Eigenvectors Output", exists=True) + desc="Principal Eigenvectors Output", exists=True + ) negative_eigenvector_output = File( - desc= - "Negative Eigenvectors Output: create a binary image where if any of the eigen value is below zero, the voxel is set to 1, otherwise 0.", - exists=True) + desc="Negative Eigenvectors Output: create a binary image where if any of the eigen value is below zero, the voxel is set to 1, otherwise 0.", + exists=True, + ) frobenius_norm_output = File(desc="Frobenius Norm Output", exists=True) lambda1_output = File( - desc="Axial Diffusivity - Lambda 1 (largest eigenvalue) output", - exists=True) - lambda2_output = File( - desc="Lambda 2 (middle eigenvalue) output", exists=True) - lambda3_output = File( - desc="Lambda 3 (smallest eigenvalue) output", exists=True) + desc="Axial Diffusivity - Lambda 1 (largest eigenvalue) output", exists=True + ) + lambda2_output = File(desc="Lambda 2 (middle eigenvalue) output", exists=True) + lambda3_output = File(desc="Lambda 3 (smallest eigenvalue) output", exists=True) RD_output = File( - desc="RD (Radial Diffusivity 1/2*(lambda2+lambda3)) output", - exists=True) + desc="RD (Radial Diffusivity 1/2*(lambda2+lambda3)) output", exists=True + ) rot_output = File( - desc="Rotated tensor output file. Must also specify the dof file.", - exists=True) + desc="Rotated tensor output file. Must also specify the dof file.", exists=True + ) outmask = File(desc="Name of the masked tensor field.", exists=True) deformation_output = File( - desc= - "Warped tensor field based on a deformation field. This option requires the --forward,-F transformation to be specified.", - exists=True) + desc="Warped tensor field based on a deformation field. 
This option requires the --forward,-F transformation to be specified.", + exists=True, + ) class dtiprocess(SEMLikeCommandLine): @@ -448,21 +470,21 @@ class dtiprocess(SEMLikeCommandLine): output_spec = dtiprocessOutputSpec _cmd = " dtiprocess " _outputs_filenames = { - 'fa_gradmag_output': 'fa_gradmag_output.nii', - 'fa_gradient_output': 'fa_gradient_output.nii', - 'lambda1_output': 'lambda1_output.nii', - 'lambda2_output': 'lambda2_output.nii', - 'color_fa_output': 'color_fa_output.nii', - 'fa_output': 'fa_output.nii', - 'frobenius_norm_output': 'frobenius_norm_output.nii', - 'principal_eigenvector_output': 'principal_eigenvector_output.nii', - 'outmask': 'outmask.nii', - 'lambda3_output': 'lambda3_output.nii', - 'negative_eigenvector_output': 'negative_eigenvector_output.nii', - 'md_output': 'md_output.nii', - 'RD_output': 'RD_output.nii', - 'deformation_output': 'deformation_output.nii', - 'rot_output': 'rot_output.nii' + "fa_gradmag_output": "fa_gradmag_output.nii", + "fa_gradient_output": "fa_gradient_output.nii", + "lambda1_output": "lambda1_output.nii", + "lambda2_output": "lambda2_output.nii", + "color_fa_output": "color_fa_output.nii", + "fa_output": "fa_output.nii", + "frobenius_norm_output": "frobenius_norm_output.nii", + "principal_eigenvector_output": "principal_eigenvector_output.nii", + "outmask": "outmask.nii", + "lambda3_output": "lambda3_output.nii", + "negative_eigenvector_output": "negative_eigenvector_output.nii", + "md_output": "md_output.nii", + "RD_output": "RD_output.nii", + "deformation_output": "deformation_output.nii", + "rot_output": "rot_output.nii", } _redirect_x = False @@ -473,103 +495,112 @@ class DWIConvertInputSpec(CommandLineInputSpec): "DicomToFSL", "NrrdToFSL", "FSLToNrrd", - desc= - "Determine which conversion to perform. DicomToNrrd (default): Convert DICOM series to NRRD DicomToFSL: Convert DICOM series to NIfTI File + gradient/bvalue text files NrrdToFSL: Convert DWI NRRD file to NIfTI File + gradient/bvalue text files FSLToNrrd: Convert NIfTI File + gradient/bvalue text files to NRRD file.", - argstr="--conversionMode %s") + desc="Determine which conversion to perform. 
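The scalar outputs listed for dtiprocess (fa_output, md_output, RD_output, the lambda maps) are all eigenvalue functions of the tensor. A one-voxel numpy sketch of those formulas; per the scalar_float description above, the tool scales FA/MD by 10 000 unless that flag is set:

    # One synthetic tensor; the real tool applies this voxelwise.
    import numpy as np

    D = np.array([[1.7e-3, 0.0, 0.0],
                  [0.0, 0.4e-3, 0.0],
                  [0.0, 0.0, 0.4e-3]])
    l1, l2, l3 = np.sort(np.linalg.eigvalsh(D))[::-1]  # lambda1 >= lambda2 >= lambda3

    md = (l1 + l2 + l3) / 3.0                # mean diffusivity
    rd = (l2 + l3) / 2.0                     # radial diffusivity, as in RD_output
    fa = np.sqrt(1.5 * ((l1 - md) ** 2 + (l2 - md) ** 2 + (l3 - md) ** 2)
                 / (l1 ** 2 + l2 ** 2 + l3 ** 2))
    print(fa, md, rd, l1)                    # l1 is the axial diffusivity map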
DicomToNrrd (default): Convert DICOM series to NRRD DicomToFSL: Convert DICOM series to NIfTI File + gradient/bvalue text files NrrdToFSL: Convert DWI NRRD file to NIfTI File + gradient/bvalue text files FSLToNrrd: Convert NIfTI File + gradient/bvalue text files to NRRD file.", + argstr="--conversionMode %s", + ) inputVolume = File( desc="Input DWI volume -- not used for DicomToNrrd mode.", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output filename (.nhdr or .nrrd)", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) inputDicomDirectory = Directory( desc="Directory holding Dicom series", exists=True, - argstr="--inputDicomDirectory %s") + argstr="--inputDicomDirectory %s", + ) fslNIFTIFile = File( desc="4D NIfTI file containing gradient volumes", exists=True, - argstr="--fslNIFTIFile %s") + argstr="--fslNIFTIFile %s", + ) inputBValues = File( desc="The B Values are stored in FSL .bval text file format", exists=True, - argstr="--inputBValues %s") + argstr="--inputBValues %s", + ) inputBVectors = File( desc="The Gradient Vectors are stored in FSL .bvec text file format", exists=True, - argstr="--inputBVectors %s") + argstr="--inputBVectors %s", + ) outputBValues = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "The B Values are stored in FSL .bval text file format (defaults to .bval)", - argstr="--outputBValues %s") + desc="The B Values are stored in FSL .bval text file format (defaults to .bval)", + argstr="--outputBValues %s", + ) outputBVectors = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "The Gradient Vectors are stored in FSL .bvec text file format (defaults to .bvec)", - argstr="--outputBVectors %s") + desc="The Gradient Vectors are stored in FSL .bvec text file format (defaults to .bvec)", + argstr="--outputBVectors %s", + ) fMRI = traits.Bool( - desc="Output a NRRD file, but without gradients", argstr="--fMRI ") + desc="Output a NRRD file, but without gradients", argstr="--fMRI " + ) writeProtocolGradientsFile = traits.Bool( - desc= - "Write the protocol gradients to a file suffixed by \'.txt\' as they were specified in the procol by multiplying each diffusion gradient direction by the measurement frame. This file is for debugging purposes only, the format is not fixed, and will likely change as debugging of new dicom formats is necessary.", - argstr="--writeProtocolGradientsFile ") + desc="Write the protocol gradients to a file suffixed by '.txt' as they were specified in the protocol by multiplying each diffusion gradient direction by the measurement frame. This file is for debugging purposes only, the format is not fixed, and will likely change as debugging of new dicom formats is necessary.", + argstr="--writeProtocolGradientsFile ", + ) useIdentityMeaseurementFrame = traits.Bool( - desc= - "Adjust all the gradients so that the measurement frame is an identity matrix.", - argstr="--useIdentityMeaseurementFrame ") + desc="Adjust all the gradients so that the measurement frame is an identity matrix.", + argstr="--useIdentityMeaseurementFrame ", + ) useBMatrixGradientDirections = traits.Bool( - desc= - "Fill the nhdr header with the gradient directions and bvalues computed out of the BMatrix. Only changes behavior for Siemens data. In some cases the standard public gradients are not properly computed. The gradients can emperically computed from the private BMatrix fields. 
In some cases the private BMatrix is consistent with the public grandients, but not in all cases, when it exists BMatrix is usually most robust.", - argstr="--useBMatrixGradientDirections ") + desc="Fill the nhdr header with the gradient directions and bvalues computed out of the BMatrix. Only changes behavior for Siemens data. In some cases the standard public gradients are not properly computed. The gradients can be empirically computed from the private BMatrix fields. In some cases the private BMatrix is consistent with the public gradients, but not in all cases; when it exists, the BMatrix is usually most robust.", + argstr="--useBMatrixGradientDirections ", + ) outputDirectory = traits.Either( traits.Bool, Directory(), hash_files=False, desc="Directory holding the output NRRD file", - argstr="--outputDirectory %s") + argstr="--outputDirectory %s", + ) gradientVectorFile = traits.Either( traits.Bool, File(), hash_files=False, desc="Text file giving gradient vectors", - argstr="--gradientVectorFile %s") + argstr="--gradientVectorFile %s", + ) smallGradientThreshold = traits.Float( - desc= - "If a gradient magnitude is greater than 0 and less than smallGradientThreshold, then DWIConvert will display an error message and quit, unless the useBMatrixGradientDirections option is set.", - argstr="--smallGradientThreshold %f") + desc="If a gradient magnitude is greater than 0 and less than smallGradientThreshold, then DWIConvert will display an error message and quit, unless the useBMatrixGradientDirections option is set.", + argstr="--smallGradientThreshold %f", + ) allowLossyConversion = traits.Bool( - desc= - "The only supported output type is \'short\'. Conversion from images of a different type may cause data loss due to rounding or truncation. Use with caution!", - argstr="--allowLossyConversion ") + desc="The only supported output type is 'short'. Conversion from images of a different type may cause data loss due to rounding or truncation. Use with caution!", + argstr="--allowLossyConversion ", + ) transposeInputBVectors = traits.Bool( - desc= - "FSL input BVectors are expected to be encoded in the input file as one vector per line. 
If it is not the case, use this option to transpose the file as it is read.", + argstr="--transposeInputBVectors ", + ) class DWIConvertOutputSpec(TraitedSpec): outputVolume = File(desc="Output filename (.nhdr or .nrrd)", exists=True) outputBValues = File( - desc= - "The B Values are stored in FSL .bval text file format (defaults to .bval)", - exists=True) + desc="The B Values are stored in FSL .bval text file format (defaults to .bval)", + exists=True, + ) outputBVectors = File( - desc= - "The Gradient Vectors are stored in FSL .bvec text file format (defaults to .bvec)", - exists=True) + desc="The Gradient Vectors are stored in FSL .bvec text file format (defaults to .bvec)", + exists=True, + ) outputDirectory = Directory( - desc="Directory holding the output NRRD file", exists=True) - gradientVectorFile = File( - desc="Text file giving gradient vectors", exists=True) + desc="Directory holding the output NRRD file", exists=True + ) + gradientVectorFile = File(desc="Text file giving gradient vectors", exists=True) class DWIConvert(SEMLikeCommandLine): @@ -595,10 +626,10 @@ class DWIConvert(SEMLikeCommandLine): output_spec = DWIConvertOutputSpec _cmd = " DWIConvert " _outputs_filenames = { - 'outputVolume': 'outputVolume.nii', - 'outputDirectory': 'outputDirectory', - 'outputBValues': 'outputBValues.bval', - 'gradientVectorFile': 'gradientVectorFile', - 'outputBVectors': 'outputBVectors.bvec' + "outputVolume": "outputVolume.nii", + "outputDirectory": "outputDirectory", + "outputBValues": "outputBValues.bval", + "gradientVectorFile": "gradientVectorFile", + "outputBVectors": "outputBVectors.bvec", } _redirect_x = False diff --git a/nipype/interfaces/semtools/diffusion/gtract.py b/nipype/interfaces/semtools/diffusion/gtract.py index 999c898599..0ad255b536 100644 --- a/nipype/interfaces/semtools/diffusion/gtract.py +++ b/nipype/interfaces/semtools/diffusion/gtract.py @@ -5,35 +5,44 @@ import os -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class gtractTransformToDisplacementFieldInputSpec(CommandLineInputSpec): inputTransform = File( - desc="Input Transform File Name", - exists=True, - argstr="--inputTransform %s") + desc="Input Transform File Name", exists=True, argstr="--inputTransform %s" + ) inputReferenceVolume = File( - desc= - "Required: input image file name to exemplify the anatomical space over which to vcl_express the transform as a displacement field.", + desc="Required: input image file name to exemplify the anatomical space over which to vcl_express the transform as a displacement field.", exists=True, - argstr="--inputReferenceVolume %s") + argstr="--inputReferenceVolume %s", + ) outputDeformationFieldVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output deformation field", - argstr="--outputDeformationFieldVolume %s") + argstr="--outputDeformationFieldVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractTransformToDisplacementFieldOutputSpec(TraitedSpec): - outputDeformationFieldVolume = File( - desc="Output deformation field", exists=True) + outputDeformationFieldVolume = File(desc="Output deformation field", exists=True) 
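For orientation, the spec classes in this file all follow nipype's SEM pattern: each input trait carries an argstr, and setting the trait appends that flag to the generated command line of the command class that pairs the specs (such as gtractTransformToDisplacementField below). A minimal usage sketch, not part of the diff — the top-level import path is assumed to re-export the class, and all file paths are hypothetical:

# Hedged sketch of driving a SEM-style wrapper; paths are hypothetical and
# the GTRACT binary must be on $PATH for run() to succeed.
from nipype.interfaces.semtools import gtractTransformToDisplacementField

xfm2field = gtractTransformToDisplacementField()
xfm2field.inputs.inputTransform = "rigid.h5"              # hypothetical transform file
xfm2field.inputs.inputReferenceVolume = "reference.nrrd"  # defines the output grid
xfm2field.inputs.outputDeformationFieldVolume = "field.nii"
print(xfm2field.cmdline)  # shows the assembled command with the argstr flags above
# result = xfm2field.run()  # would execute the underlying tool if installed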
class gtractTransformToDisplacementField(SEMLikeCommandLine): @@ -59,40 +68,43 @@ class gtractTransformToDisplacementField(SEMLikeCommandLine): output_spec = gtractTransformToDisplacementFieldOutputSpec _cmd = " gtractTransformToDisplacementField " _outputs_filenames = { - 'outputDeformationFieldVolume': 'outputDeformationFieldVolume.nii' + "outputDeformationFieldVolume": "outputDeformationFieldVolume.nii" } _redirect_x = False class gtractInvertBSplineTransformInputSpec(CommandLineInputSpec): inputReferenceVolume = File( - desc= - "Required: input image file name to exemplify the anatomical space to interpolate over.", + desc="Required: input image file name to exemplify the anatomical space to interpolate over.", exists=True, - argstr="--inputReferenceVolume %s") + argstr="--inputReferenceVolume %s", + ) inputTransform = File( desc="Required: input B-Spline transform file name", exists=True, - argstr="--inputTransform %s") + argstr="--inputTransform %s", + ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output transform file name", - argstr="--outputTransform %s") + argstr="--outputTransform %s", + ) landmarkDensity = InputMultiPath( traits.Int, desc="Number of landmark subdivisions in all 3 directions", sep=",", - argstr="--landmarkDensity %s") + argstr="--landmarkDensity %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractInvertBSplineTransformOutputSpec(TraitedSpec): - outputTransform = File( - desc="Required: output transform file name", exists=True) + outputTransform = File(desc="Required: output transform file name", exists=True) class gtractInvertBSplineTransform(SEMLikeCommandLine): @@ -117,37 +129,38 @@ class gtractInvertBSplineTransform(SEMLikeCommandLine): input_spec = gtractInvertBSplineTransformInputSpec output_spec = gtractInvertBSplineTransformOutputSpec _cmd = " gtractInvertBSplineTransform " - _outputs_filenames = {'outputTransform': 'outputTransform.h5'} + _outputs_filenames = {"outputTransform": "outputTransform.h5"} _redirect_x = False class gtractConcatDwiInputSpec(CommandLineInputSpec): inputVolume = InputMultiPath( File(exists=True), - desc= - "Required: input file containing the first diffusion weighted image", - argstr="--inputVolume %s...") + desc="Required: input file containing the first diffusion weighted image", + argstr="--inputVolume %s...", + ) ignoreOrigins = traits.Bool( - desc= - "If image origins are different force all images to origin of first image", - argstr="--ignoreOrigins ") + desc="If image origins are different force all images to origin of first image", + argstr="--ignoreOrigins ", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD file containing the combined diffusion weighted images.", - argstr="--outputVolume %s") + desc="Required: name of output NRRD file containing the combined diffusion weighted images.", + argstr="--outputVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractConcatDwiOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD file containing the combined diffusion weighted images.", - exists=True) + desc="Required: name of output NRRD file containing the combined diffusion weighted images.", + exists=True, + ) class 
gtractConcatDwi(SEMLikeCommandLine): @@ -172,40 +185,42 @@ class gtractConcatDwi(SEMLikeCommandLine): input_spec = gtractConcatDwiInputSpec output_spec = gtractConcatDwiOutputSpec _cmd = " gtractConcatDwi " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False class gtractAverageBvaluesInputSpec(CommandLineInputSpec): inputVolume = File( - desc= - "Required: input image file name containing multiple baseline gradients to average", + desc="Required: input image file name containing multiple baseline gradients to average", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD file containing directly averaged baseline images", - argstr="--outputVolume %s") + desc="Required: name of output NRRD file containing directly averaged baseline images", + argstr="--outputVolume %s", + ) directionsTolerance = traits.Float( desc="Tolerance for matching identical gradient direction pairs", - argstr="--directionsTolerance %f") + argstr="--directionsTolerance %f", + ) averageB0only = traits.Bool( - desc= - "Average only baseline gradients. All other gradient directions are not averaged, but retained in the outputVolume", - argstr="--averageB0only ") + desc="Average only baseline gradients. All other gradient directions are not averaged, but retained in the outputVolume", + argstr="--averageB0only ", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractAverageBvaluesOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD file containing directly averaged baseline images", - exists=True) + desc="Required: name of output NRRD file containing directly averaged baseline images", + exists=True, + ) class gtractAverageBvalues(SEMLikeCommandLine): @@ -230,90 +245,92 @@ class gtractAverageBvalues(SEMLikeCommandLine): input_spec = gtractAverageBvaluesInputSpec output_spec = gtractAverageBvaluesOutputSpec _cmd = " gtractAverageBvalues " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False class gtractCoregBvaluesInputSpec(CommandLineInputSpec): movingVolume = File( - desc= - "Required: input moving image file name. In order to register gradients within a scan to its first gradient, set the movingVolume and fixedVolume as the same image.", + desc="Required: input moving image file name. In order to register gradients within a scan to its first gradient, set the movingVolume and fixedVolume as the same image.", exists=True, - argstr="--movingVolume %s") + argstr="--movingVolume %s", + ) fixedVolume = File( - desc= - "Required: input fixed image file name. It is recommended that this image should either contain or be a b0 image.", + desc="Required: input fixed image file name. It is recommended that this image should either contain or be a b0 image.", exists=True, - argstr="--fixedVolume %s") + argstr="--fixedVolume %s", + ) fixedVolumeIndex = traits.Int( - desc= - "Index in the fixed image for registration. It is recommended that this image should be a b0 image.", - argstr="--fixedVolumeIndex %d") + desc="Index in the fixed image for registration. 
It is recommended that this image should be a b0 image.", + argstr="--fixedVolumeIndex %d", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD file containing moving images individually resampled and fit to the specified fixed image index.", - argstr="--outputVolume %s") + desc="Required: name of output NRRD file containing moving images individually resampled and fit to the specified fixed image index.", + argstr="--outputVolume %s", + ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Registration 3D transforms concatenated in a single output file. There are no tools that can use this, but can be used for debugging purposes.", - argstr="--outputTransform %s") + desc="Registration 3D transforms concatenated in a single output file. There are no tools that can use this, but it can be used for debugging purposes.", + argstr="--outputTransform %s", + ) eddyCurrentCorrection = traits.Bool( - desc= - "Flag to perform eddy current corection in addition to motion correction (recommended)", - argstr="--eddyCurrentCorrection ") + desc="Flag to perform eddy current correction in addition to motion correction (recommended)", + argstr="--eddyCurrentCorrection ", + ) numberOfIterations = traits.Int( - desc="Number of iterations in each 3D fit", - argstr="--numberOfIterations %d") + desc="Number of iterations in each 3D fit", argstr="--numberOfIterations %d" + ) numberOfSpatialSamples = traits.Int( - desc= - "The number of voxels sampled for mutual information computation. Increase this for a slower, more careful fit. NOTE that it is suggested to use samplingPercentage instead of this option. However, if set, it overwrites the samplingPercentage option. ", - argstr="--numberOfSpatialSamples %d") + desc="The number of voxels sampled for mutual information computation. Increase this for a slower, more careful fit. NOTE that it is suggested to use samplingPercentage instead of this option. However, if set, it overwrites the samplingPercentage option. ", + argstr="--numberOfSpatialSamples %d", + ) samplingPercentage = traits.Float( - desc= - "This is a number in (0.0,1.0] interval that shows the percentage of the input fixed image voxels that are sampled for mutual information computation. Increase this for a slower, more careful fit. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation. The default is to use approximately 5% of voxels (for backwards compatibility 5% ~= 500000/(256*256*256)). Typical values range from 1% for low detail images to 20% for high detail images.", - argstr="--samplingPercentage %f") + desc="This is a number in (0.0,1.0] interval that shows the percentage of the input fixed image voxels that are sampled for mutual information computation. Increase this for a slower, more careful fit. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation. The default is to use approximately 5% of voxels (for backwards compatibility 5% ~= 500000/(256*256*256)). 
Typical values range from 1% for low detail images to 20% for high detail images.", + argstr="--samplingPercentage %f", + ) relaxationFactor = traits.Float( - desc= - "Fraction of gradient from Jacobian to attempt to move in each 3D fit step (adjust when eddyCurrentCorrection is enabled; suggested value = 0.25)", - argstr="--relaxationFactor %f") + desc="Fraction of gradient from Jacobian to attempt to move in each 3D fit step (adjust when eddyCurrentCorrection is enabled; suggested value = 0.25)", + argstr="--relaxationFactor %f", + ) maximumStepSize = traits.Float( - desc= - "Maximum permitted step size to move in each 3D fit step (adjust when eddyCurrentCorrection is enabled; suggested value = 0.1)", - argstr="--maximumStepSize %f") + desc="Maximum permitted step size to move in each 3D fit step (adjust when eddyCurrentCorrection is enabled; suggested value = 0.1)", + argstr="--maximumStepSize %f", + ) minimumStepSize = traits.Float( - desc= - "Minimum required step size to move in each 3D fit step without converging -- decrease this to make the fit more exacting", - argstr="--minimumStepSize %f") + desc="Minimum required step size to move in each 3D fit step without converging -- decrease this to make the fit more exacting", + argstr="--minimumStepSize %f", + ) spatialScale = traits.Float( - desc= - "How much to scale up changes in position compared to unit rotational changes in radians -- decrease this to put more rotation in the fit", - argstr="--spatialScale %f") + desc="How much to scale up changes in position compared to unit rotational changes in radians -- decrease this to put more rotation in the fit", + argstr="--spatialScale %f", + ) registerB0Only = traits.Bool( - desc="Register the B0 images only", argstr="--registerB0Only ") + desc="Register the B0 images only", argstr="--registerB0Only " + ) debugLevel = traits.Int( - desc= - "Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", - argstr="--debugLevel %d") + desc="Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", + argstr="--debugLevel %d", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractCoregBvaluesOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD file containing moving images individually resampled and fit to the specified fixed image index.", - exists=True) + desc="Required: name of output NRRD file containing moving images individually resampled and fit to the specified fixed image index.", + exists=True, + ) outputTransform = File( - desc= - "Registration 3D transforms concatenated in a single output file. There are no tools that can use this, but can be used for debugging purposes.", - exists=True) + desc="Registration 3D transforms concatenated in a single output file. 
There are no tools that can use this, but it can be used for debugging purposes.", + exists=True, + ) class gtractCoregBvalues(SEMLikeCommandLine): @@ -339,8 +356,8 @@ class gtractCoregBvalues(SEMLikeCommandLine): output_spec = gtractCoregBvaluesOutputSpec _cmd = " gtractCoregBvalues " _outputs_filenames = { - 'outputVolume': 'outputVolume.nrrd', - 'outputTransform': 'outputTransform.h5' + "outputVolume": "outputVolume.nrrd", + "outputTransform": "outputTransform.h5", } _redirect_x = False @@ -349,38 +366,42 @@ class gtractResampleAnisotropyInputSpec(CommandLineInputSpec): inputAnisotropyVolume = File( desc="Required: input file containing the anisotropy image", exists=True, - argstr="--inputAnisotropyVolume %s") + argstr="--inputAnisotropyVolume %s", + ) inputAnatomicalVolume = File( - desc= - "Required: input file containing the anatomical image whose characteristics will be cloned.", + desc="Required: input file containing the anatomical image whose characteristics will be cloned.", exists=True, - argstr="--inputAnatomicalVolume %s") + argstr="--inputAnatomicalVolume %s", + ) inputTransform = File( desc="Required: input Rigid OR Bspline transform file name", exists=True, - argstr="--inputTransform %s") + argstr="--inputTransform %s", + ) transformType = traits.Enum( "Rigid", "B-Spline", desc="Transform type: Rigid, B-Spline", - argstr="--transformType %s") + argstr="--transformType %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD file containing the resampled transformed anisotropy image.", - argstr="--outputVolume %s") + desc="Required: name of output NRRD file containing the resampled transformed anisotropy image.", + argstr="--outputVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractResampleAnisotropyOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD file containing the resampled transformed anisotropy image.", - exists=True) + desc="Required: name of output NRRD file containing the resampled transformed anisotropy image.", + exists=True, + ) class gtractResampleAnisotropy(SEMLikeCommandLine): @@ -405,7 +426,7 @@ class gtractResampleAnisotropy(SEMLikeCommandLine): input_spec = gtractResampleAnisotropyInputSpec output_spec = gtractResampleAnisotropyOutputSpec _cmd = " gtractResampleAnisotropy " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False @@ -413,16 +434,18 @@ class gtractResampleCodeImageInputSpec(CommandLineInputSpec): inputCodeVolume = File( desc="Required: input file containing the code image", exists=True, - argstr="--inputCodeVolume %s") + argstr="--inputCodeVolume %s", + ) inputReferenceVolume = File( - desc= - "Required: input file containing the standard image to clone the characteristics of.", + desc="Required: input file containing the standard image to clone the characteristics of.", exists=True, - argstr="--inputReferenceVolume %s") + argstr="--inputReferenceVolume %s", + ) inputTransform = File( desc="Required: input Rigid or Inverse-B-Spline transform file name", exists=True, - argstr="--inputTransform %s") + argstr="--inputTransform %s", + ) transformType = traits.Enum( "Rigid", "Affine", @@ -430,24 +453,26 @@ class gtractResampleCodeImageInputSpec(CommandLineInputSpec): "Inverse-B-Spline", "None", desc="Transform type: Rigid or 
Inverse-B-Spline", - argstr="--transformType %s") + argstr="--transformType %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD file containing the resampled code image in acquisition space.", - argstr="--outputVolume %s") + desc="Required: name of output NRRD file containing the resampled code image in acquisition space.", + argstr="--outputVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractResampleCodeImageOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD file containing the resampled code image in acquisition space.", - exists=True) + desc="Required: name of output NRRD file containing the resampled code image in acquisition space.", + exists=True, + ) class gtractResampleCodeImage(SEMLikeCommandLine): @@ -472,37 +497,39 @@ class gtractResampleCodeImage(SEMLikeCommandLine): input_spec = gtractResampleCodeImageInputSpec output_spec = gtractResampleCodeImageOutputSpec _cmd = " gtractResampleCodeImage " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False class gtractCopyImageOrientationInputSpec(CommandLineInputSpec): inputVolume = File( - desc= - "Required: input file containing the signed short image to reorient without resampling.", + desc="Required: input file containing the signed short image to reorient without resampling.", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) inputReferenceVolume = File( desc="Required: input file containing orietation that will be cloned.", exists=True, - argstr="--inputReferenceVolume %s") + argstr="--inputReferenceVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD or Nifti file containing the reoriented image in reference image space.", - argstr="--outputVolume %s") + desc="Required: name of output NRRD or Nifti file containing the reoriented image in reference image space.", + argstr="--outputVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractCopyImageOrientationOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD or Nifti file containing the reoriented image in reference image space.", - exists=True) + desc="Required: name of output NRRD or Nifti file containing the reoriented image in reference image space.", + exists=True, + ) class gtractCopyImageOrientation(SEMLikeCommandLine): @@ -527,7 +554,7 @@ class gtractCopyImageOrientation(SEMLikeCommandLine): input_spec = gtractCopyImageOrientationInputSpec output_spec = gtractCopyImageOrientationOutputSpec _cmd = " gtractCopyImageOrientation " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False @@ -535,28 +562,30 @@ class gtractCreateGuideFiberInputSpec(CommandLineInputSpec): inputFiber = File( desc="Required: input fiber tract file name", exists=True, - argstr="--inputFiber %s") + argstr="--inputFiber %s", + ) numberOfPoints = traits.Int( - desc="Number of points in output guide fiber", - argstr="--numberOfPoints %d") + desc="Number of points in output guide fiber", argstr="--numberOfPoints %d" + ) outputFiber = 
traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output guide fiber file name", - argstr="--outputFiber %s") + argstr="--outputFiber %s", + ) writeXMLPolyDataFile = traits.Bool( - desc= - "Flag to make use of XML files when reading and writing vtkPolyData.", - argstr="--writeXMLPolyDataFile ") + desc="Flag to make use of XML files when reading and writing vtkPolyData.", + argstr="--writeXMLPolyDataFile ", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractCreateGuideFiberOutputSpec(TraitedSpec): - outputFiber = File( - desc="Required: output guide fiber file name", exists=True) + outputFiber = File(desc="Required: output guide fiber file name", exists=True) class gtractCreateGuideFiber(SEMLikeCommandLine): @@ -581,7 +610,7 @@ class gtractCreateGuideFiber(SEMLikeCommandLine): input_spec = gtractCreateGuideFiberInputSpec output_spec = gtractCreateGuideFiberOutputSpec _cmd = " gtractCreateGuideFiber " - _outputs_filenames = {'outputFiber': 'outputFiber.vtk'} + _outputs_filenames = {"outputFiber": "outputFiber.vtk"} _redirect_x = False @@ -589,7 +618,8 @@ class gtractAnisotropyMapInputSpec(CommandLineInputSpec): inputTensorVolume = File( desc="Required: input file containing the diffusion tensor image", exists=True, - argstr="--inputTensorVolume %s") + argstr="--inputTensorVolume %s", + ) anisotropyType = traits.Enum( "ADC", "FA", @@ -599,24 +629,26 @@ class gtractAnisotropyMapInputSpec(CommandLineInputSpec): "RD", "LI", desc="Anisotropy Mapping Type: ADC, FA, RA, VR, AD, RD, LI", - argstr="--anisotropyType %s") + argstr="--anisotropyType %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD file containing the selected kind of anisotropy scalar.", - argstr="--outputVolume %s") + desc="Required: name of output NRRD file containing the selected kind of anisotropy scalar.", + argstr="--outputVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractAnisotropyMapOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD file containing the selected kind of anisotropy scalar.", - exists=True) + desc="Required: name of output NRRD file containing the selected kind of anisotropy scalar.", + exists=True, + ) class gtractAnisotropyMap(SEMLikeCommandLine): @@ -641,38 +673,38 @@ class gtractAnisotropyMap(SEMLikeCommandLine): input_spec = gtractAnisotropyMapInputSpec output_spec = gtractAnisotropyMapOutputSpec _cmd = " gtractAnisotropyMap " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False class gtractClipAnisotropyInputSpec(CommandLineInputSpec): inputVolume = File( - desc="Required: input image file name", - exists=True, - argstr="--inputVolume %s") + desc="Required: input image file name", exists=True, argstr="--inputVolume %s" + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD file containing the clipped anisotropy image", - argstr="--outputVolume %s") + desc="Required: name of output NRRD file containing the clipped anisotropy image", + argstr="--outputVolume %s", + ) clipFirstSlice = traits.Bool( - desc="Clip the first slice of the anisotropy image", - 
argstr="--clipFirstSlice ") + desc="Clip the first slice of the anisotropy image", argstr="--clipFirstSlice " + ) clipLastSlice = traits.Bool( - desc="Clip the last slice of the anisotropy image", - argstr="--clipLastSlice ") + desc="Clip the last slice of the anisotropy image", argstr="--clipLastSlice " + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractClipAnisotropyOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD file containing the clipped anisotropy image", - exists=True) + desc="Required: name of output NRRD file containing the clipped anisotropy image", + exists=True, + ) class gtractClipAnisotropy(SEMLikeCommandLine): @@ -697,7 +729,7 @@ class gtractClipAnisotropy(SEMLikeCommandLine): input_spec = gtractClipAnisotropyInputSpec output_spec = gtractClipAnisotropyOutputSpec _cmd = " gtractClipAnisotropy " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False @@ -705,41 +737,46 @@ class gtractResampleB0InputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input file containing the 4D image", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) inputAnatomicalVolume = File( - desc= - "Required: input file containing the anatomical image defining the origin, spacing and size of the resampled image (template)", + desc="Required: input file containing the anatomical image defining the origin, spacing and size of the resampled image (template)", exists=True, - argstr="--inputAnatomicalVolume %s") + argstr="--inputAnatomicalVolume %s", + ) inputTransform = File( desc="Required: input Rigid OR Bspline transform file name", exists=True, - argstr="--inputTransform %s") + argstr="--inputTransform %s", + ) vectorIndex = traits.Int( desc="Index in the diffusion weighted image set for the B0 image", - argstr="--vectorIndex %d") + argstr="--vectorIndex %d", + ) transformType = traits.Enum( "Rigid", "B-Spline", desc="Transform type: Rigid, B-Spline", - argstr="--transformType %s") + argstr="--transformType %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD file containing the resampled input image.", - argstr="--outputVolume %s") + desc="Required: name of output NRRD file containing the resampled input image.", + argstr="--outputVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractResampleB0OutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD file containing the resampled input image.", - exists=True) + desc="Required: name of output NRRD file containing the resampled input image.", + exists=True, + ) class gtractResampleB0(SEMLikeCommandLine): @@ -764,7 +801,7 @@ class gtractResampleB0(SEMLikeCommandLine): input_spec = gtractResampleB0InputSpec output_spec = gtractResampleB0OutputSpec _cmd = " gtractResampleB0 " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False @@ -772,21 +809,23 @@ class gtractInvertRigidTransformInputSpec(CommandLineInputSpec): inputTransform = File( desc="Required: input rigid transform file name", exists=True, - argstr="--inputTransform %s") + 
argstr="--inputTransform %s", + ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output transform file name", - argstr="--outputTransform %s") + argstr="--outputTransform %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractInvertRigidTransformOutputSpec(TraitedSpec): - outputTransform = File( - desc="Required: output transform file name", exists=True) + outputTransform = File(desc="Required: output transform file name", exists=True) class gtractInvertRigidTransform(SEMLikeCommandLine): @@ -811,38 +850,39 @@ class gtractInvertRigidTransform(SEMLikeCommandLine): input_spec = gtractInvertRigidTransformInputSpec output_spec = gtractInvertRigidTransformOutputSpec _cmd = " gtractInvertRigidTransform " - _outputs_filenames = {'outputTransform': 'outputTransform.h5'} + _outputs_filenames = {"outputTransform": "outputTransform.h5"} _redirect_x = False class gtractImageConformityInputSpec(CommandLineInputSpec): inputVolume = File( - desc= - "Required: input file containing the signed short image to reorient without resampling.", + desc="Required: input file containing the signed short image to reorient without resampling.", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) inputReferenceVolume = File( - desc= - "Required: input file containing the standard image to clone the characteristics of.", + desc="Required: input file containing the standard image to clone the characteristics of.", exists=True, - argstr="--inputReferenceVolume %s") + argstr="--inputReferenceVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output Nrrd or Nifti file containing the reoriented image in reference image space.", - argstr="--outputVolume %s") + desc="Required: name of output Nrrd or Nifti file containing the reoriented image in reference image space.", + argstr="--outputVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractImageConformityOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output Nrrd or Nifti file containing the reoriented image in reference image space.", - exists=True) + desc="Required: name of output Nrrd or Nifti file containing the reoriented image in reference image space.", + exists=True, + ) class gtractImageConformity(SEMLikeCommandLine): @@ -867,7 +907,7 @@ class gtractImageConformity(SEMLikeCommandLine): input_spec = gtractImageConformityInputSpec output_spec = gtractImageConformityOutputSpec _cmd = " gtractImageConformity " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False @@ -875,31 +915,36 @@ class compareTractInclusionInputSpec(CommandLineInputSpec): testFiber = File( desc="Required: test fiber tract file name", exists=True, - argstr="--testFiber %s") + argstr="--testFiber %s", + ) standardFiber = File( desc="Required: standard fiber tract file name", exists=True, - argstr="--standardFiber %s") + argstr="--standardFiber %s", + ) closeness = traits.Float( - desc= - "Closeness of every test fiber to some fiber in the standard tract, computed as a sum of squares of spatial differences of standard points", - argstr="--closeness %f") + desc="Closeness of every test fiber to some 
fiber in the standard tract, computed as a sum of squares of spatial differences of standard points", + argstr="--closeness %f", + ) numberOfPoints = traits.Int( - desc="Number of points in comparison fiber pairs", - argstr="--numberOfPoints %d") + desc="Number of points in comparison fiber pairs", argstr="--numberOfPoints %d" + ) testForBijection = traits.Bool( desc="Flag to apply the closeness criterion both ways", - argstr="--testForBijection ") + argstr="--testForBijection ", + ) testForFiberCardinality = traits.Bool( desc="Flag to require the same number of fibers in both tracts", - argstr="--testForFiberCardinality ") + argstr="--testForFiberCardinality ", + ) writeXMLPolyDataFile = traits.Bool( - desc= - "Flag to make use of XML files when reading and writing vtkPolyData.", - argstr="--writeXMLPolyDataFile ") + desc="Flag to make use of XML files when reading and writing vtkPolyData.", + argstr="--writeXMLPolyDataFile ", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class compareTractInclusionOutputSpec(TraitedSpec): @@ -936,59 +981,68 @@ class gtractFastMarchingTrackingInputSpec(CommandLineInputSpec): inputTensorVolume = File( desc="Required: input tensor image file name", exists=True, - argstr="--inputTensorVolume %s") + argstr="--inputTensorVolume %s", + ) inputAnisotropyVolume = File( desc="Required: input anisotropy image file name", exists=True, - argstr="--inputAnisotropyVolume %s") + argstr="--inputAnisotropyVolume %s", + ) inputCostVolume = File( desc="Required: input vcl_cost image file name", exists=True, - argstr="--inputCostVolume %s") + argstr="--inputCostVolume %s", + ) inputStartingSeedsLabelMapVolume = File( desc="Required: input starting seeds LabelMap image file name", exists=True, - argstr="--inputStartingSeedsLabelMapVolume %s") + argstr="--inputStartingSeedsLabelMapVolume %s", + ) startingSeedsLabel = traits.Int( - desc="Label value for Starting Seeds", - argstr="--startingSeedsLabel %d") + desc="Label value for Starting Seeds", argstr="--startingSeedsLabel %d" + ) outputTract = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", - argstr="--outputTract %s") + desc="Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", + argstr="--outputTract %s", + ) writeXMLPolyDataFile = traits.Bool( desc="Flag to make use of the XML format for vtkPolyData fiber tracts.", - argstr="--writeXMLPolyDataFile ") + argstr="--writeXMLPolyDataFile ", + ) numberOfIterations = traits.Int( desc="Number of iterations used for the optimization", - argstr="--numberOfIterations %d") + argstr="--numberOfIterations %d", + ) seedThreshold = traits.Float( - desc="Anisotropy threshold used for seed selection", - argstr="--seedThreshold %f") + desc="Anisotropy threshold used for seed selection", argstr="--seedThreshold %f" + ) trackingThreshold = traits.Float( desc="Anisotropy threshold used for fiber tracking", - argstr="--trackingThreshold %f") + argstr="--trackingThreshold %f", + ) costStepSize = traits.Float( - desc="Cost image sub-voxel sampling", argstr="--costStepSize %f") + desc="Cost image sub-voxel sampling", argstr="--costStepSize %f" + ) maximumStepSize = traits.Float( - desc="Maximum step size to move when tracking", - argstr="--maximumStepSize %f") + desc="Maximum step size to move when 
tracking", argstr="--maximumStepSize %f" + ) minimumStepSize = traits.Float( - desc="Minimum step size to move when tracking", - argstr="--minimumStepSize %f") + desc="Minimum step size to move when tracking", argstr="--minimumStepSize %f" + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractFastMarchingTrackingOutputSpec(TraitedSpec): outputTract = File( - desc= - "Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", - exists=True) + desc="Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", + exists=True, + ) class gtractFastMarchingTracking(SEMLikeCommandLine): @@ -1013,32 +1067,36 @@ class gtractFastMarchingTracking(SEMLikeCommandLine): input_spec = gtractFastMarchingTrackingInputSpec output_spec = gtractFastMarchingTrackingOutputSpec _cmd = " gtractFastMarchingTracking " - _outputs_filenames = {'outputTract': 'outputTract.vtk'} + _outputs_filenames = {"outputTract": "outputTract.vtk"} _redirect_x = False class gtractInvertDisplacementFieldInputSpec(CommandLineInputSpec): baseImage = File( - desc= - "Required: base image used to define the size of the inverse field", + desc="Required: base image used to define the size of the inverse field", exists=True, - argstr="--baseImage %s") + argstr="--baseImage %s", + ) deformationImage = File( desc="Required: Displacement field image", exists=True, - argstr="--deformationImage %s") + argstr="--deformationImage %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: Output deformation field", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) subsamplingFactor = traits.Int( desc="Subsampling factor for the deformation field", - argstr="--subsamplingFactor %d") + argstr="--subsamplingFactor %d", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractInvertDisplacementFieldOutputSpec(TraitedSpec): @@ -1067,107 +1125,115 @@ class gtractInvertDisplacementField(SEMLikeCommandLine): input_spec = gtractInvertDisplacementFieldInputSpec output_spec = gtractInvertDisplacementFieldOutputSpec _cmd = " gtractInvertDisplacementField " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False class gtractCoRegAnatomyInputSpec(CommandLineInputSpec): inputVolume = File( - desc= - "Required: input vector image file name. It is recommended that the input volume is the skull stripped baseline image of the DWI scan.", + desc="Required: input vector image file name. It is recommended that the input volume is the skull stripped baseline image of the DWI scan.", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) inputAnatomicalVolume = File( - desc= - "Required: input anatomical image file name. It is recommended that that the input anatomical image has been skull stripped and has the same orientation as the DWI scan.", + desc="Required: input anatomical image file name. 
It is recommended that the input anatomical image has been skull stripped and has the same orientation as the DWI scan.", exists=True, - argstr="--inputAnatomicalVolume %s") + argstr="--inputAnatomicalVolume %s", + ) vectorIndex = traits.Int( - desc= - "Vector image index in the moving image (within the DWI) to be used for registration.", - argstr="--vectorIndex %d") + desc="Vector image index in the moving image (within the DWI) to be used for registration.", + argstr="--vectorIndex %d", + ) inputRigidTransform = File( - desc= - "Required (for B-Spline type co-registration): input rigid transform file name. Used as a starting point for the anatomical B-Spline registration.", + desc="Required (for B-Spline type co-registration): input rigid transform file name. Used as a starting point for the anatomical B-Spline registration.", exists=True, - argstr="--inputRigidTransform %s") + argstr="--inputRigidTransform %s", + ) outputTransformName = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: filename for the fit transform.", - argstr="--outputTransformName %s") + argstr="--outputTransformName %s", + ) transformType = traits.Enum( "Rigid", "Bspline", desc="Transform Type: Rigid|Bspline", - argstr="--transformType %s") + argstr="--transformType %s", + ) numberOfIterations = traits.Int( desc="Number of iterations in the selected 3D fit", - argstr="--numberOfIterations %d") + argstr="--numberOfIterations %d", + ) gridSize = InputMultiPath( traits.Int, desc="Number of grid subdivisions in all 3 directions", sep=",", - argstr="--gridSize %s") + argstr="--gridSize %s", + ) borderSize = traits.Int(desc="Size of border", argstr="--borderSize %d") numberOfHistogramBins = traits.Int( - desc="Number of histogram bins", argstr="--numberOfHistogramBins %d") + desc="Number of histogram bins", argstr="--numberOfHistogramBins %d" + ) spatialScale = traits.Int( - desc= - "Scales the number of voxels in the image by this value to specify the number of voxels used in the registration", - argstr="--spatialScale %d") - convergence = traits.Float( - desc="Convergence Factor", argstr="--convergence %f") + desc="Scales the number of voxels in the image by this value to specify the number of voxels used in the registration", + argstr="--spatialScale %d", + ) + convergence = traits.Float(desc="Convergence Factor", argstr="--convergence %f") gradientTolerance = traits.Float( - desc="Gradient Tolerance", argstr="--gradientTolerance %f") + desc="Gradient Tolerance", argstr="--gradientTolerance %f" + ) maxBSplineDisplacement = traits.Float( - desc= - " Sets the maximum allowed displacements in image physical coordinates for BSpline control grid along each axis. A value of 0.0 indicates that the problem should be unbounded. NOTE: This only constrains the BSpline portion, and does not limit the displacement from the associated bulk transform. This can lead to a substantial reduction in computation time in the BSpline optimizer., ", - argstr="--maxBSplineDisplacement %f") + desc=" Sets the maximum allowed displacements in image physical coordinates for BSpline control grid along each axis. A value of 0.0 indicates that the problem should be unbounded. NOTE: This only constrains the BSpline portion, and does not limit the displacement from the associated bulk transform. 
This can lead to a substantial reduction in computation time in the BSpline optimizer.", + argstr="--maxBSplineDisplacement %f", + ) maximumStepSize = traits.Float( desc="Maximum permitted step size to move in the selected 3D fit", - argstr="--maximumStepSize %f") + argstr="--maximumStepSize %f", + ) minimumStepSize = traits.Float( - desc= - "Minimum required step size to move in the selected 3D fit without converging -- decrease this to make the fit more exacting", - argstr="--minimumStepSize %f") + desc="Minimum required step size to move in the selected 3D fit without converging -- decrease this to make the fit more exacting", + argstr="--minimumStepSize %f", + ) translationScale = traits.Float( - desc= - "How much to scale up changes in position compared to unit rotational changes in radians -- decrease this to put more translation in the fit", - argstr="--translationScale %f") + desc="How much to scale up changes in position compared to unit rotational changes in radians -- decrease this to put more translation in the fit", + argstr="--translationScale %f", + ) relaxationFactor = traits.Float( - desc= - "Fraction of gradient from Jacobian to attempt to move in the selected 3D fit", - argstr="--relaxationFactor %f") + desc="Fraction of gradient from Jacobian to attempt to move in the selected 3D fit", + argstr="--relaxationFactor %f", + ) numberOfSamples = traits.Int( - desc= - "The number of voxels sampled for mutual information computation. Increase this for a slower, more careful fit. NOTE that it is suggested to use samplingPercentage instead of this option. However, if set, it overwrites the samplingPercentage option. ", - argstr="--numberOfSamples %d") + desc="The number of voxels sampled for mutual information computation. Increase this for a slower, more careful fit. NOTE that it is suggested to use samplingPercentage instead of this option. However, if set, it overwrites the samplingPercentage option. ", + argstr="--numberOfSamples %d", + ) samplingPercentage = traits.Float( - desc= - "This is a number in (0.0,1.0] interval that shows the percentage of the input fixed image voxels that are sampled for mutual information computation. Increase this for a slower, more careful fit. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation. The default is to use approximately 5% of voxels (for backwards compatibility 5% ~= 500000/(256*256*256)). Typical values range from 1% for low detail images to 20% for high detail images.", - argstr="--samplingPercentage %f") + desc="This is a number in (0.0,1.0] interval that shows the percentage of the input fixed image voxels that are sampled for mutual information computation. Increase this for a slower, more careful fit. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation. The default is to use approximately 5% of voxels (for backwards compatibility 5% ~= 500000/(256*256*256)). Typical values range from 1% for low detail images to 20% for high detail images.", + argstr="--samplingPercentage %f", + ) useMomentsAlign = traits.Bool( - desc= - "MomentsAlign assumes that the center of mass of the images represent similar structures. Perform a MomentsAlign registration as part of the sequential registration steps. This option MUST come first, and CAN NOT be used with either CenterOfHeadLAlign, GeometryAlign, or initialTransform file. 
This family of options superceeds the use of transformType if any of them are set.", - argstr="--useMomentsAlign ") + desc="MomentsAlign assumes that the center of mass of the images represents similar structures. Perform a MomentsAlign registration as part of the sequential registration steps. This option MUST come first, and CAN NOT be used with either CenterOfHeadAlign, GeometryAlign, or initialTransform file. This family of options supersedes the use of transformType if any of them are set.", + argstr="--useMomentsAlign ", + ) useGeometryAlign = traits.Bool( - desc= - "GeometryAlign on assumes that the center of the voxel lattice of the images represent similar structures. Perform a GeometryCenterAlign registration as part of the sequential registration steps. This option MUST come first, and CAN NOT be used with either MomentsAlign, CenterOfHeadAlign, or initialTransform file. This family of options superceeds the use of transformType if any of them are set.", - argstr="--useGeometryAlign ") + desc="GeometryAlign on assumes that the center of the voxel lattice of the images represents similar structures. Perform a GeometryCenterAlign registration as part of the sequential registration steps. This option MUST come first, and CAN NOT be used with either MomentsAlign, CenterOfHeadAlign, or initialTransform file. This family of options supersedes the use of transformType if any of them are set.", + argstr="--useGeometryAlign ", + ) useCenterOfHeadAlign = traits.Bool( - desc= - "CenterOfHeadAlign attempts to find a hemisphere full of foreground voxels from the superior direction as an estimate of where the center of a head shape would be to drive a center of mass estimate. Perform a CenterOfHeadAlign registration as part of the sequential registration steps. This option MUST come first, and CAN NOT be used with either MomentsAlign, GeometryAlign, or initialTransform file. This family of options superceeds the use of transformType if any of them are set.", - argstr="--useCenterOfHeadAlign ") + desc="CenterOfHeadAlign attempts to find a hemisphere full of foreground voxels from the superior direction as an estimate of where the center of a head shape would be to drive a center of mass estimate. Perform a CenterOfHeadAlign registration as part of the sequential registration steps. This option MUST come first, and CAN NOT be used with either MomentsAlign, GeometryAlign, or initialTransform file. 
This family of options supersedes the use of transformType if any of them are set.", + argstr="--useCenterOfHeadAlign ", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractCoRegAnatomyOutputSpec(TraitedSpec): outputTransformName = File( - desc="Required: filename for the fit transform.", exists=True) + desc="Required: filename for the fit transform.", exists=True + ) class gtractCoRegAnatomy(SEMLikeCommandLine): @@ -1192,7 +1258,7 @@ class gtractCoRegAnatomy(SEMLikeCommandLine): input_spec = gtractCoRegAnatomyInputSpec output_spec = gtractCoRegAnatomyOutputSpec _cmd = " gtractCoRegAnatomy " - _outputs_filenames = {'outputTransformName': 'outputTransformName.h5'} + _outputs_filenames = {"outputTransformName": "outputTransformName.h5"} _redirect_x = False @@ -1200,59 +1266,62 @@ class gtractResampleDWIInPlaceInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input image is a 4D NRRD image.", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) referenceVolume = File( - desc= - "If provided, resample to the final space of the referenceVolume 3D data set.", + desc="If provided, resample to the final space of the referenceVolume 3D data set.", exists=True, - argstr="--referenceVolume %s") + argstr="--referenceVolume %s", + ) outputResampledB0 = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Convenience function for extracting the first index location (assumed to be the B0)", - argstr="--outputResampledB0 %s") + desc="Convenience function for extracting the first index location (assumed to be the B0)", + argstr="--outputResampledB0 %s", + ) inputTransform = File( - desc= - "Required: transform file derived from rigid registration of b0 image to reference structural image.", + desc="Required: transform file derived from rigid registration of b0 image to reference structural image.", exists=True, - argstr="--inputTransform %s") + argstr="--inputTransform %s", + ) warpDWITransform = File( desc="Optional: transform file to warp gradient volumes.", exists=True, - argstr="--warpDWITransform %s") + argstr="--warpDWITransform %s", + ) debugLevel = traits.Int( - desc= - "Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", - argstr="--debugLevel %d") + desc="Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", + argstr="--debugLevel %d", + ) imageOutputSize = InputMultiPath( traits.Int, - desc= - "The voxel lattice for the output image, padding is added if necessary. NOTE: if 0,0,0, then the inputVolume size is used.", + desc="The voxel lattice for the output image, padding is added if necessary. 
NOTE: if 0,0,0, then the inputVolume size is used.",
         sep=",",
-        argstr="--imageOutputSize %s")
+        argstr="--imageOutputSize %s",
+    )
     outputVolume = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
-        desc=
-        "Required: output image (NRRD file) that has been rigidly transformed into the space of the structural image and padded if image padding was changed from 0,0,0 default.",
-        argstr="--outputVolume %s")
+        desc="Required: output image (NRRD file) that has been rigidly transformed into the space of the structural image and padded if image padding was changed from 0,0,0 default.",
+        argstr="--outputVolume %s",
+    )
     numberOfThreads = traits.Int(
         desc="Explicitly specify the maximum number of threads to use.",
-        argstr="--numberOfThreads %d")
+        argstr="--numberOfThreads %d",
+    )


 class gtractResampleDWIInPlaceOutputSpec(TraitedSpec):
     outputResampledB0 = File(
-        desc=
-        "Convenience function for extracting the first index location (assumed to be the B0)",
-        exists=True)
+        desc="Convenience function for extracting the first index location (assumed to be the B0)",
+        exists=True,
+    )
     outputVolume = File(
-        desc=
-        "Required: output image (NRRD file) that has been rigidly transformed into the space of the structural image and padded if image padding was changed from 0,0,0 default.",
-        exists=True)
+        desc="Required: output image (NRRD file) that has been rigidly transformed into the space of the structural image and padded if image padding was changed from 0,0,0 default.",
+        exists=True,
+    )


 class gtractResampleDWIInPlace(SEMLikeCommandLine):
@@ -1278,8 +1347,8 @@ class gtractResampleDWIInPlace(SEMLikeCommandLine):
     output_spec = gtractResampleDWIInPlaceOutputSpec
     _cmd = " gtractResampleDWIInPlace "
     _outputs_filenames = {
-        'outputResampledB0': 'outputResampledB0.nii',
-        'outputVolume': 'outputVolume.nii'
+        "outputResampledB0": "outputResampledB0.nii",
+        "outputVolume": "outputVolume.nii",
     }
     _redirect_x = False


@@ -1288,42 +1357,50 @@ class gtractCostFastMarchingInputSpec(CommandLineInputSpec):
     inputTensorVolume = File(
         desc="Required: input tensor image file name",
         exists=True,
-        argstr="--inputTensorVolume %s")
+        argstr="--inputTensorVolume %s",
+    )
     inputAnisotropyVolume = File(
         desc="Required: input anisotropy image file name",
         exists=True,
-        argstr="--inputAnisotropyVolume %s")
+        argstr="--inputAnisotropyVolume %s",
+    )
     inputStartingSeedsLabelMapVolume = File(
         desc="Required: input starting seeds LabelMap image file name",
         exists=True,
-        argstr="--inputStartingSeedsLabelMapVolume %s")
+        argstr="--inputStartingSeedsLabelMapVolume %s",
+    )
     startingSeedsLabel = traits.Int(
-        desc="Label value for Starting Seeds",
-        argstr="--startingSeedsLabel %d")
+        desc="Label value for Starting Seeds", argstr="--startingSeedsLabel %d"
+    )
     outputCostVolume = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
         desc="Output vcl_cost image",
-        argstr="--outputCostVolume %s")
+        argstr="--outputCostVolume %s",
+    )
     outputSpeedVolume = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
         desc="Output speed image",
-        argstr="--outputSpeedVolume %s")
+        argstr="--outputSpeedVolume %s",
+    )
     anisotropyWeight = traits.Float(
         desc="Anisotropy weight used for vcl_cost function calculations",
-        argstr="--anisotropyWeight %f")
+        argstr="--anisotropyWeight %f",
+    )
     stoppingValue = traits.Float(
         desc="Terminating value for vcl_cost function estimation",
-        argstr="--stoppingValue %f")
+        argstr="--stoppingValue %f",
+    )
     seedThreshold = traits.Float(
-        desc="Anisotropy threshold used for seed selection",
-        argstr="--seedThreshold 
%f") + desc="Anisotropy threshold used for seed selection", argstr="--seedThreshold %f" + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractCostFastMarchingOutputSpec(TraitedSpec): @@ -1354,122 +1431,130 @@ class gtractCostFastMarching(SEMLikeCommandLine): output_spec = gtractCostFastMarchingOutputSpec _cmd = " gtractCostFastMarching " _outputs_filenames = { - 'outputCostVolume': 'outputCostVolume.nrrd', - 'outputSpeedVolume': 'outputSpeedVolume.nrrd' + "outputCostVolume": "outputCostVolume.nrrd", + "outputSpeedVolume": "outputSpeedVolume.nrrd", } _redirect_x = False class gtractFiberTrackingInputSpec(CommandLineInputSpec): inputTensorVolume = File( - desc= - "Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): input tensor image file name", + desc="Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): input tensor image file name", exists=True, - argstr="--inputTensorVolume %s") + argstr="--inputTensorVolume %s", + ) inputAnisotropyVolume = File( - desc= - "Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): input anisotropy image file name", + desc="Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): input anisotropy image file name", exists=True, - argstr="--inputAnisotropyVolume %s") + argstr="--inputAnisotropyVolume %s", + ) inputStartingSeedsLabelMapVolume = File( - desc= - "Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): input starting seeds LabelMap image file name", + desc="Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): input starting seeds LabelMap image file name", exists=True, - argstr="--inputStartingSeedsLabelMapVolume %s") + argstr="--inputStartingSeedsLabelMapVolume %s", + ) startingSeedsLabel = traits.Int( - desc= - "Label value for Starting Seeds (required if Label number used to create seed point in Slicer was not 1)", - argstr="--startingSeedsLabel %d") + desc="Label value for Starting Seeds (required if Label number used to create seed point in Slicer was not 1)", + argstr="--startingSeedsLabel %d", + ) inputEndingSeedsLabelMapVolume = File( - desc= - "Required (for Streamline, GraphSearch, and Guided fiber tracking methods): input ending seeds LabelMap image file name", + desc="Required (for Streamline, GraphSearch, and Guided fiber tracking methods): input ending seeds LabelMap image file name", exists=True, - argstr="--inputEndingSeedsLabelMapVolume %s") + argstr="--inputEndingSeedsLabelMapVolume %s", + ) endingSeedsLabel = traits.Int( - desc= - "Label value for Ending Seeds (required if Label number used to create seed point in Slicer was not 1)", - argstr="--endingSeedsLabel %d") + desc="Label value for Ending Seeds (required if Label number used to create seed point in Slicer was not 1)", + argstr="--endingSeedsLabel %d", + ) inputTract = File( - desc= - "Required (for Guided fiber tracking method): guide fiber in vtkPolydata file containing one tract line.", + desc="Required (for Guided fiber tracking method): guide fiber in vtkPolydata file containing one tract line.", exists=True, - argstr="--inputTract %s") + argstr="--inputTract %s", + ) outputTract = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): name of output vtkPolydata file containing tract lines and 
the point data collected along them.", - argstr="--outputTract %s") + desc="Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): name of output vtkPolydata file containing tract lines and the point data collected along them.", + argstr="--outputTract %s", + ) writeXMLPolyDataFile = traits.Bool( desc="Flag to make use of the XML format for vtkPolyData fiber tracts.", - argstr="--writeXMLPolyDataFile ") + argstr="--writeXMLPolyDataFile ", + ) trackingMethod = traits.Enum( "Guided", "Free", "Streamline", "GraphSearch", desc="Fiber tracking Filter Type: Guided|Free|Streamline|GraphSearch", - argstr="--trackingMethod %s") + argstr="--trackingMethod %s", + ) guidedCurvatureThreshold = traits.Float( desc="Guided Curvature Threshold (Degrees)", - argstr="--guidedCurvatureThreshold %f") + argstr="--guidedCurvatureThreshold %f", + ) maximumGuideDistance = traits.Float( desc="Maximum distance for using the guide fiber direction", - argstr="--maximumGuideDistance %f") + argstr="--maximumGuideDistance %f", + ) seedThreshold = traits.Float( - desc= - "Anisotropy threshold for seed selection (recommended for Free fiber tracking)", - argstr="--seedThreshold %f") + desc="Anisotropy threshold for seed selection (recommended for Free fiber tracking)", + argstr="--seedThreshold %f", + ) trackingThreshold = traits.Float( - desc= - "Anisotropy threshold for fiber tracking (anisotropy values of the next point along the path)", - argstr="--trackingThreshold %f") + desc="Anisotropy threshold for fiber tracking (anisotropy values of the next point along the path)", + argstr="--trackingThreshold %f", + ) curvatureThreshold = traits.Float( - desc= - "Curvature threshold in degrees (recommended for Free fiber tracking)", - argstr="--curvatureThreshold %f") + desc="Curvature threshold in degrees (recommended for Free fiber tracking)", + argstr="--curvatureThreshold %f", + ) branchingThreshold = traits.Float( - desc= - "Anisotropy Branching threshold (recommended for GraphSearch fiber tracking method)", - argstr="--branchingThreshold %f") + desc="Anisotropy Branching threshold (recommended for GraphSearch fiber tracking method)", + argstr="--branchingThreshold %f", + ) maximumBranchPoints = traits.Int( - desc= - "Maximum branch points (recommended for GraphSearch fiber tracking method)", - argstr="--maximumBranchPoints %d") + desc="Maximum branch points (recommended for GraphSearch fiber tracking method)", + argstr="--maximumBranchPoints %d", + ) useRandomWalk = traits.Bool( - desc="Flag to use random walk.", argstr="--useRandomWalk ") + desc="Flag to use random walk.", argstr="--useRandomWalk " + ) randomSeed = traits.Int( - desc="Random number generator seed", argstr="--randomSeed %d") + desc="Random number generator seed", argstr="--randomSeed %d" + ) branchingAngle = traits.Float( - desc= - "Branching angle in degrees (recommended for GraphSearch fiber tracking method)", - argstr="--branchingAngle %f") + desc="Branching angle in degrees (recommended for GraphSearch fiber tracking method)", + argstr="--branchingAngle %f", + ) minimumLength = traits.Float( desc="Minimum fiber length. 
Helpful for filtering invalid tracts.", - argstr="--minimumLength %f") + argstr="--minimumLength %f", + ) maximumLength = traits.Float( - desc="Maximum fiber length (voxels)", argstr="--maximumLength %f") - stepSize = traits.Float( - desc="Fiber tracking step size", argstr="--stepSize %f") + desc="Maximum fiber length (voxels)", argstr="--maximumLength %f" + ) + stepSize = traits.Float(desc="Fiber tracking step size", argstr="--stepSize %f") useLoopDetection = traits.Bool( - desc="Flag to make use of loop detection.", - argstr="--useLoopDetection ") + desc="Flag to make use of loop detection.", argstr="--useLoopDetection " + ) useTend = traits.Bool( - desc="Flag to make use of Tend F and Tend G parameters.", - argstr="--useTend ") + desc="Flag to make use of Tend F and Tend G parameters.", argstr="--useTend " + ) tendF = traits.Float(desc="Tend F parameter", argstr="--tendF %f") tendG = traits.Float(desc="Tend G parameter", argstr="--tendG %f") numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractFiberTrackingOutputSpec(TraitedSpec): outputTract = File( - desc= - "Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): name of output vtkPolydata file containing tract lines and the point data collected along them.", - exists=True) + desc="Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): name of output vtkPolydata file containing tract lines and the point data collected along them.", + exists=True, + ) class gtractFiberTracking(SEMLikeCommandLine): @@ -1494,43 +1579,45 @@ class gtractFiberTracking(SEMLikeCommandLine): input_spec = gtractFiberTrackingInputSpec output_spec = gtractFiberTrackingOutputSpec _cmd = " gtractFiberTracking " - _outputs_filenames = {'outputTract': 'outputTract.vtk'} + _outputs_filenames = {"outputTract": "outputTract.vtk"} _redirect_x = False class extractNrrdVectorIndexInputSpec(CommandLineInputSpec): inputVolume = File( - desc= - "Required: input file containing the vector that will be extracted", + desc="Required: input file containing the vector that will be extracted", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) vectorIndex = traits.Int( - desc="Index in the vector image to extract", argstr="--vectorIndex %d") + desc="Index in the vector image to extract", argstr="--vectorIndex %d" + ) setImageOrientation = traits.Enum( "AsAcquired", "Axial", "Coronal", "Sagittal", - desc= - "Sets the image orientation of the extracted vector (Axial, Coronal, Sagittal)", - argstr="--setImageOrientation %s") + desc="Sets the image orientation of the extracted vector (Axial, Coronal, Sagittal)", + argstr="--setImageOrientation %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output NRRD file containing the vector image at the given index", - argstr="--outputVolume %s") + desc="Required: name of output NRRD file containing the vector image at the given index", + argstr="--outputVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class extractNrrdVectorIndexOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD file containing the vector image at the given index", - exists=True) + desc="Required: name of output NRRD file containing the vector image at the given 
index", + exists=True, + ) class extractNrrdVectorIndex(SEMLikeCommandLine): @@ -1555,7 +1642,7 @@ class extractNrrdVectorIndex(SEMLikeCommandLine): input_spec = extractNrrdVectorIndexInputSpec output_spec = extractNrrdVectorIndexOutputSpec _cmd = " extractNrrdVectorIndex " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False @@ -1563,35 +1650,40 @@ class gtractResampleFibersInputSpec(CommandLineInputSpec): inputForwardDeformationFieldVolume = File( desc="Required: input forward deformation field image file name", exists=True, - argstr="--inputForwardDeformationFieldVolume %s") + argstr="--inputForwardDeformationFieldVolume %s", + ) inputReverseDeformationFieldVolume = File( desc="Required: input reverse deformation field image file name", exists=True, - argstr="--inputReverseDeformationFieldVolume %s") + argstr="--inputReverseDeformationFieldVolume %s", + ) inputTract = File( desc="Required: name of input vtkPolydata file containing tract lines.", exists=True, - argstr="--inputTract %s") + argstr="--inputTract %s", + ) outputTract = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", - argstr="--outputTract %s") + desc="Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", + argstr="--outputTract %s", + ) writeXMLPolyDataFile = traits.Bool( desc="Flag to make use of the XML format for vtkPolyData fiber tracts.", - argstr="--writeXMLPolyDataFile ") + argstr="--writeXMLPolyDataFile ", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractResampleFibersOutputSpec(TraitedSpec): outputTract = File( - desc= - "Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", - exists=True) + desc="Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", + exists=True, + ) class gtractResampleFibers(SEMLikeCommandLine): @@ -1616,70 +1708,74 @@ class gtractResampleFibers(SEMLikeCommandLine): input_spec = gtractResampleFibersInputSpec output_spec = gtractResampleFibersOutputSpec _cmd = " gtractResampleFibers " - _outputs_filenames = {'outputTract': 'outputTract.vtk'} + _outputs_filenames = {"outputTract": "outputTract.vtk"} _redirect_x = False class gtractTensorInputSpec(CommandLineInputSpec): inputVolume = File( - desc= - "Required: input image 4D NRRD image. Must contain data based on at least 6 distinct diffusion directions. The inputVolume is allowed to have multiple b0 and gradient direction images. Averaging of the b0 image is done internally in this step. Prior averaging of the DWIs is not required.", + desc="Required: input image 4D NRRD image. Must contain data based on at least 6 distinct diffusion directions. The inputVolume is allowed to have multiple b0 and gradient direction images. Averaging of the b0 image is done internally in this step. 
Prior averaging of the DWIs is not required.",
         exists=True,
-        argstr="--inputVolume %s")
+        argstr="--inputVolume %s",
+    )
     outputVolume = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
-        desc=
-        "Required: name of output NRRD file containing the Tensor vector image",
-        argstr="--outputVolume %s")
+        desc="Required: name of output NRRD file containing the Tensor vector image",
+        argstr="--outputVolume %s",
+    )
     medianFilterSize = InputMultiPath(
         traits.Int,
         desc="Median filter radius in all 3 directions",
         sep=",",
-        argstr="--medianFilterSize %s")
+        argstr="--medianFilterSize %s",
+    )
     maskProcessingMode = traits.Enum(
         "NOMASK",
         "ROIAUTO",
         "ROI",
-        desc=
-        "ROIAUTO: mask is implicitly defined using a otsu forground and hole filling algorithm. ROI: Uses the masks to define what parts of the image should be used for computing the transform. NOMASK: no mask used",
-        argstr="--maskProcessingMode %s")
+        desc="ROIAUTO: mask is implicitly defined using an Otsu foreground and hole filling algorithm. ROI: Uses the masks to define what parts of the image should be used for computing the transform. NOMASK: no mask used",
+        argstr="--maskProcessingMode %s",
+    )
     maskVolume = File(
         desc="Mask Image, if maskProcessingMode is ROI",
         exists=True,
-        argstr="--maskVolume %s")
+        argstr="--maskVolume %s",
+    )
     backgroundSuppressingThreshold = traits.Int(
-        desc=
-        "Image threshold to suppress background. This sets a threshold used on the b0 image to remove background voxels from processing. Typically, values of 100 and 500 work well for Siemens and GE DTI data, respectively. Check your data particularly in the globus pallidus to make sure the brain tissue is not being eliminated with this threshold.",
-        argstr="--backgroundSuppressingThreshold %d")
+        desc="Image threshold to suppress background. This sets a threshold used on the b0 image to remove background voxels from processing. Typically, values of 100 and 500 work well for Siemens and GE DTI data, respectively. Check your data particularly in the globus pallidus to make sure the brain tissue is not being eliminated with this threshold.",
+        argstr="--backgroundSuppressingThreshold %d",
+    )
     resampleIsotropic = traits.Bool(
-        desc=
-        "Flag to resample to isotropic voxels. Enabling this feature is recommended if fiber tracking will be performed.",
-        argstr="--resampleIsotropic ")
-    size = traits.Float(
-        desc="Isotropic voxel size to resample to", argstr="--size %f")
+        desc="Flag to resample to isotropic voxels. Enabling this feature is recommended if fiber tracking will be performed.",
+        argstr="--resampleIsotropic ",
+    )
+    size = traits.Float(desc="Isotropic voxel size to resample to", argstr="--size %f")
     b0Index = traits.Int(
-        desc="Index in input vector index to extract", argstr="--b0Index %d")
+        desc="Index in input vector index to extract", argstr="--b0Index %d"
+    )
     applyMeasurementFrame = traits.Bool(
         desc="Flag to apply the measurement frame to the gradient directions",
-        argstr="--applyMeasurementFrame ")
+        argstr="--applyMeasurementFrame ",
+    )
     ignoreIndex = InputMultiPath(
         traits.Int,
-        desc=
-        "Ignore diffusion gradient index. Used to remove specific gradient directions with artifacts.",
+        desc="Ignore diffusion gradient index. 
Used to remove specific gradient directions with artifacts.", sep=",", - argstr="--ignoreIndex %s") + argstr="--ignoreIndex %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class gtractTensorOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: name of output NRRD file containing the Tensor vector image", - exists=True) + desc="Required: name of output NRRD file containing the Tensor vector image", + exists=True, + ) class gtractTensor(SEMLikeCommandLine): @@ -1704,5 +1800,5 @@ class gtractTensor(SEMLikeCommandLine): input_spec = gtractTensorInputSpec output_spec = gtractTensorOutputSpec _cmd = " gtractTensor " - _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'} + _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False diff --git a/nipype/interfaces/semtools/diffusion/maxcurvature.py b/nipype/interfaces/semtools/diffusion/maxcurvature.py index 570109eb1b..fdd5057097 100644 --- a/nipype/interfaces/semtools/diffusion/maxcurvature.py +++ b/nipype/interfaces/semtools/diffusion/maxcurvature.py @@ -5,19 +5,25 @@ import os -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class maxcurvatureInputSpec(CommandLineInputSpec): image = File(desc="FA Image", exists=True, argstr="--image %s") output = traits.Either( - traits.Bool, - File(), - hash_files=False, - desc="Output File", - argstr="--output %s") + traits.Bool, File(), hash_files=False, desc="Output File", argstr="--output %s" + ) sigma = traits.Float(desc="Scale of Gradients", argstr="--sigma %f") verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") @@ -52,5 +58,5 @@ class maxcurvature(SEMLikeCommandLine): input_spec = maxcurvatureInputSpec output_spec = maxcurvatureOutputSpec _cmd = " maxcurvature " - _outputs_filenames = {'output': 'output.nii'} + _outputs_filenames = {"output": "output.nii"} _redirect_x = False diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_DWIConvert.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_DWIConvert.py index 3b1578cc42..d1f8c33324 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_DWIConvert.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_DWIConvert.py @@ -4,72 +4,41 @@ def test_DWIConvert_inputs(): input_map = dict( - allowLossyConversion=dict(argstr='--allowLossyConversion ', ), - args=dict(argstr='%s', ), - conversionMode=dict(argstr='--conversionMode %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fMRI=dict(argstr='--fMRI ', ), - fslNIFTIFile=dict( - argstr='--fslNIFTIFile %s', - extensions=None, - ), - gradientVectorFile=dict( - argstr='--gradientVectorFile %s', - hash_files=False, - ), - inputBValues=dict( - argstr='--inputBValues %s', - extensions=None, - ), - inputBVectors=dict( - argstr='--inputBVectors %s', - extensions=None, - ), - inputDicomDirectory=dict(argstr='--inputDicomDirectory %s', ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - outputBValues=dict( - argstr='--outputBValues %s', - hash_files=False, - ), - outputBVectors=dict( - argstr='--outputBVectors %s', - hash_files=False, - ), - outputDirectory=dict( - 
argstr='--outputDirectory %s', - hash_files=False, - ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - smallGradientThreshold=dict(argstr='--smallGradientThreshold %f', ), - transposeInputBVectors=dict(argstr='--transposeInputBVectors ', ), - useBMatrixGradientDirections=dict( - argstr='--useBMatrixGradientDirections ', ), - useIdentityMeaseurementFrame=dict( - argstr='--useIdentityMeaseurementFrame ', ), - writeProtocolGradientsFile=dict( - argstr='--writeProtocolGradientsFile ', ), + allowLossyConversion=dict(argstr="--allowLossyConversion ",), + args=dict(argstr="%s",), + conversionMode=dict(argstr="--conversionMode %s",), + environ=dict(nohash=True, usedefault=True,), + fMRI=dict(argstr="--fMRI ",), + fslNIFTIFile=dict(argstr="--fslNIFTIFile %s", extensions=None,), + gradientVectorFile=dict(argstr="--gradientVectorFile %s", hash_files=False,), + inputBValues=dict(argstr="--inputBValues %s", extensions=None,), + inputBVectors=dict(argstr="--inputBVectors %s", extensions=None,), + inputDicomDirectory=dict(argstr="--inputDicomDirectory %s",), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + outputBValues=dict(argstr="--outputBValues %s", hash_files=False,), + outputBVectors=dict(argstr="--outputBVectors %s", hash_files=False,), + outputDirectory=dict(argstr="--outputDirectory %s", hash_files=False,), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + smallGradientThreshold=dict(argstr="--smallGradientThreshold %f",), + transposeInputBVectors=dict(argstr="--transposeInputBVectors ",), + useBMatrixGradientDirections=dict(argstr="--useBMatrixGradientDirections ",), + useIdentityMeaseurementFrame=dict(argstr="--useIdentityMeaseurementFrame ",), + writeProtocolGradientsFile=dict(argstr="--writeProtocolGradientsFile ",), ) inputs = DWIConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DWIConvert_outputs(): output_map = dict( - gradientVectorFile=dict(extensions=None, ), - outputBValues=dict(extensions=None, ), - outputBVectors=dict(extensions=None, ), + gradientVectorFile=dict(extensions=None,), + outputBValues=dict(extensions=None,), + outputBVectors=dict(extensions=None,), outputDirectory=dict(), - outputVolume=dict(extensions=None, ), + outputVolume=dict(extensions=None,), ) outputs = DWIConvert.output_spec() diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_compareTractInclusion.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_compareTractInclusion.py index 601c33628b..ed184ae4f2 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_compareTractInclusion.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_compareTractInclusion.py @@ -4,31 +4,24 @@ def test_compareTractInclusion_inputs(): input_map = dict( - args=dict(argstr='%s', ), - closeness=dict(argstr='--closeness %f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - numberOfPoints=dict(argstr='--numberOfPoints %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - standardFiber=dict( - argstr='--standardFiber %s', - extensions=None, - ), - testFiber=dict( - argstr='--testFiber %s', - extensions=None, - ), - testForBijection=dict(argstr='--testForBijection ', ), - testForFiberCardinality=dict(argstr='--testForFiberCardinality ', ), - writeXMLPolyDataFile=dict(argstr='--writeXMLPolyDataFile ', ), + args=dict(argstr="%s",), + closeness=dict(argstr="--closeness %f",), + 
environ=dict(nohash=True, usedefault=True,), + numberOfPoints=dict(argstr="--numberOfPoints %d",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + standardFiber=dict(argstr="--standardFiber %s", extensions=None,), + testFiber=dict(argstr="--testFiber %s", extensions=None,), + testForBijection=dict(argstr="--testForBijection ",), + testForFiberCardinality=dict(argstr="--testForFiberCardinality ",), + writeXMLPolyDataFile=dict(argstr="--writeXMLPolyDataFile ",), ) inputs = compareTractInclusion.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_compareTractInclusion_outputs(): output_map = dict() outputs = compareTractInclusion.output_spec() diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiaverage.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiaverage.py index 6af27942ea..fe4e00032b 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiaverage.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiaverage.py @@ -4,26 +4,22 @@ def test_dtiaverage_inputs(): input_map = dict( - DTI_double=dict(argstr='--DTI_double ', ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputs=dict(argstr='--inputs %s...', ), - tensor_output=dict( - argstr='--tensor_output %s', - hash_files=False, - ), - verbose=dict(argstr='--verbose ', ), + DTI_double=dict(argstr="--DTI_double ",), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputs=dict(argstr="--inputs %s...",), + tensor_output=dict(argstr="--tensor_output %s", hash_files=False,), + verbose=dict(argstr="--verbose ",), ) inputs = dtiaverage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_dtiaverage_outputs(): - output_map = dict(tensor_output=dict(extensions=None, ), ) + output_map = dict(tensor_output=dict(extensions=None,),) outputs = dtiaverage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiestim.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiestim.py index afa519667b..c7586fc34d 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiestim.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiestim.py @@ -4,65 +4,40 @@ def test_dtiestim_inputs(): input_map = dict( - B0=dict( - argstr='--B0 %s', - hash_files=False, - ), - B0_mask_output=dict( - argstr='--B0_mask_output %s', - hash_files=False, - ), - DTI_double=dict(argstr='--DTI_double ', ), - args=dict(argstr='%s', ), - bad_region_mask=dict( - argstr='--bad_region_mask %s', - extensions=None, - ), - brain_mask=dict( - argstr='--brain_mask %s', - extensions=None, - ), - correction=dict(argstr='--correction %s', ), - defaultTensor=dict( - argstr='--defaultTensor %s', - sep=',', - ), - dwi_image=dict( - argstr='--dwi_image %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - idwi=dict( - argstr='--idwi %s', - hash_files=False, - ), - method=dict(argstr='--method %s', ), - shiftNeg=dict(argstr='--shiftNeg ', ), - shiftNegCoeff=dict(argstr='--shiftNegCoeff %f', ), - sigma=dict(argstr='--sigma %f', ), - step=dict(argstr='--step %f', ), - tensor_output=dict( - argstr='--tensor_output %s', - hash_files=False, - ), - threshold=dict(argstr='--threshold %d', ), - 
verbose=dict(argstr='--verbose ', ), - weight_iterations=dict(argstr='--weight_iterations %d', ), + B0=dict(argstr="--B0 %s", hash_files=False,), + B0_mask_output=dict(argstr="--B0_mask_output %s", hash_files=False,), + DTI_double=dict(argstr="--DTI_double ",), + args=dict(argstr="%s",), + bad_region_mask=dict(argstr="--bad_region_mask %s", extensions=None,), + brain_mask=dict(argstr="--brain_mask %s", extensions=None,), + correction=dict(argstr="--correction %s",), + defaultTensor=dict(argstr="--defaultTensor %s", sep=",",), + dwi_image=dict(argstr="--dwi_image %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), + idwi=dict(argstr="--idwi %s", hash_files=False,), + method=dict(argstr="--method %s",), + shiftNeg=dict(argstr="--shiftNeg ",), + shiftNegCoeff=dict(argstr="--shiftNegCoeff %f",), + sigma=dict(argstr="--sigma %f",), + step=dict(argstr="--step %f",), + tensor_output=dict(argstr="--tensor_output %s", hash_files=False,), + threshold=dict(argstr="--threshold %d",), + verbose=dict(argstr="--verbose ",), + weight_iterations=dict(argstr="--weight_iterations %d",), ) inputs = dtiestim.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_dtiestim_outputs(): output_map = dict( - B0=dict(extensions=None, ), - B0_mask_output=dict(extensions=None, ), - idwi=dict(extensions=None, ), - tensor_output=dict(extensions=None, ), + B0=dict(extensions=None,), + B0_mask_output=dict(extensions=None,), + idwi=dict(extensions=None,), + tensor_output=dict(extensions=None,), ) outputs = dtiestim.output_spec() diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiprocess.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiprocess.py index 5095e7d469..01a53b18cc 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiprocess.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiprocess.py @@ -4,126 +4,68 @@ def test_dtiprocess_inputs(): input_map = dict( - DTI_double=dict(argstr='--DTI_double ', ), - RD_output=dict( - argstr='--RD_output %s', - hash_files=False, - ), - affineitk_file=dict( - argstr='--affineitk_file %s', - extensions=None, - ), - args=dict(argstr='%s', ), - color_fa_output=dict( - argstr='--color_fa_output %s', - hash_files=False, - ), - correction=dict(argstr='--correction %s', ), - deformation_output=dict( - argstr='--deformation_output %s', - hash_files=False, - ), - dof_file=dict( - argstr='--dof_file %s', - extensions=None, - ), - dti_image=dict( - argstr='--dti_image %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fa_gradient_output=dict( - argstr='--fa_gradient_output %s', - hash_files=False, - ), - fa_gradmag_output=dict( - argstr='--fa_gradmag_output %s', - hash_files=False, - ), - fa_output=dict( - argstr='--fa_output %s', - hash_files=False, - ), - forward=dict( - argstr='--forward %s', - extensions=None, - ), + DTI_double=dict(argstr="--DTI_double ",), + RD_output=dict(argstr="--RD_output %s", hash_files=False,), + affineitk_file=dict(argstr="--affineitk_file %s", extensions=None,), + args=dict(argstr="%s",), + color_fa_output=dict(argstr="--color_fa_output %s", hash_files=False,), + correction=dict(argstr="--correction %s",), + deformation_output=dict(argstr="--deformation_output %s", hash_files=False,), + dof_file=dict(argstr="--dof_file %s", extensions=None,), + dti_image=dict(argstr="--dti_image %s", extensions=None,), + environ=dict(nohash=True, 
usedefault=True,), + fa_gradient_output=dict(argstr="--fa_gradient_output %s", hash_files=False,), + fa_gradmag_output=dict(argstr="--fa_gradmag_output %s", hash_files=False,), + fa_output=dict(argstr="--fa_output %s", hash_files=False,), + forward=dict(argstr="--forward %s", extensions=None,), frobenius_norm_output=dict( - argstr='--frobenius_norm_output %s', - hash_files=False, - ), - hField=dict(argstr='--hField ', ), - interpolation=dict(argstr='--interpolation %s', ), - lambda1_output=dict( - argstr='--lambda1_output %s', - hash_files=False, - ), - lambda2_output=dict( - argstr='--lambda2_output %s', - hash_files=False, - ), - lambda3_output=dict( - argstr='--lambda3_output %s', - hash_files=False, - ), - mask=dict( - argstr='--mask %s', - extensions=None, - ), - md_output=dict( - argstr='--md_output %s', - hash_files=False, - ), + argstr="--frobenius_norm_output %s", hash_files=False, + ), + hField=dict(argstr="--hField ",), + interpolation=dict(argstr="--interpolation %s",), + lambda1_output=dict(argstr="--lambda1_output %s", hash_files=False,), + lambda2_output=dict(argstr="--lambda2_output %s", hash_files=False,), + lambda3_output=dict(argstr="--lambda3_output %s", hash_files=False,), + mask=dict(argstr="--mask %s", extensions=None,), + md_output=dict(argstr="--md_output %s", hash_files=False,), negative_eigenvector_output=dict( - argstr='--negative_eigenvector_output %s', - hash_files=False, - ), - newdof_file=dict( - argstr='--newdof_file %s', - extensions=None, - ), - outmask=dict( - argstr='--outmask %s', - hash_files=False, + argstr="--negative_eigenvector_output %s", hash_files=False, ), + newdof_file=dict(argstr="--newdof_file %s", extensions=None,), + outmask=dict(argstr="--outmask %s", hash_files=False,), principal_eigenvector_output=dict( - argstr='--principal_eigenvector_output %s', - hash_files=False, - ), - reorientation=dict(argstr='--reorientation %s', ), - rot_output=dict( - argstr='--rot_output %s', - hash_files=False, + argstr="--principal_eigenvector_output %s", hash_files=False, ), - scalar_float=dict(argstr='--scalar_float ', ), - sigma=dict(argstr='--sigma %f', ), - verbose=dict(argstr='--verbose ', ), + reorientation=dict(argstr="--reorientation %s",), + rot_output=dict(argstr="--rot_output %s", hash_files=False,), + scalar_float=dict(argstr="--scalar_float ",), + sigma=dict(argstr="--sigma %f",), + verbose=dict(argstr="--verbose ",), ) inputs = dtiprocess.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_dtiprocess_outputs(): output_map = dict( - RD_output=dict(extensions=None, ), - color_fa_output=dict(extensions=None, ), - deformation_output=dict(extensions=None, ), - fa_gradient_output=dict(extensions=None, ), - fa_gradmag_output=dict(extensions=None, ), - fa_output=dict(extensions=None, ), - frobenius_norm_output=dict(extensions=None, ), - lambda1_output=dict(extensions=None, ), - lambda2_output=dict(extensions=None, ), - lambda3_output=dict(extensions=None, ), - md_output=dict(extensions=None, ), - negative_eigenvector_output=dict(extensions=None, ), - outmask=dict(extensions=None, ), - principal_eigenvector_output=dict(extensions=None, ), - rot_output=dict(extensions=None, ), + RD_output=dict(extensions=None,), + color_fa_output=dict(extensions=None,), + deformation_output=dict(extensions=None,), + fa_gradient_output=dict(extensions=None,), + fa_gradmag_output=dict(extensions=None,), + fa_output=dict(extensions=None,), + 
frobenius_norm_output=dict(extensions=None,), + lambda1_output=dict(extensions=None,), + lambda2_output=dict(extensions=None,), + lambda3_output=dict(extensions=None,), + md_output=dict(extensions=None,), + negative_eigenvector_output=dict(extensions=None,), + outmask=dict(extensions=None,), + principal_eigenvector_output=dict(extensions=None,), + rot_output=dict(extensions=None,), ) outputs = dtiprocess.output_spec() diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_extractNrrdVectorIndex.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_extractNrrdVectorIndex.py index f0ef5a1bbd..51ec99b1b8 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_extractNrrdVectorIndex.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_extractNrrdVectorIndex.py @@ -4,30 +4,23 @@ def test_extractNrrdVectorIndex_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - setImageOrientation=dict(argstr='--setImageOrientation %s', ), - vectorIndex=dict(argstr='--vectorIndex %d', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + setImageOrientation=dict(argstr="--setImageOrientation %s",), + vectorIndex=dict(argstr="--vectorIndex %d",), ) inputs = extractNrrdVectorIndex.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_extractNrrdVectorIndex_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = extractNrrdVectorIndex.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractAnisotropyMap.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractAnisotropyMap.py index 02e8e364ee..3af3c53648 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractAnisotropyMap.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractAnisotropyMap.py @@ -4,29 +4,22 @@ def test_gtractAnisotropyMap_inputs(): input_map = dict( - anisotropyType=dict(argstr='--anisotropyType %s', ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputTensorVolume=dict( - argstr='--inputTensorVolume %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + anisotropyType=dict(argstr="--anisotropyType %s",), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputTensorVolume=dict(argstr="--inputTensorVolume %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), ) inputs = gtractAnisotropyMap.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractAnisotropyMap_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = 
dict(outputVolume=dict(extensions=None,),) outputs = gtractAnisotropyMap.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractAverageBvalues.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractAverageBvalues.py index 59f8fef00c..1155f11628 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractAverageBvalues.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractAverageBvalues.py @@ -4,30 +4,23 @@ def test_gtractAverageBvalues_inputs(): input_map = dict( - args=dict(argstr='%s', ), - averageB0only=dict(argstr='--averageB0only ', ), - directionsTolerance=dict(argstr='--directionsTolerance %f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + args=dict(argstr="%s",), + averageB0only=dict(argstr="--averageB0only ",), + directionsTolerance=dict(argstr="--directionsTolerance %f",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), ) inputs = gtractAverageBvalues.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractAverageBvalues_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = gtractAverageBvalues.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractClipAnisotropy.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractClipAnisotropy.py index cf44a5361e..00fc963f69 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractClipAnisotropy.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractClipAnisotropy.py @@ -4,30 +4,23 @@ def test_gtractClipAnisotropy_inputs(): input_map = dict( - args=dict(argstr='%s', ), - clipFirstSlice=dict(argstr='--clipFirstSlice ', ), - clipLastSlice=dict(argstr='--clipLastSlice ', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + args=dict(argstr="%s",), + clipFirstSlice=dict(argstr="--clipFirstSlice ",), + clipLastSlice=dict(argstr="--clipLastSlice ",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), ) inputs = gtractClipAnisotropy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractClipAnisotropy_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = gtractClipAnisotropy.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCoRegAnatomy.py 
b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCoRegAnatomy.py index ae2924540d..a5d2337c44 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCoRegAnatomy.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCoRegAnatomy.py @@ -4,58 +4,44 @@ def test_gtractCoRegAnatomy_inputs(): input_map = dict( - args=dict(argstr='%s', ), - borderSize=dict(argstr='--borderSize %d', ), - convergence=dict(argstr='--convergence %f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - gradientTolerance=dict(argstr='--gradientTolerance %f', ), - gridSize=dict( - argstr='--gridSize %s', - sep=',', - ), + args=dict(argstr="%s",), + borderSize=dict(argstr="--borderSize %d",), + convergence=dict(argstr="--convergence %f",), + environ=dict(nohash=True, usedefault=True,), + gradientTolerance=dict(argstr="--gradientTolerance %f",), + gridSize=dict(argstr="--gridSize %s", sep=",",), inputAnatomicalVolume=dict( - argstr='--inputAnatomicalVolume %s', - extensions=None, - ), - inputRigidTransform=dict( - argstr='--inputRigidTransform %s', - extensions=None, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, + argstr="--inputAnatomicalVolume %s", extensions=None, ), - maxBSplineDisplacement=dict(argstr='--maxBSplineDisplacement %f', ), - maximumStepSize=dict(argstr='--maximumStepSize %f', ), - minimumStepSize=dict(argstr='--minimumStepSize %f', ), - numberOfHistogramBins=dict(argstr='--numberOfHistogramBins %d', ), - numberOfIterations=dict(argstr='--numberOfIterations %d', ), - numberOfSamples=dict(argstr='--numberOfSamples %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputTransformName=dict( - argstr='--outputTransformName %s', - hash_files=False, - ), - relaxationFactor=dict(argstr='--relaxationFactor %f', ), - samplingPercentage=dict(argstr='--samplingPercentage %f', ), - spatialScale=dict(argstr='--spatialScale %d', ), - transformType=dict(argstr='--transformType %s', ), - translationScale=dict(argstr='--translationScale %f', ), - useCenterOfHeadAlign=dict(argstr='--useCenterOfHeadAlign ', ), - useGeometryAlign=dict(argstr='--useGeometryAlign ', ), - useMomentsAlign=dict(argstr='--useMomentsAlign ', ), - vectorIndex=dict(argstr='--vectorIndex %d', ), + inputRigidTransform=dict(argstr="--inputRigidTransform %s", extensions=None,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + maxBSplineDisplacement=dict(argstr="--maxBSplineDisplacement %f",), + maximumStepSize=dict(argstr="--maximumStepSize %f",), + minimumStepSize=dict(argstr="--minimumStepSize %f",), + numberOfHistogramBins=dict(argstr="--numberOfHistogramBins %d",), + numberOfIterations=dict(argstr="--numberOfIterations %d",), + numberOfSamples=dict(argstr="--numberOfSamples %d",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputTransformName=dict(argstr="--outputTransformName %s", hash_files=False,), + relaxationFactor=dict(argstr="--relaxationFactor %f",), + samplingPercentage=dict(argstr="--samplingPercentage %f",), + spatialScale=dict(argstr="--spatialScale %d",), + transformType=dict(argstr="--transformType %s",), + translationScale=dict(argstr="--translationScale %f",), + useCenterOfHeadAlign=dict(argstr="--useCenterOfHeadAlign ",), + useGeometryAlign=dict(argstr="--useGeometryAlign ",), + useMomentsAlign=dict(argstr="--useMomentsAlign ",), + vectorIndex=dict(argstr="--vectorIndex %d",), ) inputs = gtractCoRegAnatomy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): 
assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractCoRegAnatomy_outputs(): - output_map = dict(outputTransformName=dict(extensions=None, ), ) + output_map = dict(outputTransformName=dict(extensions=None,),) outputs = gtractCoRegAnatomy.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractConcatDwi.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractConcatDwi.py index 3320da9206..8fd46f9ab6 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractConcatDwi.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractConcatDwi.py @@ -4,26 +4,22 @@ def test_gtractConcatDwi_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - ignoreOrigins=dict(argstr='--ignoreOrigins ', ), - inputVolume=dict(argstr='--inputVolume %s...', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + ignoreOrigins=dict(argstr="--ignoreOrigins ",), + inputVolume=dict(argstr="--inputVolume %s...",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), ) inputs = gtractConcatDwi.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractConcatDwi_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = gtractConcatDwi.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCopyImageOrientation.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCopyImageOrientation.py index e9ac5031cb..4ce50c9faa 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCopyImageOrientation.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCopyImageOrientation.py @@ -4,32 +4,22 @@ def test_gtractCopyImageOrientation_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputReferenceVolume=dict( - argstr='--inputReferenceVolume %s', - extensions=None, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputReferenceVolume=dict(argstr="--inputReferenceVolume %s", extensions=None,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), ) inputs = gtractCopyImageOrientation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractCopyImageOrientation_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = gtractCopyImageOrientation.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCoregBvalues.py 
b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCoregBvalues.py index 316c2ab507..639dc8cd69 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCoregBvalues.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCoregBvalues.py @@ -4,49 +4,35 @@ def test_gtractCoregBvalues_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debugLevel=dict(argstr='--debugLevel %d', ), - eddyCurrentCorrection=dict(argstr='--eddyCurrentCorrection ', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixedVolume=dict( - argstr='--fixedVolume %s', - extensions=None, - ), - fixedVolumeIndex=dict(argstr='--fixedVolumeIndex %d', ), - maximumStepSize=dict(argstr='--maximumStepSize %f', ), - minimumStepSize=dict(argstr='--minimumStepSize %f', ), - movingVolume=dict( - argstr='--movingVolume %s', - extensions=None, - ), - numberOfIterations=dict(argstr='--numberOfIterations %d', ), - numberOfSpatialSamples=dict(argstr='--numberOfSpatialSamples %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputTransform=dict( - argstr='--outputTransform %s', - hash_files=False, - ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - registerB0Only=dict(argstr='--registerB0Only ', ), - relaxationFactor=dict(argstr='--relaxationFactor %f', ), - samplingPercentage=dict(argstr='--samplingPercentage %f', ), - spatialScale=dict(argstr='--spatialScale %f', ), + args=dict(argstr="%s",), + debugLevel=dict(argstr="--debugLevel %d",), + eddyCurrentCorrection=dict(argstr="--eddyCurrentCorrection ",), + environ=dict(nohash=True, usedefault=True,), + fixedVolume=dict(argstr="--fixedVolume %s", extensions=None,), + fixedVolumeIndex=dict(argstr="--fixedVolumeIndex %d",), + maximumStepSize=dict(argstr="--maximumStepSize %f",), + minimumStepSize=dict(argstr="--minimumStepSize %f",), + movingVolume=dict(argstr="--movingVolume %s", extensions=None,), + numberOfIterations=dict(argstr="--numberOfIterations %d",), + numberOfSpatialSamples=dict(argstr="--numberOfSpatialSamples %d",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputTransform=dict(argstr="--outputTransform %s", hash_files=False,), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + registerB0Only=dict(argstr="--registerB0Only ",), + relaxationFactor=dict(argstr="--relaxationFactor %f",), + samplingPercentage=dict(argstr="--samplingPercentage %f",), + spatialScale=dict(argstr="--spatialScale %f",), ) inputs = gtractCoregBvalues.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractCoregBvalues_outputs(): output_map = dict( - outputTransform=dict(extensions=None, ), - outputVolume=dict(extensions=None, ), + outputTransform=dict(extensions=None,), outputVolume=dict(extensions=None,), ) outputs = gtractCoregBvalues.output_spec() diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCostFastMarching.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCostFastMarching.py index 08e262b306..cd5d34952d 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCostFastMarching.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCostFastMarching.py @@ -4,46 +4,34 @@ def test_gtractCostFastMarching_inputs(): input_map = dict( - anisotropyWeight=dict(argstr='--anisotropyWeight %f', ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + 
anisotropyWeight=dict(argstr="--anisotropyWeight %f",), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), inputAnisotropyVolume=dict( - argstr='--inputAnisotropyVolume %s', - extensions=None, + argstr="--inputAnisotropyVolume %s", extensions=None, ), inputStartingSeedsLabelMapVolume=dict( - argstr='--inputStartingSeedsLabelMapVolume %s', - extensions=None, - ), - inputTensorVolume=dict( - argstr='--inputTensorVolume %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputCostVolume=dict( - argstr='--outputCostVolume %s', - hash_files=False, + argstr="--inputStartingSeedsLabelMapVolume %s", extensions=None, ), - outputSpeedVolume=dict( - argstr='--outputSpeedVolume %s', - hash_files=False, - ), - seedThreshold=dict(argstr='--seedThreshold %f', ), - startingSeedsLabel=dict(argstr='--startingSeedsLabel %d', ), - stoppingValue=dict(argstr='--stoppingValue %f', ), + inputTensorVolume=dict(argstr="--inputTensorVolume %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputCostVolume=dict(argstr="--outputCostVolume %s", hash_files=False,), + outputSpeedVolume=dict(argstr="--outputSpeedVolume %s", hash_files=False,), + seedThreshold=dict(argstr="--seedThreshold %f",), + startingSeedsLabel=dict(argstr="--startingSeedsLabel %d",), + stoppingValue=dict(argstr="--stoppingValue %f",), ) inputs = gtractCostFastMarching.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractCostFastMarching_outputs(): output_map = dict( - outputCostVolume=dict(extensions=None, ), - outputSpeedVolume=dict(extensions=None, ), + outputCostVolume=dict(extensions=None,), + outputSpeedVolume=dict(extensions=None,), ) outputs = gtractCostFastMarching.output_spec() diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCreateGuideFiber.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCreateGuideFiber.py index 4129974e29..6b2b0a31e5 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCreateGuideFiber.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCreateGuideFiber.py @@ -4,30 +4,23 @@ def test_gtractCreateGuideFiber_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputFiber=dict( - argstr='--inputFiber %s', - extensions=None, - ), - numberOfPoints=dict(argstr='--numberOfPoints %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputFiber=dict( - argstr='--outputFiber %s', - hash_files=False, - ), - writeXMLPolyDataFile=dict(argstr='--writeXMLPolyDataFile ', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputFiber=dict(argstr="--inputFiber %s", extensions=None,), + numberOfPoints=dict(argstr="--numberOfPoints %d",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputFiber=dict(argstr="--outputFiber %s", hash_files=False,), + writeXMLPolyDataFile=dict(argstr="--writeXMLPolyDataFile ",), ) inputs = gtractCreateGuideFiber.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractCreateGuideFiber_outputs(): - output_map = dict(outputFiber=dict(extensions=None, ), ) + output_map = dict(outputFiber=dict(extensions=None,),) outputs = gtractCreateGuideFiber.output_spec() for key, metadata in 
list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractFastMarchingTracking.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractFastMarchingTracking.py index 1b71e0c953..15ee3053f0 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractFastMarchingTracking.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractFastMarchingTracking.py @@ -4,48 +4,36 @@ def test_gtractFastMarchingTracking_inputs(): input_map = dict( - args=dict(argstr='%s', ), - costStepSize=dict(argstr='--costStepSize %f', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + costStepSize=dict(argstr="--costStepSize %f",), + environ=dict(nohash=True, usedefault=True,), inputAnisotropyVolume=dict( - argstr='--inputAnisotropyVolume %s', - extensions=None, - ), - inputCostVolume=dict( - argstr='--inputCostVolume %s', - extensions=None, + argstr="--inputAnisotropyVolume %s", extensions=None, ), + inputCostVolume=dict(argstr="--inputCostVolume %s", extensions=None,), inputStartingSeedsLabelMapVolume=dict( - argstr='--inputStartingSeedsLabelMapVolume %s', - extensions=None, - ), - inputTensorVolume=dict( - argstr='--inputTensorVolume %s', - extensions=None, + argstr="--inputStartingSeedsLabelMapVolume %s", extensions=None, ), - maximumStepSize=dict(argstr='--maximumStepSize %f', ), - minimumStepSize=dict(argstr='--minimumStepSize %f', ), - numberOfIterations=dict(argstr='--numberOfIterations %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputTract=dict( - argstr='--outputTract %s', - hash_files=False, - ), - seedThreshold=dict(argstr='--seedThreshold %f', ), - startingSeedsLabel=dict(argstr='--startingSeedsLabel %d', ), - trackingThreshold=dict(argstr='--trackingThreshold %f', ), - writeXMLPolyDataFile=dict(argstr='--writeXMLPolyDataFile ', ), + inputTensorVolume=dict(argstr="--inputTensorVolume %s", extensions=None,), + maximumStepSize=dict(argstr="--maximumStepSize %f",), + minimumStepSize=dict(argstr="--minimumStepSize %f",), + numberOfIterations=dict(argstr="--numberOfIterations %d",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputTract=dict(argstr="--outputTract %s", hash_files=False,), + seedThreshold=dict(argstr="--seedThreshold %f",), + startingSeedsLabel=dict(argstr="--startingSeedsLabel %d",), + trackingThreshold=dict(argstr="--trackingThreshold %f",), + writeXMLPolyDataFile=dict(argstr="--writeXMLPolyDataFile ",), ) inputs = gtractFastMarchingTracking.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractFastMarchingTracking_outputs(): - output_map = dict(outputTract=dict(extensions=None, ), ) + output_map = dict(outputTract=dict(extensions=None,),) outputs = gtractFastMarchingTracking.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractFiberTracking.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractFiberTracking.py index 2f68a54610..510c00013a 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractFiberTracking.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractFiberTracking.py @@ -4,66 +4,52 @@ def test_gtractFiberTracking_inputs(): input_map = dict( - args=dict(argstr='%s', ), - branchingAngle=dict(argstr='--branchingAngle %f', ), - branchingThreshold=dict(argstr='--branchingThreshold %f', ), - 
curvatureThreshold=dict(argstr='--curvatureThreshold %f', ), - endingSeedsLabel=dict(argstr='--endingSeedsLabel %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - guidedCurvatureThreshold=dict( - argstr='--guidedCurvatureThreshold %f', ), + args=dict(argstr="%s",), + branchingAngle=dict(argstr="--branchingAngle %f",), + branchingThreshold=dict(argstr="--branchingThreshold %f",), + curvatureThreshold=dict(argstr="--curvatureThreshold %f",), + endingSeedsLabel=dict(argstr="--endingSeedsLabel %d",), + environ=dict(nohash=True, usedefault=True,), + guidedCurvatureThreshold=dict(argstr="--guidedCurvatureThreshold %f",), inputAnisotropyVolume=dict( - argstr='--inputAnisotropyVolume %s', - extensions=None, + argstr="--inputAnisotropyVolume %s", extensions=None, ), inputEndingSeedsLabelMapVolume=dict( - argstr='--inputEndingSeedsLabelMapVolume %s', - extensions=None, + argstr="--inputEndingSeedsLabelMapVolume %s", extensions=None, ), inputStartingSeedsLabelMapVolume=dict( - argstr='--inputStartingSeedsLabelMapVolume %s', - extensions=None, - ), - inputTensorVolume=dict( - argstr='--inputTensorVolume %s', - extensions=None, - ), - inputTract=dict( - argstr='--inputTract %s', - extensions=None, + argstr="--inputStartingSeedsLabelMapVolume %s", extensions=None, ), - maximumBranchPoints=dict(argstr='--maximumBranchPoints %d', ), - maximumGuideDistance=dict(argstr='--maximumGuideDistance %f', ), - maximumLength=dict(argstr='--maximumLength %f', ), - minimumLength=dict(argstr='--minimumLength %f', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputTract=dict( - argstr='--outputTract %s', - hash_files=False, - ), - randomSeed=dict(argstr='--randomSeed %d', ), - seedThreshold=dict(argstr='--seedThreshold %f', ), - startingSeedsLabel=dict(argstr='--startingSeedsLabel %d', ), - stepSize=dict(argstr='--stepSize %f', ), - tendF=dict(argstr='--tendF %f', ), - tendG=dict(argstr='--tendG %f', ), - trackingMethod=dict(argstr='--trackingMethod %s', ), - trackingThreshold=dict(argstr='--trackingThreshold %f', ), - useLoopDetection=dict(argstr='--useLoopDetection ', ), - useRandomWalk=dict(argstr='--useRandomWalk ', ), - useTend=dict(argstr='--useTend ', ), - writeXMLPolyDataFile=dict(argstr='--writeXMLPolyDataFile ', ), + inputTensorVolume=dict(argstr="--inputTensorVolume %s", extensions=None,), + inputTract=dict(argstr="--inputTract %s", extensions=None,), + maximumBranchPoints=dict(argstr="--maximumBranchPoints %d",), + maximumGuideDistance=dict(argstr="--maximumGuideDistance %f",), + maximumLength=dict(argstr="--maximumLength %f",), + minimumLength=dict(argstr="--minimumLength %f",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputTract=dict(argstr="--outputTract %s", hash_files=False,), + randomSeed=dict(argstr="--randomSeed %d",), + seedThreshold=dict(argstr="--seedThreshold %f",), + startingSeedsLabel=dict(argstr="--startingSeedsLabel %d",), + stepSize=dict(argstr="--stepSize %f",), + tendF=dict(argstr="--tendF %f",), + tendG=dict(argstr="--tendG %f",), + trackingMethod=dict(argstr="--trackingMethod %s",), + trackingThreshold=dict(argstr="--trackingThreshold %f",), + useLoopDetection=dict(argstr="--useLoopDetection ",), + useRandomWalk=dict(argstr="--useRandomWalk ",), + useTend=dict(argstr="--useTend ",), + writeXMLPolyDataFile=dict(argstr="--writeXMLPolyDataFile ",), ) inputs = gtractFiberTracking.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + 
def test_gtractFiberTracking_outputs(): - output_map = dict(outputTract=dict(extensions=None, ), ) + output_map = dict(outputTract=dict(extensions=None,),) outputs = gtractFiberTracking.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractImageConformity.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractImageConformity.py index 37157c27b6..b382e97133 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractImageConformity.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractImageConformity.py @@ -4,32 +4,22 @@ def test_gtractImageConformity_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputReferenceVolume=dict( - argstr='--inputReferenceVolume %s', - extensions=None, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputReferenceVolume=dict(argstr="--inputReferenceVolume %s", extensions=None,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), ) inputs = gtractImageConformity.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractImageConformity_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = gtractImageConformity.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertBSplineTransform.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertBSplineTransform.py index e15985bbf6..9affd39654 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertBSplineTransform.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertBSplineTransform.py @@ -4,36 +4,23 @@ def test_gtractInvertBSplineTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputReferenceVolume=dict( - argstr='--inputReferenceVolume %s', - extensions=None, - ), - inputTransform=dict( - argstr='--inputTransform %s', - extensions=None, - ), - landmarkDensity=dict( - argstr='--landmarkDensity %s', - sep=',', - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputTransform=dict( - argstr='--outputTransform %s', - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputReferenceVolume=dict(argstr="--inputReferenceVolume %s", extensions=None,), + inputTransform=dict(argstr="--inputTransform %s", extensions=None,), + landmarkDensity=dict(argstr="--landmarkDensity %s", sep=",",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputTransform=dict(argstr="--outputTransform %s", hash_files=False,), ) inputs = gtractInvertBSplineTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractInvertBSplineTransform_outputs(): - output_map = 
dict(outputTransform=dict(extensions=None, ), ) + output_map = dict(outputTransform=dict(extensions=None,),) outputs = gtractInvertBSplineTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertDisplacementField.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertDisplacementField.py index 97b2038c06..2ec1e53e42 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertDisplacementField.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertDisplacementField.py @@ -4,33 +4,23 @@ def test_gtractInvertDisplacementField_inputs(): input_map = dict( - args=dict(argstr='%s', ), - baseImage=dict( - argstr='--baseImage %s', - extensions=None, - ), - deformationImage=dict( - argstr='--deformationImage %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - subsamplingFactor=dict(argstr='--subsamplingFactor %d', ), + args=dict(argstr="%s",), + baseImage=dict(argstr="--baseImage %s", extensions=None,), + deformationImage=dict(argstr="--deformationImage %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + subsamplingFactor=dict(argstr="--subsamplingFactor %d",), ) inputs = gtractInvertDisplacementField.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractInvertDisplacementField_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = gtractInvertDisplacementField.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertRigidTransform.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertRigidTransform.py index 60d92c44ac..bbf3b5b260 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertRigidTransform.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertRigidTransform.py @@ -4,28 +4,21 @@ def test_gtractInvertRigidTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputTransform=dict( - argstr='--inputTransform %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputTransform=dict( - argstr='--outputTransform %s', - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputTransform=dict(argstr="--inputTransform %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputTransform=dict(argstr="--outputTransform %s", hash_files=False,), ) inputs = gtractInvertRigidTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractInvertRigidTransform_outputs(): - output_map = dict(outputTransform=dict(extensions=None, ), ) + output_map = dict(outputTransform=dict(extensions=None,),) outputs = gtractInvertRigidTransform.output_spec() for key, metadata in list(output_map.items()): diff --git 
a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleAnisotropy.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleAnisotropy.py index dc2916a5fe..dd30cda525 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleAnisotropy.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleAnisotropy.py @@ -4,37 +4,28 @@ def test_gtractResampleAnisotropy_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), inputAnatomicalVolume=dict( - argstr='--inputAnatomicalVolume %s', - extensions=None, + argstr="--inputAnatomicalVolume %s", extensions=None, ), inputAnisotropyVolume=dict( - argstr='--inputAnisotropyVolume %s', - extensions=None, - ), - inputTransform=dict( - argstr='--inputTransform %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, + argstr="--inputAnisotropyVolume %s", extensions=None, ), - transformType=dict(argstr='--transformType %s', ), + inputTransform=dict(argstr="--inputTransform %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + transformType=dict(argstr="--transformType %s",), ) inputs = gtractResampleAnisotropy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractResampleAnisotropy_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = gtractResampleAnisotropy.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleB0.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleB0.py index ec80dd6268..e512fed7b5 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleB0.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleB0.py @@ -4,38 +4,27 @@ def test_gtractResampleB0_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), inputAnatomicalVolume=dict( - argstr='--inputAnatomicalVolume %s', - extensions=None, - ), - inputTransform=dict( - argstr='--inputTransform %s', - extensions=None, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, + argstr="--inputAnatomicalVolume %s", extensions=None, ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - transformType=dict(argstr='--transformType %s', ), - vectorIndex=dict(argstr='--vectorIndex %d', ), + inputTransform=dict(argstr="--inputTransform %s", extensions=None,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + transformType=dict(argstr="--transformType %s",), + vectorIndex=dict(argstr="--vectorIndex %d",), ) inputs = gtractResampleB0.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def 
test_gtractResampleB0_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = gtractResampleB0.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleCodeImage.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleCodeImage.py index 01c8e024db..4cc5c30e4f 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleCodeImage.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleCodeImage.py @@ -4,37 +4,24 @@ def test_gtractResampleCodeImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputCodeVolume=dict( - argstr='--inputCodeVolume %s', - extensions=None, - ), - inputReferenceVolume=dict( - argstr='--inputReferenceVolume %s', - extensions=None, - ), - inputTransform=dict( - argstr='--inputTransform %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - transformType=dict(argstr='--transformType %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputCodeVolume=dict(argstr="--inputCodeVolume %s", extensions=None,), + inputReferenceVolume=dict(argstr="--inputReferenceVolume %s", extensions=None,), + inputTransform=dict(argstr="--inputTransform %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + transformType=dict(argstr="--transformType %s",), ) inputs = gtractResampleCodeImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractResampleCodeImage_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = gtractResampleCodeImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleDWIInPlace.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleDWIInPlace.py index e1ce459566..f87aa364cc 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleDWIInPlace.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleDWIInPlace.py @@ -4,51 +4,28 @@ def test_gtractResampleDWIInPlace_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debugLevel=dict(argstr='--debugLevel %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - imageOutputSize=dict( - argstr='--imageOutputSize %s', - sep=',', - ), - inputTransform=dict( - argstr='--inputTransform %s', - extensions=None, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputResampledB0=dict( - argstr='--outputResampledB0 %s', - hash_files=False, - ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - referenceVolume=dict( - argstr='--referenceVolume %s', - extensions=None, - ), - warpDWITransform=dict( - argstr='--warpDWITransform %s', - extensions=None, - ), + args=dict(argstr="%s",), + debugLevel=dict(argstr="--debugLevel %d",), + environ=dict(nohash=True, usedefault=True,), + imageOutputSize=dict(argstr="--imageOutputSize %s", sep=",",), + 
inputTransform=dict(argstr="--inputTransform %s", extensions=None,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputResampledB0=dict(argstr="--outputResampledB0 %s", hash_files=False,), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + referenceVolume=dict(argstr="--referenceVolume %s", extensions=None,), + warpDWITransform=dict(argstr="--warpDWITransform %s", extensions=None,), ) inputs = gtractResampleDWIInPlace.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractResampleDWIInPlace_outputs(): output_map = dict( - outputResampledB0=dict(extensions=None, ), - outputVolume=dict(extensions=None, ), + outputResampledB0=dict(extensions=None,), outputVolume=dict(extensions=None,), ) outputs = gtractResampleDWIInPlace.output_spec() diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleFibers.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleFibers.py index c957857860..3c7a6b33b8 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleFibers.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleFibers.py @@ -4,37 +4,28 @@ def test_gtractResampleFibers_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), inputForwardDeformationFieldVolume=dict( - argstr='--inputForwardDeformationFieldVolume %s', - extensions=None, + argstr="--inputForwardDeformationFieldVolume %s", extensions=None, ), inputReverseDeformationFieldVolume=dict( - argstr='--inputReverseDeformationFieldVolume %s', - extensions=None, - ), - inputTract=dict( - argstr='--inputTract %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputTract=dict( - argstr='--outputTract %s', - hash_files=False, + argstr="--inputReverseDeformationFieldVolume %s", extensions=None, ), - writeXMLPolyDataFile=dict(argstr='--writeXMLPolyDataFile ', ), + inputTract=dict(argstr="--inputTract %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputTract=dict(argstr="--outputTract %s", hash_files=False,), + writeXMLPolyDataFile=dict(argstr="--writeXMLPolyDataFile ",), ) inputs = gtractResampleFibers.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractResampleFibers_outputs(): - output_map = dict(outputTract=dict(extensions=None, ), ) + output_map = dict(outputTract=dict(extensions=None,),) outputs = gtractResampleFibers.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractTensor.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractTensor.py index be941353cd..2372b9599f 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractTensor.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractTensor.py @@ -4,47 +4,32 @@ def test_gtractTensor_inputs(): input_map = dict( - applyMeasurementFrame=dict(argstr='--applyMeasurementFrame ', ), - args=dict(argstr='%s', ), - b0Index=dict(argstr='--b0Index %d', ), + applyMeasurementFrame=dict(argstr="--applyMeasurementFrame ",), + args=dict(argstr="%s",), + 
b0Index=dict(argstr="--b0Index %d",), backgroundSuppressingThreshold=dict( - argstr='--backgroundSuppressingThreshold %d', ), - environ=dict( - nohash=True, - usedefault=True, + argstr="--backgroundSuppressingThreshold %d", ), - ignoreIndex=dict( - argstr='--ignoreIndex %s', - sep=',', - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - maskProcessingMode=dict(argstr='--maskProcessingMode %s', ), - maskVolume=dict( - argstr='--maskVolume %s', - extensions=None, - ), - medianFilterSize=dict( - argstr='--medianFilterSize %s', - sep=',', - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - resampleIsotropic=dict(argstr='--resampleIsotropic ', ), - size=dict(argstr='--size %f', ), + environ=dict(nohash=True, usedefault=True,), + ignoreIndex=dict(argstr="--ignoreIndex %s", sep=",",), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + maskProcessingMode=dict(argstr="--maskProcessingMode %s",), + maskVolume=dict(argstr="--maskVolume %s", extensions=None,), + medianFilterSize=dict(argstr="--medianFilterSize %s", sep=",",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + resampleIsotropic=dict(argstr="--resampleIsotropic ",), + size=dict(argstr="--size %f",), ) inputs = gtractTensor.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractTensor_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = gtractTensor.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractTransformToDisplacementField.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractTransformToDisplacementField.py index 9b965676db..3999871191 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractTransformToDisplacementField.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractTransformToDisplacementField.py @@ -4,23 +4,13 @@ def test_gtractTransformToDisplacementField_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputReferenceVolume=dict( - argstr='--inputReferenceVolume %s', - extensions=None, - ), - inputTransform=dict( - argstr='--inputTransform %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputReferenceVolume=dict(argstr="--inputReferenceVolume %s", extensions=None,), + inputTransform=dict(argstr="--inputTransform %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), outputDeformationFieldVolume=dict( - argstr='--outputDeformationFieldVolume %s', - hash_files=False, + argstr="--outputDeformationFieldVolume %s", hash_files=False, ), ) inputs = gtractTransformToDisplacementField.input_spec() @@ -28,8 +18,10 @@ def test_gtractTransformToDisplacementField_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_gtractTransformToDisplacementField_outputs(): - output_map = dict(outputDeformationFieldVolume=dict(extensions=None, ), ) + output_map = 
dict(outputDeformationFieldVolume=dict(extensions=None,),) outputs = gtractTransformToDisplacementField.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tests/test_auto_maxcurvature.py b/nipype/interfaces/semtools/diffusion/tests/test_auto_maxcurvature.py index 1cef41a6c7..9ec247675e 100644 --- a/nipype/interfaces/semtools/diffusion/tests/test_auto_maxcurvature.py +++ b/nipype/interfaces/semtools/diffusion/tests/test_auto_maxcurvature.py @@ -4,29 +4,22 @@ def test_maxcurvature_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - image=dict( - argstr='--image %s', - extensions=None, - ), - output=dict( - argstr='--output %s', - hash_files=False, - ), - sigma=dict(argstr='--sigma %f', ), - verbose=dict(argstr='--verbose ', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + image=dict(argstr="--image %s", extensions=None,), + output=dict(argstr="--output %s", hash_files=False,), + sigma=dict(argstr="--sigma %f",), + verbose=dict(argstr="--verbose ",), ) inputs = maxcurvature.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_maxcurvature_outputs(): - output_map = dict(output=dict(extensions=None, ), ) + output_map = dict(output=dict(extensions=None,),) outputs = maxcurvature.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py b/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py index 19adc2a817..becf1466e9 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py +++ b/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py @@ -5,14 +5,22 @@ import os -from ....base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ....base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class fiberstatsInputSpec(CommandLineInputSpec): - fiber_file = File( - desc="DTI Fiber File", exists=True, argstr="--fiber_file %s") + fiber_file = File(desc="DTI Fiber File", exists=True, argstr="--fiber_file %s") verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") diff --git a/nipype/interfaces/semtools/diffusion/tractography/fiberprocess.py b/nipype/interfaces/semtools/diffusion/tractography/fiberprocess.py index c0e9dcbbaf..1798ead449 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/fiberprocess.py +++ b/nipype/interfaces/semtools/diffusion/tractography/fiberprocess.py @@ -5,76 +5,86 @@ import os -from ....base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ....base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class fiberprocessInputSpec(CommandLineInputSpec): - fiber_file = File( - desc="DTI fiber file", exists=True, argstr="--fiber_file %s") + fiber_file = File(desc="DTI fiber file", exists=True, argstr="--fiber_file %s") fiber_output = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Output fiber file. 
May be warped or updated with new data depending on other options used.", - argstr="--fiber_output %s") + desc="Output fiber file. May be warped or updated with new data depending on other options used.", + argstr="--fiber_output %s", + ) tensor_volume = File( desc="Interpolate tensor values from the given field", exists=True, - argstr="--tensor_volume %s") + argstr="--tensor_volume %s", + ) h_field = File( - desc= - "HField for warp and statistics lookup. If this option is used tensor-volume must also be specified.", + desc="HField for warp and statistics lookup. If this option is used tensor-volume must also be specified.", exists=True, - argstr="--h_field %s") + argstr="--h_field %s", + ) displacement_field = File( - desc= - "Displacement Field for warp and statistics lookup. If this option is used tensor-volume must also be specified.", + desc="Displacement Field for warp and statistics lookup. If this option is used tensor-volume must also be specified.", exists=True, - argstr="--displacement_field %s") + argstr="--displacement_field %s", + ) saveProperties = traits.Bool( - desc= - "save the tensor property as scalar data into the vtk (only works for vtk fiber files). ", - argstr="--saveProperties ") + desc="save the tensor property as scalar data into the vtk (only works for vtk fiber files). ", + argstr="--saveProperties ", + ) no_warp = traits.Bool( - desc= - "Do not warp the geometry of the tensors only obtain the new statistics.", - argstr="--no_warp ") + desc="Do not warp the geometry of the tensors, only obtain the new statistics.", + argstr="--no_warp ", + ) fiber_radius = traits.Float( - desc="set radius of all fibers to this value", - argstr="--fiber_radius %f") + desc="set radius of all fibers to this value", argstr="--fiber_radius %f" + ) index_space = traits.Bool( - desc= - "Use index-space for fiber output coordinates, otherwise us world space for fiber output coordinates (from tensor file).", - argstr="--index_space ") + desc="Use index-space for fiber output coordinates, otherwise use world space for fiber output coordinates (from tensor file).", + argstr="--index_space ", + ) voxelize = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Voxelize fiber into a label map (the labelmap filename is the argument of -V). The tensor file must be specified using -T for information about the size, origin, spacing of the image. The deformation is applied before the voxelization ", - argstr="--voxelize %s") + desc="Voxelize fiber into a label map (the labelmap filename is the argument of -V). The tensor file must be specified using -T for information about the size, origin, spacing of the image. The deformation is applied before the voxelization ", + argstr="--voxelize %s", + ) voxelize_count_fibers = traits.Bool( desc="Count number of fibers per-voxel instead of just setting to 1", - argstr="--voxelize_count_fibers ") + argstr="--voxelize_count_fibers ", + ) voxel_label = traits.Int( - desc="Label for voxelized fiber", argstr="--voxel_label %d") + desc="Label for voxelized fiber", argstr="--voxel_label %d" + ) verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") - noDataChange = traits.Bool( - desc="Do not change data ??? ", argstr="--noDataChange ") + noDataChange = traits.Bool(desc="Do not change data ??? ", argstr="--noDataChange ") class fiberprocessOutputSpec(TraitedSpec): fiber_output = File( - desc= - "Output fiber file. 
May be warped or updated with new data depending on other options used.", + exists=True, + ) voxelize = File( - desc= - "Voxelize fiber into a label map (the labelmap filename is the argument of -V). The tensor file must be specified using -T for information about the size, origin, spacing of the image. The deformation is applied before the voxelization ", - exists=True) + desc="Voxelize fiber into a label map (the labelmap filename is the argument of -V). The tensor file must be specified using -T for information about the size, origin, spacing of the image. The deformation is applied before the voxelization ", + exists=True, + ) class fiberprocess(SEMLikeCommandLine): @@ -104,7 +114,7 @@ class fiberprocess(SEMLikeCommandLine): output_spec = fiberprocessOutputSpec _cmd = " fiberprocess " _outputs_filenames = { - 'fiber_output': 'fiber_output.vtk', - 'voxelize': 'voxelize.nii' + "fiber_output": "fiber_output.vtk", + "voxelize": "voxelize.nii", } _redirect_x = False diff --git a/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py b/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py index 498cb2579d..c6eb7f13e0 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py +++ b/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py @@ -5,60 +5,70 @@ import os -from ....base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ....base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class fibertrackInputSpec(CommandLineInputSpec): input_tensor_file = File( - desc="Tensor Image", exists=True, argstr="--input_tensor_file %s") + desc="Tensor Image", exists=True, argstr="--input_tensor_file %s" + ) input_roi_file = File( - desc= - "The filename of the image which contains the labels used for seeding and constraining the algorithm.", + desc="The filename of the image which contains the labels used for seeding and constraining the algorithm.", exists=True, - argstr="--input_roi_file %s") + argstr="--input_roi_file %s", + ) output_fiber_file = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "The filename for the fiber file produced by the algorithm. This file must end in a .fib or .vtk extension for ITK spatial object and vtkPolyData formats respectively.", - argstr="--output_fiber_file %s") + desc="The filename for the fiber file produced by the algorithm. This file must end in a .fib or .vtk extension for ITK spatial object and vtkPolyData formats respectively.", + argstr="--output_fiber_file %s", + ) source_label = traits.Int( - desc= - "The label of voxels in the labelfile to use for seeding tractography. One tract is seeded from the center of each voxel with this label", - argstr="--source_label %d") + desc="The label of voxels in the labelfile to use for seeding tractography. One tract is seeded from the center of each voxel with this label", + argstr="--source_label %d", + ) target_label = traits.Int( - desc= - "The label of voxels in the labelfile used to constrain tractography. Tracts that do not pass through a voxel with this label are rejected. Set this keep all tracts.", - argstr="--target_label %d") - forbidden_label = traits.Int( - desc="Forbidden label", argstr="--forbidden_label %d") + desc="The label of voxels in the labelfile used to constrain tractography. 
Tracts that do not pass through a voxel with this label are rejected. Set this to keep all tracts.", argstr="--target_label %d", + ) - forbidden_label = traits.Int( - desc="Forbidden label", argstr="--forbidden_label %d") + forbidden_label = traits.Int(desc="Forbidden label", argstr="--forbidden_label %d") whole_brain = traits.Bool( - desc= - "If this option is enabled all voxels in the image are used to seed tractography. When this option is enabled both source and target labels function as target labels", - argstr="--whole_brain ") + desc="If this option is enabled all voxels in the image are used to seed tractography. When this option is enabled both source and target labels function as target labels", + argstr="--whole_brain ", + ) max_angle = traits.Float( - desc="Maximum angle of change in radians", argstr="--max_angle %f") + desc="Maximum angle of change in radians", argstr="--max_angle %f" + ) step_size = traits.Float( - desc="Step size in mm for the tracking algorithm", - argstr="--step_size %f") + desc="Step size in mm for the tracking algorithm", argstr="--step_size %f" + ) min_fa = traits.Float( - desc="The minimum FA threshold to continue tractography", - argstr="--min_fa %f") + desc="The minimum FA threshold to continue tractography", argstr="--min_fa %f" + ) force = traits.Bool(desc="Ignore sanity checks.", argstr="--force ") verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") really_verbose = traits.Bool( - desc="Follow detail of fiber tracking algorithm", - argstr="--really_verbose ") + desc="Follow detail of fiber tracking algorithm", argstr="--really_verbose " + ) class fibertrackOutputSpec(TraitedSpec): output_fiber_file = File( - desc= - "The filename for the fiber file produced by the algorithm. 
This file must end in a .fib or .vtk extension for ITK spatial object and vtkPolyData formats respectively.", + exists=True, + ) class fibertrack(SEMLikeCommandLine): @@ -90,5 +100,5 @@ class fibertrack(SEMLikeCommandLine): input_spec = fibertrackInputSpec output_spec = fibertrackOutputSpec _cmd = " fibertrack " - _outputs_filenames = {'output_fiber_file': 'output_fiber_file.vtk'} + _outputs_filenames = {"output_fiber_file": "output_fiber_file.vtk"} _redirect_x = False diff --git a/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_UKFTractography.py b/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_UKFTractography.py index 1a40f44cc3..a7e86c79b8 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_UKFTractography.py +++ b/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_UKFTractography.py @@ -4,72 +4,54 @@ def test_UKFTractography_inputs(): input_map = dict( - Ql=dict(argstr='--Ql %f', ), - Qm=dict(argstr='--Qm %f', ), - Qw=dict(argstr='--Qw %f', ), - Rs=dict(argstr='--Rs %f', ), - args=dict(argstr='%s', ), - dwiFile=dict( - argstr='--dwiFile %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - freeWater=dict(argstr='--freeWater ', ), - fullTensorModel=dict(argstr='--fullTensorModel ', ), - labels=dict( - argstr='--labels %s', - sep=',', - ), - maskFile=dict( - argstr='--maskFile %s', - extensions=None, - ), - maxBranchingAngle=dict(argstr='--maxBranchingAngle %f', ), - maxHalfFiberLength=dict(argstr='--maxHalfFiberLength %f', ), - minBranchingAngle=dict(argstr='--minBranchingAngle %f', ), - minFA=dict(argstr='--minFA %f', ), - minGA=dict(argstr='--minGA %f', ), - numTensor=dict(argstr='--numTensor %s', ), - numThreads=dict(argstr='--numThreads %d', ), - recordCovariance=dict(argstr='--recordCovariance ', ), - recordFA=dict(argstr='--recordFA ', ), - recordFreeWater=dict(argstr='--recordFreeWater ', ), - recordLength=dict(argstr='--recordLength %f', ), - recordNMSE=dict(argstr='--recordNMSE ', ), - recordState=dict(argstr='--recordState ', ), - recordTensors=dict(argstr='--recordTensors ', ), - recordTrace=dict(argstr='--recordTrace ', ), - seedFALimit=dict(argstr='--seedFALimit %f', ), - seedsFile=dict( - argstr='--seedsFile %s', - extensions=None, - ), - seedsPerVoxel=dict(argstr='--seedsPerVoxel %d', ), - stepLength=dict(argstr='--stepLength %f', ), - storeGlyphs=dict(argstr='--storeGlyphs ', ), - tracts=dict( - argstr='--tracts %s', - hash_files=False, - ), + Ql=dict(argstr="--Ql %f",), + Qm=dict(argstr="--Qm %f",), + Qw=dict(argstr="--Qw %f",), + Rs=dict(argstr="--Rs %f",), + args=dict(argstr="%s",), + dwiFile=dict(argstr="--dwiFile %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), + freeWater=dict(argstr="--freeWater ",), + fullTensorModel=dict(argstr="--fullTensorModel ",), + labels=dict(argstr="--labels %s", sep=",",), + maskFile=dict(argstr="--maskFile %s", extensions=None,), + maxBranchingAngle=dict(argstr="--maxBranchingAngle %f",), + maxHalfFiberLength=dict(argstr="--maxHalfFiberLength %f",), + minBranchingAngle=dict(argstr="--minBranchingAngle %f",), + minFA=dict(argstr="--minFA %f",), + minGA=dict(argstr="--minGA %f",), + numTensor=dict(argstr="--numTensor %s",), + numThreads=dict(argstr="--numThreads %d",), + recordCovariance=dict(argstr="--recordCovariance ",), + recordFA=dict(argstr="--recordFA ",), + recordFreeWater=dict(argstr="--recordFreeWater ",), + recordLength=dict(argstr="--recordLength %f",), + recordNMSE=dict(argstr="--recordNMSE ",), 
+ recordState=dict(argstr="--recordState ",), + recordTensors=dict(argstr="--recordTensors ",), + recordTrace=dict(argstr="--recordTrace ",), + seedFALimit=dict(argstr="--seedFALimit %f",), + seedsFile=dict(argstr="--seedsFile %s", extensions=None,), + seedsPerVoxel=dict(argstr="--seedsPerVoxel %d",), + stepLength=dict(argstr="--stepLength %f",), + storeGlyphs=dict(argstr="--storeGlyphs ",), + tracts=dict(argstr="--tracts %s", hash_files=False,), tractsWithSecondTensor=dict( - argstr='--tractsWithSecondTensor %s', - hash_files=False, + argstr="--tractsWithSecondTensor %s", hash_files=False, ), - writeAsciiTracts=dict(argstr='--writeAsciiTracts ', ), - writeUncompressedTracts=dict(argstr='--writeUncompressedTracts ', ), + writeAsciiTracts=dict(argstr="--writeAsciiTracts ",), + writeUncompressedTracts=dict(argstr="--writeUncompressedTracts ",), ) inputs = UKFTractography.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_UKFTractography_outputs(): output_map = dict( - tracts=dict(extensions=None, ), - tractsWithSecondTensor=dict(extensions=None, ), + tracts=dict(extensions=None,), tractsWithSecondTensor=dict(extensions=None,), ) outputs = UKFTractography.output_spec() diff --git a/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fiberprocess.py b/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fiberprocess.py index e34b12cffc..d25c1a10ca 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fiberprocess.py +++ b/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fiberprocess.py @@ -4,53 +4,33 @@ def test_fiberprocess_inputs(): input_map = dict( - args=dict(argstr='%s', ), - displacement_field=dict( - argstr='--displacement_field %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fiber_file=dict( - argstr='--fiber_file %s', - extensions=None, - ), - fiber_output=dict( - argstr='--fiber_output %s', - hash_files=False, - ), - fiber_radius=dict(argstr='--fiber_radius %f', ), - h_field=dict( - argstr='--h_field %s', - extensions=None, - ), - index_space=dict(argstr='--index_space ', ), - noDataChange=dict(argstr='--noDataChange ', ), - no_warp=dict(argstr='--no_warp ', ), - saveProperties=dict(argstr='--saveProperties ', ), - tensor_volume=dict( - argstr='--tensor_volume %s', - extensions=None, - ), - verbose=dict(argstr='--verbose ', ), - voxel_label=dict(argstr='--voxel_label %d', ), - voxelize=dict( - argstr='--voxelize %s', - hash_files=False, - ), - voxelize_count_fibers=dict(argstr='--voxelize_count_fibers ', ), + args=dict(argstr="%s",), + displacement_field=dict(argstr="--displacement_field %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), + fiber_file=dict(argstr="--fiber_file %s", extensions=None,), + fiber_output=dict(argstr="--fiber_output %s", hash_files=False,), + fiber_radius=dict(argstr="--fiber_radius %f",), + h_field=dict(argstr="--h_field %s", extensions=None,), + index_space=dict(argstr="--index_space ",), + noDataChange=dict(argstr="--noDataChange ",), + no_warp=dict(argstr="--no_warp ",), + saveProperties=dict(argstr="--saveProperties ",), + tensor_volume=dict(argstr="--tensor_volume %s", extensions=None,), + verbose=dict(argstr="--verbose ",), + voxel_label=dict(argstr="--voxel_label %d",), + voxelize=dict(argstr="--voxelize %s", hash_files=False,), + voxelize_count_fibers=dict(argstr="--voxelize_count_fibers ",), ) inputs = 
fiberprocess.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_fiberprocess_outputs(): output_map = dict( - fiber_output=dict(extensions=None, ), - voxelize=dict(extensions=None, ), + fiber_output=dict(extensions=None,), voxelize=dict(extensions=None,), ) outputs = fiberprocess.output_spec() diff --git a/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fiberstats.py b/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fiberstats.py index c3f750e4cb..570ea316c7 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fiberstats.py +++ b/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fiberstats.py @@ -4,22 +4,18 @@ def test_fiberstats_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fiber_file=dict( - argstr='--fiber_file %s', - extensions=None, - ), - verbose=dict(argstr='--verbose ', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + fiber_file=dict(argstr="--fiber_file %s", extensions=None,), + verbose=dict(argstr="--verbose ",), ) inputs = fiberstats.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_fiberstats_outputs(): output_map = dict() outputs = fiberstats.output_spec() diff --git a/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fibertrack.py b/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fibertrack.py index 1d241b8388..d3994690d1 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fibertrack.py +++ b/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fibertrack.py @@ -4,41 +4,31 @@ def test_fibertrack_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - forbidden_label=dict(argstr='--forbidden_label %d', ), - force=dict(argstr='--force ', ), - input_roi_file=dict( - argstr='--input_roi_file %s', - extensions=None, - ), - input_tensor_file=dict( - argstr='--input_tensor_file %s', - extensions=None, - ), - max_angle=dict(argstr='--max_angle %f', ), - min_fa=dict(argstr='--min_fa %f', ), - output_fiber_file=dict( - argstr='--output_fiber_file %s', - hash_files=False, - ), - really_verbose=dict(argstr='--really_verbose ', ), - source_label=dict(argstr='--source_label %d', ), - step_size=dict(argstr='--step_size %f', ), - target_label=dict(argstr='--target_label %d', ), - verbose=dict(argstr='--verbose ', ), - whole_brain=dict(argstr='--whole_brain ', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + forbidden_label=dict(argstr="--forbidden_label %d",), + force=dict(argstr="--force ",), + input_roi_file=dict(argstr="--input_roi_file %s", extensions=None,), + input_tensor_file=dict(argstr="--input_tensor_file %s", extensions=None,), + max_angle=dict(argstr="--max_angle %f",), + min_fa=dict(argstr="--min_fa %f",), + output_fiber_file=dict(argstr="--output_fiber_file %s", hash_files=False,), + really_verbose=dict(argstr="--really_verbose ",), + source_label=dict(argstr="--source_label %d",), + step_size=dict(argstr="--step_size %f",), + target_label=dict(argstr="--target_label %d",), + verbose=dict(argstr="--verbose ",), + whole_brain=dict(argstr="--whole_brain ",), ) inputs = fibertrack.input_spec() for key, metadata 
in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_fibertrack_outputs(): - output_map = dict(output_fiber_file=dict(extensions=None, ), ) + output_map = dict(output_fiber_file=dict(extensions=None,),) outputs = fibertrack.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py b/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py index 11971dbb6d..228d162560 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py +++ b/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py @@ -5,139 +5,153 @@ import os -from ....base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ....base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class UKFTractographyInputSpec(CommandLineInputSpec): dwiFile = File(desc="Input DWI volume", exists=True, argstr="--dwiFile %s") seedsFile = File( - desc= - "Seeds for diffusion. If not specified, full brain tractography will be performed, and the algorithm will start from every voxel in the brain mask where the Generalized Anisotropy is bigger than 0.18", + desc="Seeds for diffusion. If not specified, full brain tractography will be performed, and the algorithm will start from every voxel in the brain mask where the Generalized Anisotropy is bigger than 0.18", exists=True, - argstr="--seedsFile %s") + argstr="--seedsFile %s", + ) labels = InputMultiPath( traits.Int, desc="A vector of the ROI labels to be used", sep=",", - argstr="--labels %s") + argstr="--labels %s", + ) maskFile = File( - desc="Mask for diffusion tractography", - exists=True, - argstr="--maskFile %s") + desc="Mask for diffusion tractography", exists=True, argstr="--maskFile %s" + ) tracts = traits.Either( traits.Bool, File(), hash_files=False, desc="Tracts generated, with first tensor output", - argstr="--tracts %s") + argstr="--tracts %s", + ) writeAsciiTracts = traits.Bool( - desc="Write tract file as a VTK binary data file", - argstr="--writeAsciiTracts ") + desc="Write tract file as a VTK binary data file", argstr="--writeAsciiTracts " + ) writeUncompressedTracts = traits.Bool( desc="Write tract file as a VTK uncompressed data file", - argstr="--writeUncompressedTracts ") + argstr="--writeUncompressedTracts ", + ) seedsPerVoxel = traits.Int( - desc= - " Each seed generates a fiber, thus using more seeds generates more fibers. In general use 1 or 2 seeds, and for a more thorough result use 5 or 10 (depending on your machine this may take up to 2 days to run)., ", - argstr="--seedsPerVoxel %d") + desc=" Each seed generates a fiber, thus using more seeds generates more fibers. In general use 1 or 2 seeds, and for a more thorough result use 5 or 10 (depending on your machine this may take up to 2 days to run)., ", + argstr="--seedsPerVoxel %d", + ) numTensor = traits.Enum( - "1", "2", desc="Number of tensors used", argstr="--numTensor %s") + "1", "2", desc="Number of tensors used", argstr="--numTensor %s" + ) freeWater = traits.Bool( - desc= - "Adds a term for free water difusion to the model. (Note for experts: if checked, the 1T simple model is forced) ", - argstr="--freeWater ") + desc="Adds a term for free water difusion to the model. 
(Note for experts: if checked, the 1T simple model is forced) ", + argstr="--freeWater ", + ) recordFA = traits.Bool( - desc= - "Whether to store FA. Attaches field 'FA', and 'FA2' for 2-tensor case to fiber. ", - argstr="--recordFA ") + desc="Whether to store FA. Attaches field 'FA', and 'FA2' for 2-tensor case to fiber. ", + argstr="--recordFA ", + ) recordFreeWater = traits.Bool( - desc= - "Whether to store the fraction of free water. Attaches field 'FreeWater' to fiber.", - argstr="--recordFreeWater ") + desc="Whether to store the fraction of free water. Attaches field 'FreeWater' to fiber.", + argstr="--recordFreeWater ", + ) recordTrace = traits.Bool( - desc= - "Whether to store Trace. Attaches field 'Trace', and 'Trace2' for 2-tensor case to fiber.", - argstr="--recordTrace ") + desc="Whether to store Trace. Attaches field 'Trace', and 'Trace2' for 2-tensor case to fiber.", + argstr="--recordTrace ", + ) recordTensors = traits.Bool( - desc= - "Recording the tensors enables Slicer to color the fiber bundles by FA, orientation, and so on. The fields will be called 'TensorN', where N is the tensor number. ", - argstr="--recordTensors ") + desc="Recording the tensors enables Slicer to color the fiber bundles by FA, orientation, and so on. The fields will be called 'TensorN', where N is the tensor number. ", + argstr="--recordTensors ", + ) recordNMSE = traits.Bool( desc="Whether to store NMSE. Attaches field 'NMSE' to fiber. ", - argstr="--recordNMSE ") + argstr="--recordNMSE ", + ) recordState = traits.Bool( - desc= - "Whether to attach the states to the fiber. Will generate field 'state'.", - argstr="--recordState ") + desc="Whether to attach the states to the fiber. Will generate field 'state'.", + argstr="--recordState ", + ) recordCovariance = traits.Bool( - desc= - "Whether to store the covariance. Will generate field 'covariance' in fiber.", - argstr="--recordCovariance ") + desc="Whether to store the covariance. Will generate field 'covariance' in fiber.", + argstr="--recordCovariance ", + ) recordLength = traits.Float( - desc="Record length of tractography, in millimeters", - argstr="--recordLength %f") + desc="Record length of tractography, in millimeters", argstr="--recordLength %f" + ) minFA = traits.Float( - desc= - "Abort the tractography when the Fractional Anisotropy is less than this value", - argstr="--minFA %f") + desc="Abort the tractography when the Fractional Anisotropy is less than this value", + argstr="--minFA %f", + ) minGA = traits.Float( - desc= - "Abort the tractography when the Generalized Anisotropy is less than this value", - argstr="--minGA %f") + desc="Abort the tractography when the Generalized Anisotropy is less than this value", + argstr="--minGA %f", + ) fullTensorModel = traits.Bool( - desc= - "Whether to use the full tensor model. If unchecked, use the default simple tensor model", - argstr="--fullTensorModel ") + desc="Whether to use the full tensor model. If unchecked, use the default simple tensor model", + argstr="--fullTensorModel ", + ) numThreads = traits.Int( - desc= - "Number of threads used during computation. Set to the number of cores on your workstation for optimal speed. If left undefined the number of cores detected will be used. ", - argstr="--numThreads %d") + desc="Number of threads used during computation. Set to the number of cores on your workstation for optimal speed. If left undefined the number of cores detected will be used. 
", + argstr="--numThreads %d", + ) stepLength = traits.Float( - desc="Step length of tractography, in millimeters", - argstr="--stepLength %f") + desc="Step length of tractography, in millimeters", argstr="--stepLength %f" + ) maxHalfFiberLength = traits.Float( - desc= - "The max length limit of the half fibers generated during tractography. Here the fiber is \'half\' because the tractography goes in only one direction from one seed point at a time", - argstr="--maxHalfFiberLength %f") + desc="The max length limit of the half fibers generated during tractography. Here the fiber is 'half' because the tractography goes in only one direction from one seed point at a time", + argstr="--maxHalfFiberLength %f", + ) seedFALimit = traits.Float( desc="Seed points whose FA are below this value are excluded", - argstr="--seedFALimit %f") - Qm = traits.Float( - desc="Process noise for angles/direction", argstr="--Qm %f") + argstr="--seedFALimit %f", + ) + Qm = traits.Float(desc="Process noise for angles/direction", argstr="--Qm %f") Ql = traits.Float(desc="Process noise for eigenvalues", argstr="--Ql %f") Qw = traits.Float( - desc= - "Process noise for free water weights, ignored if no free water estimation", - argstr="--Qw %f") + desc="Process noise for free water weights, ignored if no free water estimation", + argstr="--Qw %f", + ) Rs = traits.Float(desc="Measurement noise", argstr="--Rs %f") maxBranchingAngle = traits.Float( - desc= - "Maximum branching angle, in degrees. When using multiple tensors, a new branch will be created when the tensors' major directions form an angle between (minBranchingAngle, maxBranchingAngle). Branching is supressed when this maxBranchingAngle is set to 0.0", - argstr="--maxBranchingAngle %f") + desc="Maximum branching angle, in degrees. When using multiple tensors, a new branch will be created when the tensors' major directions form an angle between (minBranchingAngle, maxBranchingAngle). Branching is supressed when this maxBranchingAngle is set to 0.0", + argstr="--maxBranchingAngle %f", + ) minBranchingAngle = traits.Float( - desc= - "Minimum branching angle, in degrees. When using multiple tensors, a new branch will be created when the tensors' major directions form an angle between (minBranchingAngle, maxBranchingAngle)", - argstr="--minBranchingAngle %f") + desc="Minimum branching angle, in degrees. When using multiple tensors, a new branch will be created when the tensors' major directions form an angle between (minBranchingAngle, maxBranchingAngle)", + argstr="--minBranchingAngle %f", + ) tractsWithSecondTensor = traits.Either( traits.Bool, File(), hash_files=False, desc="Tracts generated, with second tensor output (if there is one)", - argstr="--tractsWithSecondTensor %s") + argstr="--tractsWithSecondTensor %s", + ) storeGlyphs = traits.Bool( - desc= - "Store tensors' main directions as two-point lines in a separate file named glyphs_{tracts}. When using multiple tensors, only the major tensors' main directions are stored", - argstr="--storeGlyphs ") + desc="Store tensors' main directions as two-point lines in a separate file named glyphs_{tracts}. 
When using multiple tensors, only the major tensors' main directions are stored", + argstr="--storeGlyphs ", + ) class UKFTractographyOutputSpec(TraitedSpec): - tracts = File( - desc="Tracts generated, with first tensor output", exists=True) + tracts = File(desc="Tracts generated, with first tensor output", exists=True) tractsWithSecondTensor = File( desc="Tracts generated, with second tensor output (if there is one)", - exists=True) + exists=True, + ) class UKFTractography(SEMLikeCommandLine): @@ -161,7 +175,7 @@ class UKFTractography(SEMLikeCommandLine): output_spec = UKFTractographyOutputSpec _cmd = " UKFTractography " _outputs_filenames = { - 'tracts': 'tracts.vtp', - 'tractsWithSecondTensor': 'tractsWithSecondTensor.vtp' + "tracts": "tracts.vtp", + "tractsWithSecondTensor": "tractsWithSecondTensor.vtp", } _redirect_x = False diff --git a/nipype/interfaces/semtools/featurecreator.py b/nipype/interfaces/semtools/featurecreator.py index 69ff2d675c..f02d19fda8 100644 --- a/nipype/interfaces/semtools/featurecreator.py +++ b/nipype/interfaces/semtools/featurecreator.py @@ -5,22 +5,33 @@ import os -from ..base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ..base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class GenerateCsfClippedFromClassifiedImageInputSpec(CommandLineInputSpec): inputCassifiedVolume = File( desc="Required: input tissue label image", exists=True, - argstr="--inputCassifiedVolume %s") + argstr="--inputCassifiedVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class GenerateCsfClippedFromClassifiedImageOutputSpec(TraitedSpec): @@ -47,5 +58,5 @@ class GenerateCsfClippedFromClassifiedImage(SEMLikeCommandLine): input_spec = GenerateCsfClippedFromClassifiedImageInputSpec output_spec = GenerateCsfClippedFromClassifiedImageOutputSpec _cmd = " GenerateCsfClippedFromClassifiedImage " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False diff --git a/nipype/interfaces/semtools/filtering/__init__.py b/nipype/interfaces/semtools/filtering/__init__.py index 82ad67a9c1..b5b7eccb20 100644 --- a/nipype/interfaces/semtools/filtering/__init__.py +++ b/nipype/interfaces/semtools/filtering/__init__.py @@ -1,9 +1,22 @@ # -*- coding: utf-8 -*- from .denoising import UnbiasedNonLocalMeans from .featuredetection import ( - GenerateSummedGradientImage, CannySegmentationLevelSetImageFilter, - DilateImage, TextureFromNoiseImageFilter, FlippedDifference, ErodeImage, - GenerateBrainClippedImage, NeighborhoodMedian, GenerateTestImage, - NeighborhoodMean, HammerAttributeCreator, TextureMeasureFilter, DilateMask, - DumpBinaryTrainingVectors, DistanceMaps, STAPLEAnalysis, - GradientAnisotropicDiffusionImageFilter, CannyEdge) + GenerateSummedGradientImage, + CannySegmentationLevelSetImageFilter, + DilateImage, + TextureFromNoiseImageFilter, + FlippedDifference, + ErodeImage, + GenerateBrainClippedImage, + NeighborhoodMedian, + GenerateTestImage, + NeighborhoodMean, + HammerAttributeCreator, + TextureMeasureFilter, + DilateMask, + DumpBinaryTrainingVectors, + DistanceMaps, + STAPLEAnalysis, + GradientAnisotropicDiffusionImageFilter, + CannyEdge, +)
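For orientation, the UKFTractography wrapper reformatted above is driven like any other nipype command-line interface: instantiate it, assign inputs, then inspect cmdline or call run(). A minimal usage sketch, not part of the patch; it assumes the UKFTractography executable is installed and on PATH, and "dwi.nhdr" and "mask.nhdr" are placeholder file names:

    from nipype.interfaces.semtools.diffusion.tractography.ukftractography import (
        UKFTractography,
    )

    ukf = UKFTractography()
    ukf.inputs.dwiFile = "dwi.nhdr"    # placeholder input DWI volume
    ukf.inputs.maskFile = "mask.nhdr"  # placeholder brain mask
    ukf.inputs.tracts = True           # True asks SEMLikeCommandLine to fill in the
                                       # default name from _outputs_filenames ("tracts.vtp")
    print(ukf.cmdline)                 # the command line that run() would execute
    # result = ukf.run()               # result.outputs.tracts -> path to tracts.vtp

Setting an output trait to True rather than to an explicit path relies on the SEMLikeCommandLine convention visible in the _outputs_filenames mapping above; passing a concrete filename works as well.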
diff --git a/nipype/interfaces/semtools/filtering/denoising.py b/nipype/interfaces/semtools/filtering/denoising.py index 97d687c512..2ca6840128 100644 --- a/nipype/interfaces/semtools/filtering/denoising.py +++ b/nipype/interfaces/semtools/filtering/denoising.py @@ -5,50 +5,58 @@ import os -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class UnbiasedNonLocalMeansInputSpec(CommandLineInputSpec): sigma = traits.Float( - desc= - "The root power of noise (sigma) in the complex Gaussian process the Rician comes from. If it is underestimated, the algorithm fails to remove the noise. If it is overestimated, over-blurring is likely to occur.", - argstr="--sigma %f") + desc="The root power of noise (sigma) in the complex Gaussian process the Rician comes from. If it is underestimated, the algorithm fails to remove the noise. If it is overestimated, over-blurring is likely to occur.", + argstr="--sigma %f", + ) rs = InputMultiPath( traits.Int, - desc= - "The algorithm search for similar voxels in a neighborhood of this radius (radii larger than 5,5,5 are very slow, and the results can be only marginally better. Small radii may fail to effectively remove the noise).", + desc="The algorithm searches for similar voxels in a neighborhood of this radius (radii larger than 5,5,5 are very slow, and the results can be only marginally better. Small radii may fail to effectively remove the noise).", sep=",", - argstr="--rs %s") + argstr="--rs %s", + ) rc = InputMultiPath( traits.Int, - desc= - "Similarity between blocks is computed as the difference between mean values and gradients. These parameters are computed fitting a hyperplane with LS inside a neighborhood of this size", + desc="Similarity between blocks is computed as the difference between mean values and gradients. These parameters are computed fitting a hyperplane with LS inside a neighborhood of this size", sep=",", - argstr="--rc %s") + argstr="--rc %s", + ) hp = traits.Float( - desc= - "This parameter is related to noise; the larger the parameter, the more aggressive the filtering. Should be near 1, and only values between 0.8 and 1.2 are allowed", - argstr="--hp %f") + desc="This parameter is related to noise; the larger the parameter, the more aggressive the filtering. 
Should be near 1, and only values between 0.8 and 1.2 are allowed", + argstr="--hp %f", + ) ps = traits.Float( - desc= - "To accelerate computations, preselection is used: if the normalized difference is above this threshold, the voxel will be discarded (non used for average)", - argstr="--ps %f") - inputVolume = File( - position=-2, desc="Input MRI volume.", exists=True, argstr="%s") + desc="To accelerate computations, preselection is used: if the normalized difference is above this threshold, the voxel will be discarded (not used for averaging)", + argstr="--ps %f", + ) + inputVolume = File(position=-2, desc="Input MRI volume.", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output (filtered) MRI volume.", - argstr="%s") + argstr="%s", + ) class UnbiasedNonLocalMeansOutputSpec(TraitedSpec): - outputVolume = File( - position=-1, desc="Output (filtered) MRI volume.", exists=True) + outputVolume = File(position=-1, desc="Output (filtered) MRI volume.", exists=True) class UnbiasedNonLocalMeans(SEMLikeCommandLine): @@ -79,5 +87,5 @@ class UnbiasedNonLocalMeans(SEMLikeCommandLine): input_spec = UnbiasedNonLocalMeansInputSpec output_spec = UnbiasedNonLocalMeansOutputSpec _cmd = " UnbiasedNonLocalMeans " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False diff --git a/nipype/interfaces/semtools/filtering/featuredetection.py b/nipype/interfaces/semtools/filtering/featuredetection.py index ca4973ab43..e15e1de6b0 100644 --- a/nipype/interfaces/semtools/filtering/featuredetection.py +++ b/nipype/interfaces/semtools/filtering/featuredetection.py @@ -5,33 +5,42 @@ import os -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class GenerateSummedGradientImageInputSpec(CommandLineInputSpec): inputVolume1 = File( - desc="input volume 1, usally t1 image", - exists=True, - argstr="--inputVolume1 %s") + desc="input volume 1, usually t1 image", exists=True, argstr="--inputVolume1 %s" + ) inputVolume2 = File( - desc="input volume 2, usally t2 image", - exists=True, - argstr="--inputVolume2 %s") + desc="input volume 2, usually t2 image", exists=True, argstr="--inputVolume2 %s" + ) outputFileName = traits.Either( traits.Bool, File(), hash_files=False, desc="(required) output file name", - argstr="--outputFileName %s") + argstr="--outputFileName %s", + ) MaximumGradient = traits.Bool( - desc= - "If set this flag, it will compute maximum gradient between two input volumes instead of sum of it.", - argstr="--MaximumGradient ") + desc="If this flag is set, it will compute the maximum gradient between the two input volumes instead of their sum.", + argstr="--MaximumGradient ", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class GenerateSummedGradientImageOutputSpec(TraitedSpec): @@ -56,7 +65,7 @@ class GenerateSummedGradientImage(SEMLikeCommandLine): input_spec = GenerateSummedGradientImageInputSpec output_spec = GenerateSummedGradientImageOutputSpec _cmd = " GenerateSummedGradientImage " - _outputs_filenames = {'outputFileName': 'outputFileName'} + _outputs_filenames = {"outputFileName": "outputFileName"} _redirect_x = False
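A pattern worth noting while reading these hunks: each wrapper declares an output-producing input as traits.Either(traits.Bool, File(), hash_files=False, ...) and pairs it with an _outputs_filenames mapping. Setting such an input to True asks SEMLikeCommandLine to substitute the default file name from that mapping, while an explicit path is passed through unchanged. A rough sketch of that convention, not the actual nipype implementation:

    import os

    def resolve_output(value, name, outputs_filenames):
        # Mimics how a SEM-like wrapper picks the path placed on the command line.
        if value is True:
            # e.g. GenerateSummedGradientImage maps "outputFileName" -> "outputFileName"
            return os.path.abspath(outputs_filenames[name])
        return value  # an explicit filename is used verbatim

    print(resolve_output(True, "outputFileName", {"outputFileName": "outputFileName"}))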
{"outputFileName": "outputFileName"} _redirect_x = False @@ -64,21 +73,23 @@ class CannySegmentationLevelSetImageFilterInputSpec(CommandLineInputSpec): inputVolume = File(exists=True, argstr="--inputVolume %s") initialModel = File(exists=True, argstr="--initialModel %s") outputVolume = traits.Either( - traits.Bool, File(), hash_files=False, argstr="--outputVolume %s") + traits.Bool, File(), hash_files=False, argstr="--outputVolume %s" + ) outputSpeedVolume = traits.Either( - traits.Bool, File(), hash_files=False, argstr="--outputSpeedVolume %s") + traits.Bool, File(), hash_files=False, argstr="--outputSpeedVolume %s" + ) cannyThreshold = traits.Float( - desc="Canny Threshold Value", argstr="--cannyThreshold %f") - cannyVariance = traits.Float( - desc="Canny variance", argstr="--cannyVariance %f") + desc="Canny Threshold Value", argstr="--cannyThreshold %f" + ) + cannyVariance = traits.Float(desc="Canny variance", argstr="--cannyVariance %f") advectionWeight = traits.Float( - desc= - "Controls the smoothness of the resulting mask, small number are more smooth, large numbers allow more sharp corners. ", - argstr="--advectionWeight %f") + desc="Controls the smoothness of the resulting mask, small number are more smooth, large numbers allow more sharp corners. ", + argstr="--advectionWeight %f", + ) initialModelIsovalue = traits.Float( - desc= - "The identification of the input model iso-surface. (for a binary image with 0s and 1s use 0.5) (for a binary image with 0s and 255's use 127.5).", - argstr="--initialModelIsovalue %f") + desc="The identification of the input model iso-surface. (for a binary image with 0s and 1s use 0.5) (for a binary image with 0s and 255's use 127.5).", + argstr="--initialModelIsovalue %f", + ) maxIterations = traits.Int(desc="The", argstr="--maxIterations %d") @@ -108,27 +119,31 @@ class CannySegmentationLevelSetImageFilter(SEMLikeCommandLine): output_spec = CannySegmentationLevelSetImageFilterOutputSpec _cmd = " CannySegmentationLevelSetImageFilter " _outputs_filenames = { - 'outputVolume': 'outputVolume.nii', - 'outputSpeedVolume': 'outputSpeedVolume.nii' + "outputVolume": "outputVolume.nii", + "outputSpeedVolume": "outputSpeedVolume.nii", } _redirect_x = False class DilateImageInputSpec(CommandLineInputSpec): inputVolume = File( - desc="Required: input image", exists=True, argstr="--inputVolume %s") + desc="Required: input image", exists=True, argstr="--inputVolume %s" + ) inputMaskVolume = File( desc="Required: input brain mask image", exists=True, - argstr="--inputMaskVolume %s") + argstr="--inputMaskVolume %s", + ) inputRadius = traits.Int( - desc="Required: input neighborhood radius", argstr="--inputRadius %d") + desc="Required: input neighborhood radius", argstr="--inputRadius %d" + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class DilateImageOutputSpec(TraitedSpec): @@ -155,21 +170,24 @@ class DilateImage(SEMLikeCommandLine): input_spec = DilateImageInputSpec output_spec = DilateImageOutputSpec _cmd = " DilateImage " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class TextureFromNoiseImageFilterInputSpec(CommandLineInputSpec): inputVolume = File( - desc="Required: input image", exists=True, argstr="--inputVolume %s") + desc="Required: input image", exists=True, argstr="--inputVolume %s" + ) inputRadius = traits.Int( - desc="Required: input 
neighborhood radius", argstr="--inputRadius %d") + desc="Required: input neighborhood radius", argstr="--inputRadius %d" + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class TextureFromNoiseImageFilterOutputSpec(TraitedSpec): @@ -196,23 +214,26 @@ class TextureFromNoiseImageFilter(SEMLikeCommandLine): input_spec = TextureFromNoiseImageFilterInputSpec output_spec = TextureFromNoiseImageFilterOutputSpec _cmd = " TextureFromNoiseImageFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class FlippedDifferenceInputSpec(CommandLineInputSpec): inputVolume = File( - desc="Required: input image", exists=True, argstr="--inputVolume %s") + desc="Required: input image", exists=True, argstr="--inputVolume %s" + ) inputMaskVolume = File( desc="Required: input brain mask image", exists=True, - argstr="--inputMaskVolume %s") + argstr="--inputMaskVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class FlippedDifferenceOutputSpec(TraitedSpec): @@ -239,25 +260,29 @@ class FlippedDifference(SEMLikeCommandLine): input_spec = FlippedDifferenceInputSpec output_spec = FlippedDifferenceOutputSpec _cmd = " FlippedDifference " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class ErodeImageInputSpec(CommandLineInputSpec): inputVolume = File( - desc="Required: input image", exists=True, argstr="--inputVolume %s") + desc="Required: input image", exists=True, argstr="--inputVolume %s" + ) inputMaskVolume = File( desc="Required: input brain mask image", exists=True, - argstr="--inputMaskVolume %s") + argstr="--inputMaskVolume %s", + ) inputRadius = traits.Int( - desc="Required: input neighborhood radius", argstr="--inputRadius %d") + desc="Required: input neighborhood radius", argstr="--inputRadius %d" + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class ErodeImageOutputSpec(TraitedSpec): @@ -284,28 +309,28 @@ class ErodeImage(SEMLikeCommandLine): input_spec = ErodeImageInputSpec output_spec = ErodeImageOutputSpec _cmd = " ErodeImage " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class GenerateBrainClippedImageInputSpec(CommandLineInputSpec): inputImg = File( - desc="input volume 1, usally t1 image", - exists=True, - argstr="--inputImg %s") + desc="input volume 1, usally t1 image", exists=True, argstr="--inputImg %s" + ) inputMsk = File( - desc="input volume 2, usally t2 image", - exists=True, - argstr="--inputMsk %s") + desc="input volume 2, usally t2 image", exists=True, argstr="--inputMsk %s" + ) outputFileName = traits.Either( traits.Bool, File(), hash_files=False, desc="(required) output file name", - argstr="--outputFileName %s") + argstr="--outputFileName %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class GenerateBrainClippedImageOutputSpec(TraitedSpec): @@ -330,25 +355,29 @@ class GenerateBrainClippedImage(SEMLikeCommandLine): input_spec = 
GenerateBrainClippedImageInputSpec output_spec = GenerateBrainClippedImageOutputSpec _cmd = " GenerateBrainClippedImage " - _outputs_filenames = {'outputFileName': 'outputFileName'} + _outputs_filenames = {"outputFileName": "outputFileName"} _redirect_x = False class NeighborhoodMedianInputSpec(CommandLineInputSpec): inputVolume = File( - desc="Required: input image", exists=True, argstr="--inputVolume %s") + desc="Required: input image", exists=True, argstr="--inputVolume %s" + ) inputMaskVolume = File( desc="Required: input brain mask image", exists=True, - argstr="--inputMaskVolume %s") + argstr="--inputMaskVolume %s", + ) inputRadius = traits.Int( - desc="Required: input neighborhood radius", argstr="--inputRadius %d") + desc="Required: input neighborhood radius", argstr="--inputRadius %d" + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class NeighborhoodMedianOutputSpec(TraitedSpec): @@ -375,27 +404,26 @@ class NeighborhoodMedian(SEMLikeCommandLine): input_spec = NeighborhoodMedianInputSpec output_spec = NeighborhoodMedianOutputSpec _cmd = " NeighborhoodMedian " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class GenerateTestImageInputSpec(CommandLineInputSpec): inputVolume = File( - desc="input volume 1, usally t1 image", - exists=True, - argstr="--inputVolume %s") + desc="input volume 1, usally t1 image", exists=True, argstr="--inputVolume %s" + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="(required) output file name", - argstr="--outputVolume %s") - lowerBoundOfOutputVolume = traits.Float( - argstr="--lowerBoundOfOutputVolume %f") - upperBoundOfOutputVolume = traits.Float( - argstr="--upperBoundOfOutputVolume %f") + argstr="--outputVolume %s", + ) + lowerBoundOfOutputVolume = traits.Float(argstr="--lowerBoundOfOutputVolume %f") + upperBoundOfOutputVolume = traits.Float(argstr="--upperBoundOfOutputVolume %f") outputVolumeSize = traits.Float( - desc="output Volume Size", argstr="--outputVolumeSize %f") + desc="output Volume Size", argstr="--outputVolumeSize %f" + ) class GenerateTestImageOutputSpec(TraitedSpec): @@ -420,25 +448,29 @@ class GenerateTestImage(SEMLikeCommandLine): input_spec = GenerateTestImageInputSpec output_spec = GenerateTestImageOutputSpec _cmd = " GenerateTestImage " - _outputs_filenames = {'outputVolume': 'outputVolume'} + _outputs_filenames = {"outputVolume": "outputVolume"} _redirect_x = False class NeighborhoodMeanInputSpec(CommandLineInputSpec): inputVolume = File( - desc="Required: input image", exists=True, argstr="--inputVolume %s") + desc="Required: input image", exists=True, argstr="--inputVolume %s" + ) inputMaskVolume = File( desc="Required: input brain mask image", exists=True, - argstr="--inputMaskVolume %s") + argstr="--inputMaskVolume %s", + ) inputRadius = traits.Int( - desc="Required: input neighborhood radius", argstr="--inputRadius %d") + desc="Required: input neighborhood radius", argstr="--inputRadius %d" + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class NeighborhoodMeanOutputSpec(TraitedSpec): @@ -465,30 +497,32 @@ class NeighborhoodMean(SEMLikeCommandLine): input_spec = NeighborhoodMeanInputSpec output_spec = NeighborhoodMeanOutputSpec _cmd = " NeighborhoodMean " - 
_outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class HammerAttributeCreatorInputSpec(CommandLineInputSpec): Scale = traits.Int(desc="Determine Scale of Ball", argstr="--Scale %d") - Strength = traits.Float( - desc="Determine Strength of Edges", argstr="--Strength %f") + Strength = traits.Float(desc="Determine Strength of Edges", argstr="--Strength %f") inputGMVolume = File( desc="Required: input grey matter posterior image", exists=True, - argstr="--inputGMVolume %s") + argstr="--inputGMVolume %s", + ) inputWMVolume = File( desc="Required: input white matter posterior image", exists=True, - argstr="--inputWMVolume %s") + argstr="--inputWMVolume %s", + ) inputCSFVolume = File( desc="Required: input CSF posterior image", exists=True, - argstr="--inputCSFVolume %s") + argstr="--inputCSFVolume %s", + ) outputVolumeBase = traits.Str( - desc= - "Required: output image base name to be appended for each feature vector.", - argstr="--outputVolumeBase %s") + desc="Required: output image base name to be appended for each feature vector.", + argstr="--outputVolumeBase %s", + ) class HammerAttributeCreatorOutputSpec(TraitedSpec): @@ -525,7 +559,8 @@ class TextureMeasureFilterInputSpec(CommandLineInputSpec): distance = traits.Int(argstr="--distance %d") insideROIValue = traits.Float(argstr="--insideROIValue %f") outputFilename = traits.Either( - traits.Bool, File(), hash_files=False, argstr="--outputFilename %s") + traits.Bool, File(), hash_files=False, argstr="--outputFilename %s" + ) class TextureMeasureFilterOutputSpec(TraitedSpec): @@ -552,29 +587,33 @@ class TextureMeasureFilter(SEMLikeCommandLine): input_spec = TextureMeasureFilterInputSpec output_spec = TextureMeasureFilterOutputSpec _cmd = " TextureMeasureFilter " - _outputs_filenames = {'outputFilename': 'outputFilename'} + _outputs_filenames = {"outputFilename": "outputFilename"} _redirect_x = False class DilateMaskInputSpec(CommandLineInputSpec): inputVolume = File( - desc="Required: input image", exists=True, argstr="--inputVolume %s") + desc="Required: input image", exists=True, argstr="--inputVolume %s" + ) inputBinaryVolume = File( desc="Required: input brain mask image", exists=True, - argstr="--inputBinaryVolume %s") + argstr="--inputBinaryVolume %s", + ) sizeStructuralElement = traits.Int( - desc= - "size of structural element. sizeStructuralElement=1 means that 3x3x3 structuring element for 3D", - argstr="--sizeStructuralElement %d") + desc="size of structural element. 
sizeStructuralElement=1 means that 3x3x3 structuring element for 3D", + argstr="--sizeStructuralElement %d", + ) lowerThreshold = traits.Float( - desc="Required: lowerThreshold value", argstr="--lowerThreshold %f") + desc="Required: lowerThreshold value", argstr="--lowerThreshold %f" + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class DilateMaskOutputSpec(TraitedSpec): @@ -601,7 +640,7 @@ class DilateMask(SEMLikeCommandLine): input_spec = DilateMaskInputSpec output_spec = DilateMaskOutputSpec _cmd = " DilateMask " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False @@ -609,11 +648,13 @@ class DumpBinaryTrainingVectorsInputSpec(CommandLineInputSpec): inputHeaderFilename = File( desc="Required: input header file name", exists=True, - argstr="--inputHeaderFilename %s") + argstr="--inputHeaderFilename %s", + ) inputVectorFilename = File( desc="Required: input vector filename", exists=True, - argstr="--inputVectorFilename %s") + argstr="--inputVectorFilename %s", + ) class DumpBinaryTrainingVectorsOutputSpec(TraitedSpec): @@ -648,21 +689,24 @@ class DistanceMapsInputSpec(CommandLineInputSpec): inputLabelVolume = File( desc="Required: input tissue label image", exists=True, - argstr="--inputLabelVolume %s") + argstr="--inputLabelVolume %s", + ) inputMaskVolume = File( desc="Required: input brain mask image", exists=True, - argstr="--inputMaskVolume %s") + argstr="--inputMaskVolume %s", + ) inputTissueLabel = traits.Int( - desc= - "Required: input integer value of tissue type used to calculate distance", - argstr="--inputTissueLabel %d") + desc="Required: input integer value of tissue type used to calculate distance", + argstr="--inputTissueLabel %d", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class DistanceMapsOutputSpec(TraitedSpec): @@ -689,24 +733,26 @@ class DistanceMaps(SEMLikeCommandLine): input_spec = DistanceMapsInputSpec output_spec = DistanceMapsOutputSpec _cmd = " DistanceMaps " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class STAPLEAnalysisInputSpec(CommandLineInputSpec): inputDimension = traits.Int( - desc="Required: input image Dimension 2 or 3", - argstr="--inputDimension %d") + desc="Required: input image Dimension 2 or 3", argstr="--inputDimension %d" + ) inputLabelVolume = InputMultiPath( File(exists=True), desc="Required: input label volume", - argstr="--inputLabelVolume %s...") + argstr="--inputLabelVolume %s...", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class STAPLEAnalysisOutputSpec(TraitedSpec): @@ -733,26 +779,30 @@ class STAPLEAnalysis(SEMLikeCommandLine): input_spec = STAPLEAnalysisInputSpec output_spec = STAPLEAnalysisOutputSpec _cmd = " STAPLEAnalysis " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class GradientAnisotropicDiffusionImageFilterInputSpec(CommandLineInputSpec): inputVolume = File( - desc="Required: input image", exists=True, argstr="--inputVolume %s") + desc="Required: input image", exists=True, 
argstr="--inputVolume %s" + ) numberOfIterations = traits.Int( - desc="Optional value for number of Iterations", - argstr="--numberOfIterations %d") + desc="Optional value for number of Iterations", argstr="--numberOfIterations %d" + ) timeStep = traits.Float( - desc="Time step for diffusion process", argstr="--timeStep %f") + desc="Time step for diffusion process", argstr="--timeStep %f" + ) conductance = traits.Float( - desc="Conductance for diffusion process", argstr="--conductance %f") + desc="Conductance for diffusion process", argstr="--conductance %f" + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class GradientAnisotropicDiffusionImageFilterOutputSpec(TraitedSpec): @@ -773,7 +823,7 @@ class GradientAnisotropicDiffusionImageFilter(SEMLikeCommandLine): input_spec = GradientAnisotropicDiffusionImageFilterInputSpec output_spec = GradientAnisotropicDiffusionImageFilterOutputSpec _cmd = " GradientAnisotropicDiffusionImageFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False @@ -781,25 +831,27 @@ class CannyEdgeInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input tissue label image", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) variance = traits.Float( - desc= - "Variance and Maximum error are used in the Gaussian smoothing of the input image. See itkDiscreteGaussianImageFilter for information on these parameters.", - argstr="--variance %f") + desc="Variance and Maximum error are used in the Gaussian smoothing of the input image. See itkDiscreteGaussianImageFilter for information on these parameters.", + argstr="--variance %f", + ) upperThreshold = traits.Float( - desc= - "Threshold is the lowest allowed value in the output image. Its data type is the same as the data type of the output image. Any values below the Threshold level will be replaced with the OutsideValue parameter value, whose default is zero. ", - argstr="--upperThreshold %f") + desc="Threshold is the lowest allowed value in the output image. Its data type is the same as the data type of the output image. Any values below the Threshold level will be replaced with the OutsideValue parameter value, whose default is zero. ", + argstr="--upperThreshold %f", + ) lowerThreshold = traits.Float( - desc= - "Threshold is the lowest allowed value in the output image. Its data type is the same as the data type of the output image. Any values below the Threshold level will be replaced with the OutsideValue parameter value, whose default is zero. ", - argstr="--lowerThreshold %f") + desc="Threshold is the lowest allowed value in the output image. Its data type is the same as the data type of the output image. Any values below the Threshold level will be replaced with the OutsideValue parameter value, whose default is zero. 
", + argstr="--lowerThreshold %f", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) class CannyEdgeOutputSpec(TraitedSpec): @@ -826,5 +878,5 @@ class CannyEdge(SEMLikeCommandLine): input_spec = CannyEdgeInputSpec output_spec = CannyEdgeOutputSpec _cmd = " CannyEdge " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_CannyEdge.py b/nipype/interfaces/semtools/filtering/tests/test_auto_CannyEdge.py index da580ab5de..66d4da8a6c 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_CannyEdge.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_CannyEdge.py @@ -4,30 +4,23 @@ def test_CannyEdge_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - lowerThreshold=dict(argstr='--lowerThreshold %f', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - upperThreshold=dict(argstr='--upperThreshold %f', ), - variance=dict(argstr='--variance %f', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + lowerThreshold=dict(argstr="--lowerThreshold %f",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + upperThreshold=dict(argstr="--upperThreshold %f",), + variance=dict(argstr="--variance %f",), ) inputs = CannyEdge.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CannyEdge_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = CannyEdge.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_CannySegmentationLevelSetImageFilter.py b/nipype/interfaces/semtools/filtering/tests/test_auto_CannySegmentationLevelSetImageFilter.py index 30f9fe6f15..f77517fbb5 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_CannySegmentationLevelSetImageFilter.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_CannySegmentationLevelSetImageFilter.py @@ -4,42 +4,28 @@ def test_CannySegmentationLevelSetImageFilter_inputs(): input_map = dict( - advectionWeight=dict(argstr='--advectionWeight %f', ), - args=dict(argstr='%s', ), - cannyThreshold=dict(argstr='--cannyThreshold %f', ), - cannyVariance=dict(argstr='--cannyVariance %f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - initialModel=dict( - argstr='--initialModel %s', - extensions=None, - ), - initialModelIsovalue=dict(argstr='--initialModelIsovalue %f', ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - maxIterations=dict(argstr='--maxIterations %d', ), - outputSpeedVolume=dict( - argstr='--outputSpeedVolume %s', - hash_files=False, - ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + advectionWeight=dict(argstr="--advectionWeight %f",), + args=dict(argstr="%s",), + cannyThreshold=dict(argstr="--cannyThreshold %f",), + cannyVariance=dict(argstr="--cannyVariance %f",), + environ=dict(nohash=True, usedefault=True,), + 
initialModel=dict(argstr="--initialModel %s", extensions=None,), + initialModelIsovalue=dict(argstr="--initialModelIsovalue %f",), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + maxIterations=dict(argstr="--maxIterations %d",), + outputSpeedVolume=dict(argstr="--outputSpeedVolume %s", hash_files=False,), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), ) inputs = CannySegmentationLevelSetImageFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CannySegmentationLevelSetImageFilter_outputs(): output_map = dict( - outputSpeedVolume=dict(extensions=None, ), - outputVolume=dict(extensions=None, ), + outputSpeedVolume=dict(extensions=None,), outputVolume=dict(extensions=None,), ) outputs = CannySegmentationLevelSetImageFilter.output_spec() diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_DilateImage.py b/nipype/interfaces/semtools/filtering/tests/test_auto_DilateImage.py index cf30cce648..901aef8d82 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_DilateImage.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_DilateImage.py @@ -4,32 +4,22 @@ def test_DilateImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputMaskVolume=dict( - argstr='--inputMaskVolume %s', - extensions=None, - ), - inputRadius=dict(argstr='--inputRadius %d', ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputMaskVolume=dict(argstr="--inputMaskVolume %s", extensions=None,), + inputRadius=dict(argstr="--inputRadius %d",), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), ) inputs = DilateImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DilateImage_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = DilateImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_DilateMask.py b/nipype/interfaces/semtools/filtering/tests/test_auto_DilateMask.py index 1da3cc6ebe..0311b115fb 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_DilateMask.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_DilateMask.py @@ -4,33 +4,23 @@ def test_DilateMask_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputBinaryVolume=dict( - argstr='--inputBinaryVolume %s', - extensions=None, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - lowerThreshold=dict(argstr='--lowerThreshold %f', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - sizeStructuralElement=dict(argstr='--sizeStructuralElement %d', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputBinaryVolume=dict(argstr="--inputBinaryVolume %s", extensions=None,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + lowerThreshold=dict(argstr="--lowerThreshold %f",), + 
outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + sizeStructuralElement=dict(argstr="--sizeStructuralElement %d",), ) inputs = DilateMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DilateMask_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = DilateMask.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_DistanceMaps.py b/nipype/interfaces/semtools/filtering/tests/test_auto_DistanceMaps.py index d7de88405a..730644e779 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_DistanceMaps.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_DistanceMaps.py @@ -4,32 +4,22 @@ def test_DistanceMaps_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputLabelVolume=dict( - argstr='--inputLabelVolume %s', - extensions=None, - ), - inputMaskVolume=dict( - argstr='--inputMaskVolume %s', - extensions=None, - ), - inputTissueLabel=dict(argstr='--inputTissueLabel %d', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputLabelVolume=dict(argstr="--inputLabelVolume %s", extensions=None,), + inputMaskVolume=dict(argstr="--inputMaskVolume %s", extensions=None,), + inputTissueLabel=dict(argstr="--inputTissueLabel %d",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), ) inputs = DistanceMaps.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DistanceMaps_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = DistanceMaps.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_DumpBinaryTrainingVectors.py b/nipype/interfaces/semtools/filtering/tests/test_auto_DumpBinaryTrainingVectors.py index 956f0ef34d..0d7df6bf80 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_DumpBinaryTrainingVectors.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_DumpBinaryTrainingVectors.py @@ -4,25 +4,18 @@ def test_DumpBinaryTrainingVectors_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputHeaderFilename=dict( - argstr='--inputHeaderFilename %s', - extensions=None, - ), - inputVectorFilename=dict( - argstr='--inputVectorFilename %s', - extensions=None, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputHeaderFilename=dict(argstr="--inputHeaderFilename %s", extensions=None,), + inputVectorFilename=dict(argstr="--inputVectorFilename %s", extensions=None,), ) inputs = DumpBinaryTrainingVectors.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DumpBinaryTrainingVectors_outputs(): output_map = dict() outputs = DumpBinaryTrainingVectors.output_spec() diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_ErodeImage.py b/nipype/interfaces/semtools/filtering/tests/test_auto_ErodeImage.py index 
50552d0484..568cafee66 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_ErodeImage.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_ErodeImage.py @@ -4,32 +4,22 @@ def test_ErodeImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputMaskVolume=dict( - argstr='--inputMaskVolume %s', - extensions=None, - ), - inputRadius=dict(argstr='--inputRadius %d', ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputMaskVolume=dict(argstr="--inputMaskVolume %s", extensions=None,), + inputRadius=dict(argstr="--inputRadius %d",), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), ) inputs = ErodeImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ErodeImage_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = ErodeImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_FlippedDifference.py b/nipype/interfaces/semtools/filtering/tests/test_auto_FlippedDifference.py index 2f10684f7f..5498ea004d 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_FlippedDifference.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_FlippedDifference.py @@ -4,31 +4,21 @@ def test_FlippedDifference_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputMaskVolume=dict( - argstr='--inputMaskVolume %s', - extensions=None, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputMaskVolume=dict(argstr="--inputMaskVolume %s", extensions=None,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), ) inputs = FlippedDifference.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FlippedDifference_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = FlippedDifference.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateBrainClippedImage.py b/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateBrainClippedImage.py index 20d1c1718a..661bd6c1af 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateBrainClippedImage.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateBrainClippedImage.py @@ -4,32 +4,22 @@ def test_GenerateBrainClippedImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputImg=dict( - argstr='--inputImg %s', - extensions=None, - ), - inputMsk=dict( - argstr='--inputMsk %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', 
), - outputFileName=dict( - argstr='--outputFileName %s', - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputImg=dict(argstr="--inputImg %s", extensions=None,), + inputMsk=dict(argstr="--inputMsk %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputFileName=dict(argstr="--outputFileName %s", hash_files=False,), ) inputs = GenerateBrainClippedImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GenerateBrainClippedImage_outputs(): - output_map = dict(outputFileName=dict(extensions=None, ), ) + output_map = dict(outputFileName=dict(extensions=None,),) outputs = GenerateBrainClippedImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateSummedGradientImage.py b/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateSummedGradientImage.py index 5f5ba62aaa..2eedb1831c 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateSummedGradientImage.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateSummedGradientImage.py @@ -4,33 +4,23 @@ def test_GenerateSummedGradientImage_inputs(): input_map = dict( - MaximumGradient=dict(argstr='--MaximumGradient ', ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume1=dict( - argstr='--inputVolume1 %s', - extensions=None, - ), - inputVolume2=dict( - argstr='--inputVolume2 %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputFileName=dict( - argstr='--outputFileName %s', - hash_files=False, - ), + MaximumGradient=dict(argstr="--MaximumGradient ",), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume1=dict(argstr="--inputVolume1 %s", extensions=None,), + inputVolume2=dict(argstr="--inputVolume2 %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputFileName=dict(argstr="--outputFileName %s", hash_files=False,), ) inputs = GenerateSummedGradientImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GenerateSummedGradientImage_outputs(): - output_map = dict(outputFileName=dict(extensions=None, ), ) + output_map = dict(outputFileName=dict(extensions=None,),) outputs = GenerateSummedGradientImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateTestImage.py b/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateTestImage.py index bd05325a42..364ef50be4 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateTestImage.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateTestImage.py @@ -4,32 +4,23 @@ def test_GenerateTestImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - lowerBoundOfOutputVolume=dict( - argstr='--lowerBoundOfOutputVolume %f', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - outputVolumeSize=dict(argstr='--outputVolumeSize %f', ), - upperBoundOfOutputVolume=dict( - argstr='--upperBoundOfOutputVolume %f', ), + args=dict(argstr="%s",), + 
environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + lowerBoundOfOutputVolume=dict(argstr="--lowerBoundOfOutputVolume %f",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + outputVolumeSize=dict(argstr="--outputVolumeSize %f",), + upperBoundOfOutputVolume=dict(argstr="--upperBoundOfOutputVolume %f",), ) inputs = GenerateTestImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GenerateTestImage_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = GenerateTestImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_GradientAnisotropicDiffusionImageFilter.py b/nipype/interfaces/semtools/filtering/tests/test_auto_GradientAnisotropicDiffusionImageFilter.py index e07ac2157e..383a836a20 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_GradientAnisotropicDiffusionImageFilter.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_GradientAnisotropicDiffusionImageFilter.py @@ -4,30 +4,23 @@ def test_GradientAnisotropicDiffusionImageFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - conductance=dict(argstr='--conductance %f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - numberOfIterations=dict(argstr='--numberOfIterations %d', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - timeStep=dict(argstr='--timeStep %f', ), + args=dict(argstr="%s",), + conductance=dict(argstr="--conductance %f",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + numberOfIterations=dict(argstr="--numberOfIterations %d",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + timeStep=dict(argstr="--timeStep %f",), ) inputs = GradientAnisotropicDiffusionImageFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GradientAnisotropicDiffusionImageFilter_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = GradientAnisotropicDiffusionImageFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_HammerAttributeCreator.py b/nipype/interfaces/semtools/filtering/tests/test_auto_HammerAttributeCreator.py index 45e3e253ba..9e77140c41 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_HammerAttributeCreator.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_HammerAttributeCreator.py @@ -4,32 +4,22 @@ def test_HammerAttributeCreator_inputs(): input_map = dict( - Scale=dict(argstr='--Scale %d', ), - Strength=dict(argstr='--Strength %f', ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputCSFVolume=dict( - argstr='--inputCSFVolume %s', - extensions=None, - ), - inputGMVolume=dict( - argstr='--inputGMVolume %s', - extensions=None, - ), - inputWMVolume=dict( - argstr='--inputWMVolume %s', - extensions=None, - ), - outputVolumeBase=dict(argstr='--outputVolumeBase %s', ), + Scale=dict(argstr="--Scale 
%d",), + Strength=dict(argstr="--Strength %f",), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputCSFVolume=dict(argstr="--inputCSFVolume %s", extensions=None,), + inputGMVolume=dict(argstr="--inputGMVolume %s", extensions=None,), + inputWMVolume=dict(argstr="--inputWMVolume %s", extensions=None,), + outputVolumeBase=dict(argstr="--outputVolumeBase %s",), ) inputs = HammerAttributeCreator.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_HammerAttributeCreator_outputs(): output_map = dict() outputs = HammerAttributeCreator.output_spec() diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_NeighborhoodMean.py b/nipype/interfaces/semtools/filtering/tests/test_auto_NeighborhoodMean.py index db06fd3746..9b4652a60c 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_NeighborhoodMean.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_NeighborhoodMean.py @@ -4,32 +4,22 @@ def test_NeighborhoodMean_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputMaskVolume=dict( - argstr='--inputMaskVolume %s', - extensions=None, - ), - inputRadius=dict(argstr='--inputRadius %d', ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputMaskVolume=dict(argstr="--inputMaskVolume %s", extensions=None,), + inputRadius=dict(argstr="--inputRadius %d",), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), ) inputs = NeighborhoodMean.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_NeighborhoodMean_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = NeighborhoodMean.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_NeighborhoodMedian.py b/nipype/interfaces/semtools/filtering/tests/test_auto_NeighborhoodMedian.py index 0ddeb1eb16..ae1fc8266d 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_NeighborhoodMedian.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_NeighborhoodMedian.py @@ -4,32 +4,22 @@ def test_NeighborhoodMedian_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputMaskVolume=dict( - argstr='--inputMaskVolume %s', - extensions=None, - ), - inputRadius=dict(argstr='--inputRadius %d', ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputMaskVolume=dict(argstr="--inputMaskVolume %s", extensions=None,), + inputRadius=dict(argstr="--inputRadius %d",), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), ) inputs = NeighborhoodMedian.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], 
metakey) == value + + def test_NeighborhoodMedian_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = NeighborhoodMedian.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_STAPLEAnalysis.py b/nipype/interfaces/semtools/filtering/tests/test_auto_STAPLEAnalysis.py index 095f678e51..9d841a6052 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_STAPLEAnalysis.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_STAPLEAnalysis.py @@ -4,25 +4,21 @@ def test_STAPLEAnalysis_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputDimension=dict(argstr='--inputDimension %d', ), - inputLabelVolume=dict(argstr='--inputLabelVolume %s...', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputDimension=dict(argstr="--inputDimension %d",), + inputLabelVolume=dict(argstr="--inputLabelVolume %s...",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), ) inputs = STAPLEAnalysis.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_STAPLEAnalysis_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = STAPLEAnalysis.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_TextureFromNoiseImageFilter.py b/nipype/interfaces/semtools/filtering/tests/test_auto_TextureFromNoiseImageFilter.py index 53d3031995..9d820afb58 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_TextureFromNoiseImageFilter.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_TextureFromNoiseImageFilter.py @@ -4,28 +4,21 @@ def test_TextureFromNoiseImageFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputRadius=dict(argstr='--inputRadius %d', ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputRadius=dict(argstr="--inputRadius %d",), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), ) inputs = TextureFromNoiseImageFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TextureFromNoiseImageFilter_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = TextureFromNoiseImageFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_TextureMeasureFilter.py b/nipype/interfaces/semtools/filtering/tests/test_auto_TextureMeasureFilter.py index 2306af1639..ea9e8843d5 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_TextureMeasureFilter.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_TextureMeasureFilter.py @@ -4,33 +4,23 @@ def 
test_TextureMeasureFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - distance=dict(argstr='--distance %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputMaskVolume=dict( - argstr='--inputMaskVolume %s', - extensions=None, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - insideROIValue=dict(argstr='--insideROIValue %f', ), - outputFilename=dict( - argstr='--outputFilename %s', - hash_files=False, - ), + args=dict(argstr="%s",), + distance=dict(argstr="--distance %d",), + environ=dict(nohash=True, usedefault=True,), + inputMaskVolume=dict(argstr="--inputMaskVolume %s", extensions=None,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + insideROIValue=dict(argstr="--insideROIValue %f",), + outputFilename=dict(argstr="--outputFilename %s", hash_files=False,), ) inputs = TextureMeasureFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TextureMeasureFilter_outputs(): - output_map = dict(outputFilename=dict(extensions=None, ), ) + output_map = dict(outputFilename=dict(extensions=None,),) outputs = TextureMeasureFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/filtering/tests/test_auto_UnbiasedNonLocalMeans.py b/nipype/interfaces/semtools/filtering/tests/test_auto_UnbiasedNonLocalMeans.py index edf1a78553..113d9607c2 100644 --- a/nipype/interfaces/semtools/filtering/tests/test_auto_UnbiasedNonLocalMeans.py +++ b/nipype/interfaces/semtools/filtering/tests/test_auto_UnbiasedNonLocalMeans.py @@ -4,43 +4,25 @@ def test_UnbiasedNonLocalMeans_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - hp=dict(argstr='--hp %f', ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - ps=dict(argstr='--ps %f', ), - rc=dict( - argstr='--rc %s', - sep=',', - ), - rs=dict( - argstr='--rs %s', - sep=',', - ), - sigma=dict(argstr='--sigma %f', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + hp=dict(argstr="--hp %f",), + inputVolume=dict(argstr="%s", extensions=None, position=-2,), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), + ps=dict(argstr="--ps %f",), + rc=dict(argstr="--rc %s", sep=",",), + rs=dict(argstr="--rs %s", sep=",",), + sigma=dict(argstr="--sigma %f",), ) inputs = UnbiasedNonLocalMeans.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_UnbiasedNonLocalMeans_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = UnbiasedNonLocalMeans.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/legacy/registration.py b/nipype/interfaces/semtools/legacy/registration.py index 04bb425e3d..9835889040 100644 --- a/nipype/interfaces/semtools/legacy/registration.py +++ b/nipype/interfaces/semtools/legacy/registration.py @@ -5,44 +5,53 @@ import os -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLine, + CommandLineInputSpec, 
+ SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class scalartransformInputSpec(CommandLineInputSpec): - input_image = File( - desc="Image to tranform", exists=True, argstr="--input_image %s") + input_image = File(desc="Image to transform", exists=True, argstr="--input_image %s") output_image = traits.Either( traits.Bool, File(), hash_files=False, desc="The transformed image", - argstr="--output_image %s") + argstr="--output_image %s", + ) transformation = traits.Either( traits.Bool, File(), hash_files=False, desc="Output file for transformation parameters", - argstr="--transformation %s") - invert = traits.Bool( - desc="Invert tranform before applying.", argstr="--invert ") + argstr="--transformation %s", + ) + invert = traits.Bool(desc="Invert transform before applying.", argstr="--invert ") deformation = File( - desc="Deformation field.", exists=True, argstr="--deformation %s") + desc="Deformation field.", exists=True, argstr="--deformation %s" + ) - h_field = traits.Bool( - desc="The deformation is an h-field.", argstr="--h_field ") + h_field = traits.Bool(desc="The deformation is an h-field.", argstr="--h_field ") interpolation = traits.Enum( "nearestneighbor", "linear", "cubic", desc="Interpolation type (nearestneighbor, linear, cubic)", - argstr="--interpolation %s") + argstr="--interpolation %s", + ) class scalartransformOutputSpec(TraitedSpec): output_image = File(desc="The transformed image", exists=True) - transformation = File( - desc="Output file for transformation parameters", exists=True) + transformation = File(desc="Output file for transformation parameters", exists=True) class scalartransform(SEMLikeCommandLine): @@ -68,7 +77,7 @@ class scalartransform(SEMLikeCommandLine): output_spec = scalartransformOutputSpec _cmd = " scalartransform " _outputs_filenames = { - 'output_image': 'output_image.nii', - 'transformation': 'transformation' + "output_image": "output_image.nii", + "transformation": "transformation", } _redirect_x = False diff --git a/nipype/interfaces/semtools/legacy/tests/test_auto_scalartransform.py b/nipype/interfaces/semtools/legacy/tests/test_auto_scalartransform.py index d28abe6cc6..248c98df3d 100644 --- a/nipype/interfaces/semtools/legacy/tests/test_auto_scalartransform.py +++ b/nipype/interfaces/semtools/legacy/tests/test_auto_scalartransform.py @@ -4,40 +4,26 @@ def test_scalartransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), - deformation=dict( - argstr='--deformation %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - h_field=dict(argstr='--h_field ', ), - input_image=dict( - argstr='--input_image %s', - extensions=None, - ), - interpolation=dict(argstr='--interpolation %s', ), - invert=dict(argstr='--invert ', ), - output_image=dict( - argstr='--output_image %s', - hash_files=False, - ), - transformation=dict( - argstr='--transformation %s', - hash_files=False, - ), + args=dict(argstr="%s",), + deformation=dict(argstr="--deformation %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), + h_field=dict(argstr="--h_field ",), + input_image=dict(argstr="--input_image %s", extensions=None,), + interpolation=dict(argstr="--interpolation %s",), + invert=dict(argstr="--invert ",), + output_image=dict(argstr="--output_image %s", hash_files=False,), + transformation=dict(argstr="--transformation %s", hash_files=False,), ) inputs = scalartransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_scalartransform_outputs(): output_map = dict( - output_image=dict(extensions=None, ), - transformation=dict(extensions=None, ), + output_image=dict(extensions=None,), transformation=dict(extensions=None,), ) outputs = scalartransform.output_spec() diff --git a/nipype/interfaces/semtools/registration/__init__.py b/nipype/interfaces/semtools/registration/__init__.py index 3487d13134..6d52169ab3 100644 --- a/nipype/interfaces/semtools/registration/__init__.py +++ b/nipype/interfaces/semtools/registration/__init__.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- -from .specialized import (VBRAINSDemonWarp, BRAINSDemonWarp, - BRAINSTransformFromFiducials) +from .specialized import VBRAINSDemonWarp, BRAINSDemonWarp, BRAINSTransformFromFiducials from .brainsresample import BRAINSResample from .brainsfit import BRAINSFit from .brainsresize import BRAINSResize diff --git a/nipype/interfaces/semtools/registration/brainsfit.py b/nipype/interfaces/semtools/registration/brainsfit.py index 6142aac418..343edd2155 100644 --- a/nipype/interfaces/semtools/registration/brainsfit.py +++ b/nipype/interfaces/semtools/registration/brainsfit.py @@ -5,154 +5,163 @@ import os -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class BRAINSFitInputSpec(CommandLineInputSpec): fixedVolume = File( - desc= - "Input fixed image (the moving image will be transformed into this image space).", + desc="Input fixed image (the moving image will be transformed into this image space).", exists=True, - argstr="--fixedVolume %s") + argstr="--fixedVolume %s", + ) movingVolume = File( - desc= - "Input moving image (this image will be transformed into the fixed image space).", + desc="Input moving image (this image will be transformed into the fixed image space).", exists=True, - argstr="--movingVolume %s") + argstr="--movingVolume %s", + ) samplingPercentage = traits.Float( - desc= - "Fraction of voxels of the fixed image that will be used for registration. The number has to be larger than zero and less or equal to one. Higher values increase the computation time but may give more accurate results. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation. The default is 0.002 (use approximately 0.2% of voxels, resulting in 100000 samples in a 512x512x192 volume) to provide a very fast registration in most cases. Typical values range from 0.01 (1%) for low detail images to 0.2 (20%) for high detail images.", - argstr="--samplingPercentage %f") + desc="Fraction of voxels of the fixed image that will be used for registration. The number has to be larger than zero and less or equal to one. Higher values increase the computation time but may give more accurate results. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation. The default is 0.002 (use approximately 0.2% of voxels, resulting in 100000 samples in a 512x512x192 volume) to provide a very fast registration in most cases. 
Typical values range from 0.01 (1%) for low detail images to 0.2 (20%) for high detail images.", + argstr="--samplingPercentage %f", + ) splineGridSize = InputMultiPath( traits.Int, - desc= - "Number of BSpline grid subdivisions along each axis of the fixed image, centered on the image space. Values must be 3 or higher for the BSpline to be correctly computed.", + desc="Number of BSpline grid subdivisions along each axis of the fixed image, centered on the image space. Values must be 3 or higher for the BSpline to be correctly computed.", sep=",", - argstr="--splineGridSize %s") + argstr="--splineGridSize %s", + ) linearTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "(optional) Output estimated transform - in case the computed transform is not BSpline. NOTE: You must set at least one output object (transform and/or output volume).", - argstr="--linearTransform %s") + desc="(optional) Output estimated transform - in case the computed transform is not BSpline. NOTE: You must set at least one output object (transform and/or output volume).", + argstr="--linearTransform %s", + ) bsplineTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "(optional) Output estimated transform - in case the computed transform is BSpline. NOTE: You must set at least one output object (transform and/or output volume).", - argstr="--bsplineTransform %s") + desc="(optional) Output estimated transform - in case the computed transform is BSpline. NOTE: You must set at least one output object (transform and/or output volume).", + argstr="--bsplineTransform %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "(optional) Output image: the moving image warped to the fixed image space. NOTE: You must set at least one output object (transform and/or output volume).", - argstr="--outputVolume %s") + desc="(optional) Output image: the moving image warped to the fixed image space. NOTE: You must set at least one output object (transform and/or output volume).", + argstr="--outputVolume %s", + ) initialTransform = File( - desc= - "Transform to be applied to the moving image to initialize the registration. This can only be used if Initialize Transform Mode is Off.", + desc="Transform to be applied to the moving image to initialize the registration. This can only be used if Initialize Transform Mode is Off.", exists=True, - argstr="--initialTransform %s") + argstr="--initialTransform %s", + ) initializeTransformMode = traits.Enum( "Off", "useMomentsAlign", "useCenterOfHeadAlign", "useGeometryAlign", "useCenterOfROIAlign", - desc= - "Determine how to initialize the transform center. useMomentsAlign assumes that the center of mass of the images represent similar structures. useCenterOfHeadAlign attempts to use the top of head and shape of neck to drive a center of mass estimate. useGeometryAlign on assumes that the center of the voxel lattice of the images represent similar structures. Off assumes that the physical space of the images are close. This flag is mutually exclusive with the Initialization transform.", - argstr="--initializeTransformMode %s") + desc="Determine how to initialize the transform center. useMomentsAlign assumes that the center of mass of the images represent similar structures. useCenterOfHeadAlign attempts to use the top of head and shape of neck to drive a center of mass estimate. useGeometryAlign on assumes that the center of the voxel lattice of the images represent similar structures. 
Off assumes that the physical space of the images are close. This flag is mutually exclusive with the Initialization transform.", + argstr="--initializeTransformMode %s", + ) useRigid = traits.Bool( - desc= - "Perform a rigid registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", - argstr="--useRigid ") + desc="Perform a rigid registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", + argstr="--useRigid ", + ) useScaleVersor3D = traits.Bool( - desc= - "Perform a ScaleVersor3D registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", - argstr="--useScaleVersor3D ") + desc="Perform a ScaleVersor3D registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", + argstr="--useScaleVersor3D ", + ) useScaleSkewVersor3D = traits.Bool( - desc= - "Perform a ScaleSkewVersor3D registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", - argstr="--useScaleSkewVersor3D ") + desc="Perform a ScaleSkewVersor3D registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", + argstr="--useScaleSkewVersor3D ", + ) useAffine = traits.Bool( - desc= - "Perform an Affine registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", - argstr="--useAffine ") + desc="Perform an Affine registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", + argstr="--useAffine ", + ) useBSpline = traits.Bool( - desc= - "Perform a BSpline registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", - argstr="--useBSpline ") + desc="Perform a BSpline registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", + argstr="--useBSpline ", + ) useSyN = traits.Bool( - desc= - "Perform a SyN registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", - argstr="--useSyN ") + desc="Perform a SyN registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", + argstr="--useSyN ", + ) useComposite = traits.Bool( - desc= - "Perform a Composite registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", - argstr="--useComposite ") + desc="Perform a Composite registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", + argstr="--useComposite ", + ) maskProcessingMode = traits.Enum( "NOMASK", "ROIAUTO", "ROI", - desc= - "Specifies a mask to only consider a certain image region for the registration. If ROIAUTO is chosen, then the mask is computed using Otsu thresholding and hole filling. 
If ROI is chosen then the mask has to be specified as in input.", - argstr="--maskProcessingMode %s") + desc="Specifies a mask to only consider a certain image region for the registration. If ROIAUTO is chosen, then the mask is computed using Otsu thresholding and hole filling. If ROI is chosen then the mask has to be specified as an input.", + argstr="--maskProcessingMode %s", + ) fixedBinaryVolume = File( - desc= - "Fixed Image binary mask volume, required if Masking Option is ROI. Image areas where the mask volume has zero value are ignored during the registration.", + desc="Fixed Image binary mask volume, required if Masking Option is ROI. Image areas where the mask volume has zero value are ignored during the registration.", exists=True, - argstr="--fixedBinaryVolume %s") + argstr="--fixedBinaryVolume %s", + ) movingBinaryVolume = File( - desc= - "Moving Image binary mask volume, required if Masking Option is ROI. Image areas where the mask volume has zero value are ignored during the registration.", + desc="Moving Image binary mask volume, required if Masking Option is ROI. Image areas where the mask volume has zero value are ignored during the registration.", exists=True, - argstr="--movingBinaryVolume %s") + argstr="--movingBinaryVolume %s", + ) outputFixedVolumeROI = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "ROI that is automatically computed from the fixed image. Only available if Masking Option is ROIAUTO. Image areas where the mask volume has zero value are ignored during the registration.", - argstr="--outputFixedVolumeROI %s") + desc="ROI that is automatically computed from the fixed image. Only available if Masking Option is ROIAUTO. Image areas where the mask volume has zero value are ignored during the registration.", + argstr="--outputFixedVolumeROI %s", + ) outputMovingVolumeROI = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "ROI that is automatically computed from the moving image. Only available if Masking Option is ROIAUTO. Image areas where the mask volume has zero value are ignored during the registration.", - argstr="--outputMovingVolumeROI %s") + desc="ROI that is automatically computed from the moving image. Only available if Masking Option is ROIAUTO. Image areas where the mask volume has zero value are ignored during the registration.", + argstr="--outputMovingVolumeROI %s", + ) useROIBSpline = traits.Bool( - desc= - "If enabled then the bounding box of the input ROIs defines the BSpline grid support region. Otherwise the BSpline grid support region is the whole fixed image.", - argstr="--useROIBSpline ") + desc="If enabled then the bounding box of the input ROIs defines the BSpline grid support region. Otherwise the BSpline grid support region is the whole fixed image.", + argstr="--useROIBSpline ", + ) histogramMatch = traits.Bool( - desc= - "Apply histogram matching operation for the input images to make them more similar. This is suitable for images of the same modality that may have different brightness or contrast, but the same overall intensity profile. 
Do NOT use if registering images from different modalities.", + argstr="--histogramMatch ", + ) medianFilterSize = InputMultiPath( traits.Int, - desc= - "Apply median filtering to reduce noise in the input volumes. The 3 values specify the radius for the optional MedianImageFilter preprocessing in all 3 directions (in voxels).", + desc="Apply median filtering to reduce noise in the input volumes. The 3 values specify the radius for the optional MedianImageFilter preprocessing in all 3 directions (in voxels).", sep=",", - argstr="--medianFilterSize %s") + argstr="--medianFilterSize %s", + ) removeIntensityOutliers = traits.Float( - desc= - "Remove very high and very low intensity voxels from the input volumes. The parameter specifies the half percentage to decide outliers of image intensities. The default value is zero, which means no outlier removal. If the value of 0.005 is given, the 0.005% of both tails will be thrown away, so 0.01% of intensities in total would be ignored in the statistic calculation.", - argstr="--removeIntensityOutliers %f") + desc="Remove very high and very low intensity voxels from the input volumes. The parameter specifies the half percentage to decide outliers of image intensities. The default value is zero, which means no outlier removal. If the value of 0.005 is given, the 0.005% of both tails will be thrown away, so 0.01% of intensities in total would be ignored in the statistic calculation.", + argstr="--removeIntensityOutliers %f", + ) fixedVolume2 = File( - desc= - "Input fixed image that will be used for multimodal registration. (the moving image will be transformed into this image space).", + desc="Input fixed image that will be used for multimodal registration. (the moving image will be transformed into this image space).", exists=True, - argstr="--fixedVolume2 %s") + argstr="--fixedVolume2 %s", + ) movingVolume2 = File( - desc= - "Input moving image that will be used for multimodal registration(this image will be transformed into the fixed image space).", + desc="Input moving image that will be used for multimodal registration(this image will be transformed into the fixed image space).", exists=True, - argstr="--movingVolume2 %s") + argstr="--movingVolume2 %s", + ) outputVolumePixelType = traits.Enum( "float", "short", @@ -161,15 +170,16 @@ class BRAINSFitInputSpec(CommandLineInputSpec): "uint", "uchar", desc="Data type for representing a voxel of the Output Volume.", - argstr="--outputVolumePixelType %s") + argstr="--outputVolumePixelType %s", + ) backgroundFillValue = traits.Float( - desc= - "This value will be used for filling those areas of the output image that have no corresponding voxels in the input moving image.", - argstr="--backgroundFillValue %f") + desc="This value will be used for filling those areas of the output image that have no corresponding voxels in the input moving image.", + argstr="--backgroundFillValue %f", + ) scaleOutputValues = traits.Bool( - desc= - "If true, and the voxel values do not fit within the minimum and maximum values of the desired outputVolumePixelType, then linearly scale the min/max output image voxel values to fit within the min/max range of the outputVolumePixelType.", - argstr="--scaleOutputValues ") + desc="If true, and the voxel values do not fit within the minimum and maximum values of the desired outputVolumePixelType, then linearly scale the min/max output image voxel values to fit within the min/max range of the outputVolumePixelType.", + argstr="--scaleOutputValues ", + ) interpolationMode = 
traits.Enum( "NearestNeighbor", "Linear", @@ -181,199 +191,200 @@ class BRAINSFitInputSpec(CommandLineInputSpec): "Welch", "Lanczos", "Blackman", - desc= - "Type of interpolation to be used when applying transform to moving volume. Options are Linear, NearestNeighbor, BSpline, WindowedSinc, Hamming, Cosine, Welch, Lanczos, or ResampleInPlace. The ResampleInPlace option will create an image with the same discrete voxel values and will adjust the origin and direction of the physical space interpretation.", - argstr="--interpolationMode %s") + desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, NearestNeighbor, BSpline, WindowedSinc, Hamming, Cosine, Welch, Lanczos, or ResampleInPlace. The ResampleInPlace option will create an image with the same discrete voxel values and will adjust the origin and direction of the physical space interpretation.", + argstr="--interpolationMode %s", + ) numberOfIterations = InputMultiPath( traits.Int, - desc= - "The maximum number of iterations to try before stopping the optimization. When using a lower value (500-1000) then the registration is forced to terminate earlier but there is a higher risk of stopping before an optimal solution is reached.", + desc="The maximum number of iterations to try before stopping the optimization. When using a lower value (500-1000) then the registration is forced to terminate earlier but there is a higher risk of stopping before an optimal solution is reached.", sep=",", - argstr="--numberOfIterations %s") + argstr="--numberOfIterations %s", + ) maximumStepLength = traits.Float( - desc= - "Starting step length of the optimizer. In general, higher values allow for recovering larger initial misalignments but there is an increased chance that the registration will not converge.", - argstr="--maximumStepLength %f") + desc="Starting step length of the optimizer. In general, higher values allow for recovering larger initial misalignments but there is an increased chance that the registration will not converge.", + argstr="--maximumStepLength %f", + ) minimumStepLength = InputMultiPath( traits.Float, - desc= - "Each step in the optimization takes steps at least this big. When none are possible, registration is complete. Smaller values allows the optimizer to make smaller adjustments, but the registration time may increase.", + desc="Each step in the optimization takes steps at least this big. When none are possible, registration is complete. Smaller values allows the optimizer to make smaller adjustments, but the registration time may increase.", sep=",", - argstr="--minimumStepLength %s") + argstr="--minimumStepLength %s", + ) relaxationFactor = traits.Float( - desc= - "Specifies how quickly the optimization step length is decreased during registration. The value must be larger than 0 and smaller than 1. Larger values result in slower step size decrease, which allow for recovering larger initial misalignments but it increases the registration time and the chance that the registration will not converge.", - argstr="--relaxationFactor %f") + desc="Specifies how quickly the optimization step length is decreased during registration. The value must be larger than 0 and smaller than 1. 
Larger values result in slower step size decrease, which allow for recovering larger initial misalignments but it increases the registration time and the chance that the registration will not converge.", + argstr="--relaxationFactor %f", + ) translationScale = traits.Float( - desc= - "How much to scale up changes in position (in mm) compared to unit rotational changes (in radians) -- decrease this to allow for more rotation in the search pattern.", - argstr="--translationScale %f") + desc="How much to scale up changes in position (in mm) compared to unit rotational changes (in radians) -- decrease this to allow for more rotation in the search pattern.", + argstr="--translationScale %f", + ) reproportionScale = traits.Float( - desc= - "ScaleVersor3D 'Scale' compensation factor. Increase this to allow for more rescaling in a ScaleVersor3D or ScaleSkewVersor3D search pattern. 1.0 works well with a translationScale of 1000.0", - argstr="--reproportionScale %f") + desc="ScaleVersor3D 'Scale' compensation factor. Increase this to allow for more rescaling in a ScaleVersor3D or ScaleSkewVersor3D search pattern. 1.0 works well with a translationScale of 1000.0", + argstr="--reproportionScale %f", + ) skewScale = traits.Float( - desc= - "ScaleSkewVersor3D Skew compensation factor. Increase this to allow for more skew in a ScaleSkewVersor3D search pattern. 1.0 works well with a translationScale of 1000.0", - argstr="--skewScale %f") + desc="ScaleSkewVersor3D Skew compensation factor. Increase this to allow for more skew in a ScaleSkewVersor3D search pattern. 1.0 works well with a translationScale of 1000.0", + argstr="--skewScale %f", + ) maxBSplineDisplacement = traits.Float( - desc= - "Maximum allowed displacements in image physical coordinates (mm) for BSpline control grid along each axis. A value of 0.0 indicates that the problem should be unbounded. NOTE: This only constrains the BSpline portion, and does not limit the displacement from the associated bulk transform. This can lead to a substantial reduction in computation time in the BSpline optimizer., ", - argstr="--maxBSplineDisplacement %f") + desc="Maximum allowed displacements in image physical coordinates (mm) for BSpline control grid along each axis. A value of 0.0 indicates that the problem should be unbounded. NOTE: This only constrains the BSpline portion, and does not limit the displacement from the associated bulk transform. This can lead to a substantial reduction in computation time in the BSpline optimizer., ", + argstr="--maxBSplineDisplacement %f", + ) fixedVolumeTimeIndex = traits.Int( - desc= - "The index in the time series for the 3D fixed image to fit. Only allowed if the fixed input volume is 4-dimensional.", - argstr="--fixedVolumeTimeIndex %d") + desc="The index in the time series for the 3D fixed image to fit. Only allowed if the fixed input volume is 4-dimensional.", + argstr="--fixedVolumeTimeIndex %d", + ) movingVolumeTimeIndex = traits.Int( - desc= - "The index in the time series for the 3D moving image to fit. Only allowed if the moving input volume is 4-dimensional", - argstr="--movingVolumeTimeIndex %d") + desc="The index in the time series for the 3D moving image to fit. 
Only allowed if the moving input volume is 4-dimensional", + argstr="--movingVolumeTimeIndex %d", + ) numberOfHistogramBins = traits.Int( - desc= - "The number of histogram levels used for mutual information metric estimation.", - argstr="--numberOfHistogramBins %d") + desc="The number of histogram levels used for mutual information metric estimation.", + argstr="--numberOfHistogramBins %d", + ) numberOfMatchPoints = traits.Int( - desc= - "Number of histogram match points used for mutual information metric estimation.", - argstr="--numberOfMatchPoints %d") + desc="Number of histogram match points used for mutual information metric estimation.", + argstr="--numberOfMatchPoints %d", + ) costMetric = traits.Enum( "MMI", "MSE", "NC", "MIH", - desc= - "The cost metric to be used during fitting. Defaults to MMI. Options are MMI (Mattes Mutual Information), MSE (Mean Square Error), NC (Normalized Correlation), MC (Match Cardinality for binary images)", - argstr="--costMetric %s") + desc="The cost metric to be used during fitting. Defaults to MMI. Options are MMI (Mattes Mutual Information), MSE (Mean Square Error), NC (Normalized Correlation), MC (Match Cardinality for binary images)", + argstr="--costMetric %s", + ) maskInferiorCutOffFromCenter = traits.Float( - desc= - "If Initialize Transform Mode is set to useCenterOfHeadAlign or Masking Option is ROIAUTO then this value defines the how much is cut of from the inferior part of the image. The cut-off distance is specified in millimeters, relative to the image center. If the value is 1000 or larger then no cut-off performed.", - argstr="--maskInferiorCutOffFromCenter %f") + desc="If Initialize Transform Mode is set to useCenterOfHeadAlign or Masking Option is ROIAUTO then this value defines how much is cut off from the inferior part of the image. The cut-off distance is specified in millimeters, relative to the image center. If the value is 1000 or larger then no cut-off is performed.", + argstr="--maskInferiorCutOffFromCenter %f", + ) ROIAutoDilateSize = traits.Float( - desc= - "This flag is only relevant when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. A setting of 10mm has been shown to help regularize a BSpline registration type so that there is some background constraints to match the edges of the head better.", - argstr="--ROIAutoDilateSize %f") + desc="This flag is only relevant when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. A setting of 10mm has been shown to help regularize a BSpline registration type so that there are some background constraints to match the edges of the head better.", + argstr="--ROIAutoDilateSize %f", + ) ROIAutoClosingSize = traits.Float( - desc= - "This flag is only relevant when using ROIAUTO mode for initializing masks. It defines the hole closing size in mm. It is rounded up to the nearest whole pixel size in each direction. The default is to use a closing size of 9mm. 
For mouse data this value may need to be reset to 0.9 or smaller.", + argstr="--ROIAutoClosingSize %f", + ) numberOfSamples = traits.Int( - desc= - "The number of voxels sampled for mutual information computation. Increase this for higher accuracy, at the cost of longer computation time., NOTE that it is suggested to use samplingPercentage instead of this option. However, if set to non-zero, numberOfSamples overwrites the samplingPercentage option. ", - argstr="--numberOfSamples %d") + desc="The number of voxels sampled for mutual information computation. Increase this for higher accuracy, at the cost of longer computation time., NOTE that it is suggested to use samplingPercentage instead of this option. However, if set to non-zero, numberOfSamples overwrites the samplingPercentage option. ", + argstr="--numberOfSamples %d", + ) strippedOutputTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Rigid component of the estimated affine transform. Can be used to rigidly register the moving image to the fixed image. NOTE: This value is overridden if either bsplineTransform or linearTransform is set.", - argstr="--strippedOutputTransform %s") + desc="Rigid component of the estimated affine transform. Can be used to rigidly register the moving image to the fixed image. NOTE: This value is overridden if either bsplineTransform or linearTransform is set.", + argstr="--strippedOutputTransform %s", + ) transformType = InputMultiPath( traits.Str, - desc= - "Specifies a list of registration types to be used. The valid types are, Rigid, ScaleVersor3D, ScaleSkewVersor3D, Affine, BSpline and SyN. Specifying more than one in a comma separated list will initialize the next stage with the previous results. If registrationClass flag is used, it overrides this parameter setting.", + desc="Specifies a list of registration types to be used. The valid types are, Rigid, ScaleVersor3D, ScaleSkewVersor3D, Affine, BSpline and SyN. Specifying more than one in a comma separated list will initialize the next stage with the previous results. If registrationClass flag is used, it overrides this parameter setting.", sep=",", - argstr="--transformType %s") + argstr="--transformType %s", + ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "(optional) Filename to which save the (optional) estimated transform. NOTE: You must select either the outputTransform or the outputVolume option.", - argstr="--outputTransform %s") + desc="(optional) Filename to which save the (optional) estimated transform. NOTE: You must select either the outputTransform or the outputVolume option.", + argstr="--outputTransform %s", + ) initializeRegistrationByCurrentGenericTransform = traits.Bool( - desc= - "If this flag is ON, the current generic composite transform, resulted from the linear registration stages, is set to initialize the follow nonlinear registration process. However, by the default behaviour, the moving image is first warped based on the existant transform before it is passed to the BSpline registration filter. It is done to speed up the BSpline registration by reducing the computations of composite transform Jacobian.", - argstr="--initializeRegistrationByCurrentGenericTransform ") + desc="If this flag is ON, the current generic composite transform, resulting from the linear registration stages, is set to initialize the following nonlinear registration process. 
However, by the default behaviour, the moving image is first warped based on the existing transform before it is passed to the BSpline registration filter. It is done to speed up the BSpline registration by reducing the computations of composite transform Jacobian.", + argstr="--initializeRegistrationByCurrentGenericTransform ", + ) failureExitCode = traits.Int( - desc= - "If the fit fails, exit with this status code. (It can be used to force a successfult exit status of (0) if the registration fails due to reaching the maximum number of iterations.", - argstr="--failureExitCode %d") + desc="If the fit fails, exit with this status code. (It can be used to force a successful exit status of (0) if the registration fails due to reaching the maximum number of iterations.", + argstr="--failureExitCode %d", + ) writeTransformOnFailure = traits.Bool( - desc= - "Flag to save the final transform even if the numberOfIterations are reached without convergence. (Intended for use when --failureExitCode 0 )", - argstr="--writeTransformOnFailure ") + desc="Flag to save the final transform even if the numberOfIterations are reached without convergence. (Intended for use when --failureExitCode 0 )", + argstr="--writeTransformOnFailure ", + ) numberOfThreads = traits.Int( - desc= - "Explicitly specify the maximum number of threads to use. (default is auto-detected)", - argstr="--numberOfThreads %d") + desc="Explicitly specify the maximum number of threads to use. (default is auto-detected)", + argstr="--numberOfThreads %d", + ) debugLevel = traits.Int( - desc= - "Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", - argstr="--debugLevel %d") + desc="Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", + argstr="--debugLevel %d", + ) costFunctionConvergenceFactor = traits.Float( - desc= - "From itkLBFGSBOptimizer.h: Set/Get the CostFunctionConvergenceFactor. Algorithm terminates when the reduction in cost function is less than (factor * epsmcj) where epsmch is the machine precision. Typical values for factor: 1e+12 for low accuracy; 1e+7 for moderate accuracy and 1e+1 for extremely high accuracy. 1e+9 seems to work well., ", - argstr="--costFunctionConvergenceFactor %f") + desc="From itkLBFGSBOptimizer.h: Set/Get the CostFunctionConvergenceFactor. Algorithm terminates when the reduction in cost function is less than (factor * epsmch) where epsmch is the machine precision. Typical values for factor: 1e+12 for low accuracy; 1e+7 for moderate accuracy and 1e+1 for extremely high accuracy. 1e+9 seems to work well., ", + argstr="--costFunctionConvergenceFactor %f", + ) projectedGradientTolerance = traits.Float( - desc= - "From itkLBFGSBOptimizer.h: Set/Get the ProjectedGradientTolerance. Algorithm terminates when the project gradient is below the tolerance. 
Default lbfgsb value is 1e-5, but 1e-4 seems to work well., ", - argstr="--projectedGradientTolerance %f") + desc="From itkLBFGSBOptimizer.h: Set/Get the ProjectedGradientTolerance. Algorithm terminates when the projected gradient is below the tolerance. Default lbfgsb value is 1e-5, but 1e-4 seems to work well., ", + argstr="--projectedGradientTolerance %f", + ) maximumNumberOfEvaluations = traits.Int( - desc= - "Maximum number of evaluations for line search in lbfgsb optimizer.", - argstr="--maximumNumberOfEvaluations %d") + desc="Maximum number of evaluations for line search in lbfgsb optimizer.", + argstr="--maximumNumberOfEvaluations %d", + ) maximumNumberOfCorrections = traits.Int( desc="Maximum number of corrections in lbfgsb optimizer.", - argstr="--maximumNumberOfCorrections %d") + argstr="--maximumNumberOfCorrections %d", + ) gui = traits.Bool( - desc= - "Display intermediate image volumes for debugging. NOTE: This is not part of the standard build sytem, and probably does nothing on your installation.", - argstr="--gui ") + desc="Display intermediate image volumes for debugging. NOTE: This is not part of the standard build system, and probably does nothing on your installation.", + argstr="--gui ", + ) promptUser = traits.Bool( - desc= - "Prompt the user to hit enter each time an image is sent to the DebugImageViewer", - argstr="--promptUser ") + desc="Prompt the user to hit enter each time an image is sent to the DebugImageViewer", + argstr="--promptUser ", + ) metricSamplingStrategy = traits.Enum( "Random", - desc= - "It defines the method that registration filter uses to sample the input fixed image. Only Random is supported for now.", - argstr="--metricSamplingStrategy %s") + desc="It defines the method that registration filter uses to sample the input fixed image. Only Random is supported for now.", + argstr="--metricSamplingStrategy %s", + ) logFileReport = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "A file to write out final information report in CSV file: MetricName,MetricValue,FixedImageName,FixedMaskName,MovingImageName,MovingMaskName", - argstr="--logFileReport %s") + desc="A file to write out final information report in CSV file: MetricName,MetricValue,FixedImageName,FixedMaskName,MovingImageName,MovingMaskName", + argstr="--logFileReport %s", + ) writeOutputTransformInFloat = traits.Bool( - desc= - "By default, the output registration transforms (either the output composite transform or each transform component) are written to the disk in double precision. If this flag is ON, the output transforms will be written in single (float) precision. It is especially important if the output transform is a displacement field transform, or it is a composite transform that includes several displacement fields.", - argstr="--writeOutputTransformInFloat ") + desc="By default, the output registration transforms (either the output composite transform or each transform component) are written to the disk in double precision. If this flag is ON, the output transforms will be written in single (float) precision. It is especially important if the output transform is a displacement field transform, or it is a composite transform that includes several displacement fields.", + argstr="--writeOutputTransformInFloat ", + ) class BRAINSFitOutputSpec(TraitedSpec): linearTransform = File( - desc= - "(optional) Output estimated transform - in case the computed transform is not BSpline. 
NOTE: You must set at least one output object (transform and/or output volume).", + exists=True, + ) bsplineTransform = File( - desc= - "(optional) Output estimated transform - in case the computed transform is BSpline. NOTE: You must set at least one output object (transform and/or output volume).", - exists=True) + desc="(optional) Output estimated transform - in case the computed transform is BSpline. NOTE: You must set at least one output object (transform and/or output volume).", + exists=True, + ) outputVolume = File( - desc= - "(optional) Output image: the moving image warped to the fixed image space. NOTE: You must set at least one output object (transform and/or output volume).", - exists=True) + desc="(optional) Output image: the moving image warped to the fixed image space. NOTE: You must set at least one output object (transform and/or output volume).", + exists=True, + ) outputFixedVolumeROI = File( - desc= - "ROI that is automatically computed from the fixed image. Only available if Masking Option is ROIAUTO. Image areas where the mask volume has zero value are ignored during the registration.", - exists=True) + desc="ROI that is automatically computed from the fixed image. Only available if Masking Option is ROIAUTO. Image areas where the mask volume has zero value are ignored during the registration.", + exists=True, + ) outputMovingVolumeROI = File( - desc= - "ROI that is automatically computed from the moving image. Only available if Masking Option is ROIAUTO. Image areas where the mask volume has zero value are ignored during the registration.", - exists=True) + desc="ROI that is automatically computed from the moving image. Only available if Masking Option is ROIAUTO. Image areas where the mask volume has zero value are ignored during the registration.", + exists=True, + ) strippedOutputTransform = File( - desc= - "Rigid component of the estimated affine transform. Can be used to rigidly register the moving image to the fixed image. NOTE: This value is overridden if either bsplineTransform or linearTransform is set.", - exists=True) + desc="Rigid component of the estimated affine transform. Can be used to rigidly register the moving image to the fixed image. NOTE: This value is overridden if either bsplineTransform or linearTransform is set.", + exists=True, + ) outputTransform = File( - desc= - "(optional) Filename to which save the (optional) estimated transform. NOTE: You must select either the outputTransform or the outputVolume option.", - exists=True) + desc="(optional) Filename to which save the (optional) estimated transform. 
NOTE: You must select either the outputTransform or the outputVolume option.", + exists=True, + ) logFileReport = File( - desc= - "A file to write out final information report in CSV file: MetricName,MetricValue,FixedImageName,FixedMaskName,MovingImageName,MovingMaskName", - exists=True) + desc="A file to write out final information report in CSV file: MetricName,MetricValue,FixedImageName,FixedMaskName,MovingImageName,MovingMaskName", + exists=True, + ) class BRAINSFit(SEMLikeCommandLine): @@ -399,13 +410,13 @@ class BRAINSFit(SEMLikeCommandLine): output_spec = BRAINSFitOutputSpec _cmd = " BRAINSFit " _outputs_filenames = { - 'outputVolume': 'outputVolume.nii', - 'bsplineTransform': 'bsplineTransform.h5', - 'outputTransform': 'outputTransform.h5', - 'outputFixedVolumeROI': 'outputFixedVolumeROI.nii', - 'strippedOutputTransform': 'strippedOutputTransform.h5', - 'outputMovingVolumeROI': 'outputMovingVolumeROI.nii', - 'linearTransform': 'linearTransform.h5', - 'logFileReport': 'logFileReport' + "outputVolume": "outputVolume.nii", + "bsplineTransform": "bsplineTransform.h5", + "outputTransform": "outputTransform.h5", + "outputFixedVolumeROI": "outputFixedVolumeROI.nii", + "strippedOutputTransform": "strippedOutputTransform.h5", + "outputMovingVolumeROI": "outputMovingVolumeROI.nii", + "linearTransform": "linearTransform.h5", + "logFileReport": "logFileReport", } _redirect_x = False diff --git a/nipype/interfaces/semtools/registration/brainsresample.py b/nipype/interfaces/semtools/registration/brainsresample.py index f9ea80acbd..0eb6b5f29c 100644 --- a/nipype/interfaces/semtools/registration/brainsresample.py +++ b/nipype/interfaces/semtools/registration/brainsresample.py @@ -5,25 +5,34 @@ import os -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class BRAINSResampleInputSpec(CommandLineInputSpec): - inputVolume = File( - desc="Image To Warp", exists=True, argstr="--inputVolume %s") + inputVolume = File(desc="Image To Warp", exists=True, argstr="--inputVolume %s") referenceVolume = File( - desc= - "Reference image used only to define the output space. If not specified, the warping is done in the same space as the image to warp.", + desc="Reference image used only to define the output space. If not specified, the warping is done in the same space as the image to warp.", exists=True, - argstr="--referenceVolume %s") + argstr="--referenceVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Resulting deformed image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) pixelType = traits.Enum( "float", "short", @@ -32,19 +41,19 @@ class BRAINSResampleInputSpec(CommandLineInputSpec): "uint", "uchar", "binary", - desc= - "Specifies the pixel type for the input/output images. The \'binary\' pixel type uses a modified algorithm whereby the image is read in as unsigned char, a signed distance map is created, signed distance map is resampled, and then a thresholded image of type unsigned char is written to disk.", - argstr="--pixelType %s") + desc="Specifies the pixel type for the input/output images. 
The 'binary' pixel type uses a modified algorithm whereby the image is read in as unsigned char, a signed distance map is created, signed distance map is resampled, and then a thresholded image of type unsigned char is written to disk.", + argstr="--pixelType %s", + ) deformationVolume = File( - desc= - "Displacement Field to be used to warp the image (ITKv3 or earlier)", + desc="Displacement Field to be used to warp the image (ITKv3 or earlier)", exists=True, - argstr="--deformationVolume %s") + argstr="--deformationVolume %s", + ) warpTransform = File( - desc= - "Filename for the BRAINSFit transform (ITKv3 or earlier) or composite transform file (ITKv4)", + desc="Filename for the BRAINSFit transform (ITKv3 or earlier) or composite transform file (ITKv4)", exists=True, - argstr="--warpTransform %s") + argstr="--warpTransform %s", + ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", @@ -56,24 +65,24 @@ class BRAINSResampleInputSpec(CommandLineInputSpec): "Welch", "Lanczos", "Blackman", - desc= - "Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", - argstr="--interpolationMode %s") + desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", + argstr="--interpolationMode %s", + ) inverseTransform = traits.Bool( - desc= - "True/False is to compute inverse of given transformation. Default is false", - argstr="--inverseTransform ") - defaultValue = traits.Float( - desc="Default voxel value", argstr="--defaultValue %f") + desc="True/False is to compute inverse of given transformation. Default is false", + argstr="--inverseTransform ", + ) + defaultValue = traits.Float(desc="Default voxel value", argstr="--defaultValue %f") gridSpacing = InputMultiPath( traits.Int, - desc= - "Add warped grid to output image to help show the deformation that occured with specified spacing. A spacing of 0 in a dimension indicates that grid lines should be rendered to fall exactly (i.e. do not allow displacements off that plane). This is useful for makeing a 2D image of grid lines from the 3D space", + desc="Add warped grid to output image to help show the deformation that occurred with specified spacing. A spacing of 0 in a dimension indicates that grid lines should be rendered to fall exactly (i.e. do not allow displacements off that plane). 
This is useful for making a 2D image of grid lines from the 3D space", sep=",", - argstr="--gridSpacing %s") + argstr="--gridSpacing %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSResampleOutputSpec(TraitedSpec): @@ -102,5 +111,5 @@ class BRAINSResample(SEMLikeCommandLine): input_spec = BRAINSResampleInputSpec output_spec = BRAINSResampleOutputSpec _cmd = " BRAINSResample " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False diff --git a/nipype/interfaces/semtools/registration/brainsresize.py b/nipype/interfaces/semtools/registration/brainsresize.py index 11238dd914..a81ee204b5 100644 --- a/nipype/interfaces/semtools/registration/brainsresize.py +++ b/nipype/interfaces/semtools/registration/brainsresize.py @@ -5,20 +5,29 @@ import os -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class BRAINSResizeInputSpec(CommandLineInputSpec): - inputVolume = File( - desc="Image To Scale", exists=True, argstr="--inputVolume %s") + inputVolume = File(desc="Image To Scale", exists=True, argstr="--inputVolume %s") outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Resulting scaled image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) pixelType = traits.Enum( "float", "short", @@ -27,12 +36,12 @@ class BRAINSResizeInputSpec(CommandLineInputSpec): "uint", "uchar", "binary", - desc= - "Specifies the pixel type for the input/output images. 
The 'binary' pixel type uses a modified algorithm whereby the image is read in as unsigned char, a signed distance map is created, signed distance map is resampled, and then a thresholded image of type unsigned char is written to disk.", + argstr="--pixelType %s", + ) scaleFactor = traits.Float( - desc="The scale factor for the image spacing.", - argstr="--scaleFactor %f") + desc="The scale factor for the image spacing.", argstr="--scaleFactor %f" + ) class BRAINSResizeOutputSpec(TraitedSpec): @@ -59,5 +68,5 @@ class BRAINSResize(SEMLikeCommandLine): input_spec = BRAINSResizeInputSpec output_spec = BRAINSResizeOutputSpec _cmd = " BRAINSResize " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False diff --git a/nipype/interfaces/semtools/registration/specialized.py b/nipype/interfaces/semtools/registration/specialized.py index 2cc08e3ec7..5a6daf5250 100644 --- a/nipype/interfaces/semtools/registration/specialized.py +++ b/nipype/interfaces/semtools/registration/specialized.py @@ -5,52 +5,63 @@ import os -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class VBRAINSDemonWarpInputSpec(CommandLineInputSpec): movingVolume = InputMultiPath( File(exists=True), desc="Required: input moving image", - argstr="--movingVolume %s...") + argstr="--movingVolume %s...", + ) fixedVolume = InputMultiPath( File(exists=True), desc="Required: input fixed (target) image", - argstr="--fixedVolume %s...") + argstr="--fixedVolume %s...", + ) inputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", - desc= - "Input volumes will be typecast to this format: float|short|ushort|int|uchar", - argstr="--inputPixelType %s") + desc="Input volumes will be typecast to this format: float|short|ushort|int|uchar", + argstr="--inputPixelType %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: output resampled moving image (will have the same physical space as the fixedVolume).", - argstr="--outputVolume %s") + desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", + argstr="--outputVolume %s", + ) outputDisplacementFieldVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Output deformation field vector image (will have the same physical space as the fixedVolume).", - argstr="--outputDisplacementFieldVolume %s") + desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", + argstr="--outputDisplacementFieldVolume %s", + ) outputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", - desc= - "outputVolume will be typecast to this format: float|short|ushort|int|uchar", - argstr="--outputPixelType %s") + desc="outputVolume will be typecast to this format: float|short|ushort|int|uchar", + argstr="--outputPixelType %s", + ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", @@ -62,171 +73,185 @@ class VBRAINSDemonWarpInputSpec(CommandLineInputSpec): "Welch", "Lanczos", "Blackman", - desc= - "Type of interpolation to be used when applying transform to moving volume. 
Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", - argstr="--interpolationMode %s") + desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", + argstr="--interpolationMode %s", + ) registrationFilterType = traits.Enum( "Demons", "FastSymmetricForces", "Diffeomorphic", "LogDemons", "SymmetricLogDemons", - desc= - "Registration Filter Type: Demons|FastSymmetricForces|Diffeomorphic|LogDemons|SymmetricLogDemons", - argstr="--registrationFilterType %s") + desc="Registration Filter Type: Demons|FastSymmetricForces|Diffeomorphic|LogDemons|SymmetricLogDemons", + argstr="--registrationFilterType %s", + ) smoothDisplacementFieldSigma = traits.Float( - desc= - "A gaussian smoothing value to be applied to the deformation feild at each iteration.", - argstr="--smoothDisplacementFieldSigma %f") + desc="A gaussian smoothing value to be applied to the deformation field at each iteration.", + argstr="--smoothDisplacementFieldSigma %f", + ) numberOfPyramidLevels = traits.Int( - desc= - "Number of image pyramid levels to use in the multi-resolution registration.", - argstr="--numberOfPyramidLevels %d") + desc="Number of image pyramid levels to use in the multi-resolution registration.", + argstr="--numberOfPyramidLevels %d", + ) minimumFixedPyramid = InputMultiPath( traits.Int, - desc= - "The shrink factor for the first level of the fixed image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", + desc="The shrink factor for the first level of the fixed image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", - argstr="--minimumFixedPyramid %s") + argstr="--minimumFixedPyramid %s", + ) minimumMovingPyramid = InputMultiPath( traits.Int, - desc= - "The shrink factor for the first level of the moving image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", + desc="The shrink factor for the first level of the moving image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", - argstr="--minimumMovingPyramid %s") + argstr="--minimumMovingPyramid %s", + ) arrayOfPyramidLevelIterations = InputMultiPath( traits.Int, desc="The number of iterations for each pyramid level", sep=",", - argstr="--arrayOfPyramidLevelIterations %s") + argstr="--arrayOfPyramidLevelIterations %s", + ) histogramMatch = traits.Bool( - desc= - "Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile.", - argstr="--histogramMatch ") + desc="Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile.", + argstr="--histogramMatch ", + ) numberOfHistogramBins = traits.Int( - desc="The number of histogram levels", - argstr="--numberOfHistogramBins %d") + desc="The number of histogram levels", argstr="--numberOfHistogramBins %d" + ) numberOfMatchPoints = traits.Int( desc="The number of match points for histrogramMatch", - argstr="--numberOfMatchPoints %d") + argstr="--numberOfMatchPoints %d", + ) medianFilterSize = InputMultiPath( traits.Int, - desc= - "Median filter radius in all 3 directions. When images have a lot of salt and pepper noise, this step can improve the registration.", + desc="Median filter radius in all 3 directions. 
When images have a lot of salt and pepper noise, this step can improve the registration.", sep=",", - argstr="--medianFilterSize %s") + argstr="--medianFilterSize %s", + ) initializeWithDisplacementField = File( desc="Initial deformation field vector image file name", exists=True, - argstr="--initializeWithDisplacementField %s") + argstr="--initializeWithDisplacementField %s", + ) initializeWithTransform = File( desc="Initial Transform filename", exists=True, - argstr="--initializeWithTransform %s") + argstr="--initializeWithTransform %s", + ) makeBOBF = traits.Bool( - desc= - "Flag to make Brain-Only Background-Filled versions of the input and target volumes.", - argstr="--makeBOBF ") + desc="Flag to make Brain-Only Background-Filled versions of the input and target volumes.", + argstr="--makeBOBF ", + ) fixedBinaryVolume = File( desc="Mask filename for desired region of interest in the Fixed image.", exists=True, - argstr="--fixedBinaryVolume %s") + argstr="--fixedBinaryVolume %s", + ) movingBinaryVolume = File( - desc= - "Mask filename for desired region of interest in the Moving image.", + desc="Mask filename for desired region of interest in the Moving image.", exists=True, - argstr="--movingBinaryVolume %s") + argstr="--movingBinaryVolume %s", + ) lowerThresholdForBOBF = traits.Int( - desc="Lower threshold for performing BOBF", - argstr="--lowerThresholdForBOBF %d") + desc="Lower threshold for performing BOBF", argstr="--lowerThresholdForBOBF %d" + ) upperThresholdForBOBF = traits.Int( - desc="Upper threshold for performing BOBF", - argstr="--upperThresholdForBOBF %d") + desc="Upper threshold for performing BOBF", argstr="--upperThresholdForBOBF %d" + ) backgroundFillValue = traits.Int( desc="Replacement value to overwrite background when performing BOBF", - argstr="--backgroundFillValue %d") + argstr="--backgroundFillValue %d", + ) seedForBOBF = InputMultiPath( traits.Int, desc="coordinates in all 3 directions for Seed when performing BOBF", sep=",", - argstr="--seedForBOBF %s") + argstr="--seedForBOBF %s", + ) neighborhoodForBOBF = InputMultiPath( traits.Int, - desc= - "neighborhood in all 3 directions to be included when performing BOBF", + desc="neighborhood in all 3 directions to be included when performing BOBF", sep=",", - argstr="--neighborhoodForBOBF %s") + argstr="--neighborhoodForBOBF %s", + ) outputDisplacementFieldPrefix = traits.Str( - desc= - "Displacement field filename prefix for writing separate x, y, and z component images", - argstr="--outputDisplacementFieldPrefix %s") + desc="Displacement field filename prefix for writing separate x, y, and z component images", + argstr="--outputDisplacementFieldPrefix %s", + ) outputCheckerboardVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", - argstr="--outputCheckerboardVolume %s") + desc="Generate a checkerboard image volume between the fixedVolume and the deformed movingVolume.", + argstr="--outputCheckerboardVolume %s", + ) checkerboardPatternSubdivisions = InputMultiPath( traits.Int, desc="Number of Checkerboard subdivisions in all 3 directions", sep=",", - argstr="--checkerboardPatternSubdivisions %s") + argstr="--checkerboardPatternSubdivisions %s", + ) outputNormalized = traits.Bool( - desc= - "Flag to warp and write the normalized images to output. 
In normalized images the image values are fit-scaled to be between 0 and the maximum storage type value.", - argstr="--outputNormalized ") + desc="Flag to warp and write the normalized images to output. In normalized images the image values are fit-scaled to be between 0 and the maximum storage type value.", + argstr="--outputNormalized ", + ) outputDebug = traits.Bool( - desc="Flag to write debugging images after each step.", - argstr="--outputDebug ") + desc="Flag to write debugging images after each step.", argstr="--outputDebug " + ) weightFactors = InputMultiPath( traits.Float, desc="Weight fatctors for each input images", sep=",", - argstr="--weightFactors %s") + argstr="--weightFactors %s", + ) gradient_type = traits.Enum( "0", "1", "2", - desc= - "Type of gradient used for computing the demons force (0 is symmetrized, 1 is fixed image, 2 is moving image)", - argstr="--gradient_type %s") + desc="Type of gradient used for computing the demons force (0 is symmetrized, 1 is fixed image, 2 is moving image)", + argstr="--gradient_type %s", + ) upFieldSmoothing = traits.Float( desc="Smoothing sigma for the update field at each iteration", - argstr="--upFieldSmoothing %f") + argstr="--upFieldSmoothing %f", + ) max_step_length = traits.Float( desc="Maximum length of an update vector (0: no restriction)", - argstr="--max_step_length %f") + argstr="--max_step_length %f", + ) use_vanilla_dem = traits.Bool( - desc="Run vanilla demons algorithm", argstr="--use_vanilla_dem ") + desc="Run vanilla demons algorithm", argstr="--use_vanilla_dem " + ) gui = traits.Bool( - desc="Display intermediate image volumes for debugging", - argstr="--gui ") + desc="Display intermediate image volumes for debugging", argstr="--gui " + ) promptUser = traits.Bool( - desc= - "Prompt the user to hit enter each time an image is sent to the DebugImageViewer", - argstr="--promptUser ") + desc="Prompt the user to hit enter each time an image is sent to the DebugImageViewer", + argstr="--promptUser ", + ) numberOfBCHApproximationTerms = traits.Int( desc="Number of terms in the BCH expansion", - argstr="--numberOfBCHApproximationTerms %d") + argstr="--numberOfBCHApproximationTerms %d", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class VBRAINSDemonWarpOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: output resampled moving image (will have the same physical space as the fixedVolume).", - exists=True) + desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", + exists=True, + ) outputDisplacementFieldVolume = File( - desc= - "Output deformation field vector image (will have the same physical space as the fixedVolume).", - exists=True) + desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", + exists=True, + ) outputCheckerboardVolume = File( - desc= - "Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", - exists=True) + desc="Generate a checkerboard image volume between the fixedVolume and the deformed movingVolume.", + exists=True, + ) class VBRAINSDemonWarp(SEMLikeCommandLine): @@ -252,54 +277,54 @@ class VBRAINSDemonWarp(SEMLikeCommandLine): output_spec = VBRAINSDemonWarpOutputSpec _cmd = " VBRAINSDemonWarp " _outputs_filenames = { - 'outputVolume': 'outputVolume.nii', - 'outputCheckerboardVolume': 'outputCheckerboardVolume.nii', - 
'outputDisplacementFieldVolume': 'outputDisplacementFieldVolume.nrrd' + "outputVolume": "outputVolume.nii", + "outputCheckerboardVolume": "outputCheckerboardVolume.nii", + "outputDisplacementFieldVolume": "outputDisplacementFieldVolume.nrrd", } _redirect_x = False class BRAINSDemonWarpInputSpec(CommandLineInputSpec): movingVolume = File( - desc="Required: input moving image", - exists=True, - argstr="--movingVolume %s") + desc="Required: input moving image", exists=True, argstr="--movingVolume %s" + ) fixedVolume = File( desc="Required: input fixed (target) image", exists=True, - argstr="--fixedVolume %s") + argstr="--fixedVolume %s", + ) inputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", - desc= - "Input volumes will be typecast to this format: float|short|ushort|int|uchar", - argstr="--inputPixelType %s") + desc="Input volumes will be typecast to this format: float|short|ushort|int|uchar", + argstr="--inputPixelType %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: output resampled moving image (will have the same physical space as the fixedVolume).", - argstr="--outputVolume %s") + desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", + argstr="--outputVolume %s", + ) outputDisplacementFieldVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Output deformation field vector image (will have the same physical space as the fixedVolume).", - argstr="--outputDisplacementFieldVolume %s") + desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", + argstr="--outputDisplacementFieldVolume %s", + ) outputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", - desc= - "outputVolume will be typecast to this format: float|short|ushort|int|uchar", - argstr="--outputPixelType %s") + desc="outputVolume will be typecast to this format: float|short|ushort|int|uchar", + argstr="--outputPixelType %s", + ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", @@ -311,168 +336,181 @@ class BRAINSDemonWarpInputSpec(CommandLineInputSpec): "Welch", "Lanczos", "Blackman", - desc= - "Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", - argstr="--interpolationMode %s") + desc="Type of interpolation to be used when applying transform to moving volume. 
Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", + argstr="--interpolationMode %s", + ) registrationFilterType = traits.Enum( "Demons", "FastSymmetricForces", "Diffeomorphic", - desc= - "Registration Filter Type: Demons|FastSymmetricForces|Diffeomorphic", - argstr="--registrationFilterType %s") + desc="Registration Filter Type: Demons|FastSymmetricForces|Diffeomorphic", + argstr="--registrationFilterType %s", + ) smoothDisplacementFieldSigma = traits.Float( - desc= - "A gaussian smoothing value to be applied to the deformation feild at each iteration.", - argstr="--smoothDisplacementFieldSigma %f") + desc="A gaussian smoothing value to be applied to the deformation field at each iteration.", + argstr="--smoothDisplacementFieldSigma %f", + ) numberOfPyramidLevels = traits.Int( - desc= - "Number of image pyramid levels to use in the multi-resolution registration.", - argstr="--numberOfPyramidLevels %d") + desc="Number of image pyramid levels to use in the multi-resolution registration.", + argstr="--numberOfPyramidLevels %d", + ) minimumFixedPyramid = InputMultiPath( traits.Int, - desc= - "The shrink factor for the first level of the fixed image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", + desc="The shrink factor for the first level of the fixed image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", - argstr="--minimumFixedPyramid %s") + argstr="--minimumFixedPyramid %s", + ) minimumMovingPyramid = InputMultiPath( traits.Int, - desc= - "The shrink factor for the first level of the moving image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", + desc="The shrink factor for the first level of the moving image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", - argstr="--minimumMovingPyramid %s") + argstr="--minimumMovingPyramid %s", + ) arrayOfPyramidLevelIterations = InputMultiPath( traits.Int, desc="The number of iterations for each pyramid level", sep=",", - argstr="--arrayOfPyramidLevelIterations %s") + argstr="--arrayOfPyramidLevelIterations %s", + ) histogramMatch = traits.Bool( - desc= - "Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile.", - argstr="--histogramMatch ") + desc="Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile.", + argstr="--histogramMatch ", + ) numberOfHistogramBins = traits.Int( - desc="The number of histogram levels", - argstr="--numberOfHistogramBins %d") + desc="The number of histogram levels", argstr="--numberOfHistogramBins %d" + ) numberOfMatchPoints = traits.Int( desc="The number of match points for histrogramMatch", - argstr="--numberOfMatchPoints %d") + argstr="--numberOfMatchPoints %d", + ) medianFilterSize = InputMultiPath( traits.Int, - desc= - "Median filter radius in all 3 directions. 
When images have a lot of salt and pepper noise, this step can improve the registration.", + desc="Median filter radius in all 3 directions. When images have a lot of salt and pepper noise, this step can improve the registration.", sep=",", - argstr="--medianFilterSize %s") + argstr="--medianFilterSize %s", + ) initializeWithDisplacementField = File( desc="Initial deformation field vector image file name", exists=True, - argstr="--initializeWithDisplacementField %s") + argstr="--initializeWithDisplacementField %s", + ) initializeWithTransform = File( desc="Initial Transform filename", exists=True, - argstr="--initializeWithTransform %s") + argstr="--initializeWithTransform %s", + ) maskProcessingMode = traits.Enum( "NOMASK", "ROIAUTO", "ROI", "BOBF", - desc= - "What mode to use for using the masks: NOMASK|ROIAUTO|ROI|BOBF. If ROIAUTO is choosen, then the mask is implicitly defined using a otsu forground and hole filling algorithm. Where the Region Of Interest mode uses the masks to define what parts of the image should be used for computing the deformation field. Brain Only Background Fill uses the masks to pre-process the input images by clipping and filling in the background with a predefined value.", - argstr="--maskProcessingMode %s") + desc="What mode to use for using the masks: NOMASK|ROIAUTO|ROI|BOBF. If ROIAUTO is chosen, then the mask is implicitly defined using an Otsu foreground and hole filling algorithm. The Region Of Interest mode uses the masks to define what parts of the image should be used for computing the deformation field. Brain Only Background Fill uses the masks to pre-process the input images by clipping and filling in the background with a predefined value.", + argstr="--maskProcessingMode %s", + ) fixedBinaryVolume = File( desc="Mask filename for desired region of interest in the Fixed image.", exists=True, - argstr="--fixedBinaryVolume %s") + argstr="--fixedBinaryVolume %s", + ) movingBinaryVolume = File( - desc= - "Mask filename for desired region of interest in the Moving image.", + desc="Mask filename for desired region of interest in the Moving image.", exists=True, - argstr="--movingBinaryVolume %s") + argstr="--movingBinaryVolume %s", + ) lowerThresholdForBOBF = traits.Int( - desc="Lower threshold for performing BOBF", - argstr="--lowerThresholdForBOBF %d") + desc="Lower threshold for performing BOBF", argstr="--lowerThresholdForBOBF %d" + ) upperThresholdForBOBF = traits.Int( - desc="Upper threshold for performing BOBF", - argstr="--upperThresholdForBOBF %d") + desc="Upper threshold for performing BOBF", argstr="--upperThresholdForBOBF %d" + ) backgroundFillValue = traits.Int( desc="Replacement value to overwrite background when performing BOBF", - argstr="--backgroundFillValue %d") + argstr="--backgroundFillValue %d", + ) seedForBOBF = InputMultiPath( traits.Int, desc="coordinates in all 3 directions for Seed when performing BOBF", sep=",", - argstr="--seedForBOBF %s") + argstr="--seedForBOBF %s", + ) neighborhoodForBOBF = InputMultiPath( traits.Int, - desc= - "neighborhood in all 3 directions to be included when performing BOBF", + desc="neighborhood in all 3 directions to be included when performing BOBF", sep=",", - argstr="--neighborhoodForBOBF %s") + argstr="--neighborhoodForBOBF %s", + ) outputDisplacementFieldPrefix = traits.Str( - desc= - "Displacement field filename prefix for writing separate x, y, and z component images", - argstr="--outputDisplacementFieldPrefix %s") + desc="Displacement field filename prefix for writing separate x, y, and z component images", + argstr="--outputDisplacementFieldPrefix %s", + ) outputCheckerboardVolume = traits.Either( traits.Bool, File(), 
hash_files=False, - desc= - "Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", - argstr="--outputCheckerboardVolume %s") + desc="Generate a checkerboard image volume between the fixedVolume and the deformed movingVolume.", + argstr="--outputCheckerboardVolume %s", + ) checkerboardPatternSubdivisions = InputMultiPath( traits.Int, desc="Number of Checkerboard subdivisions in all 3 directions", sep=",", - argstr="--checkerboardPatternSubdivisions %s") + argstr="--checkerboardPatternSubdivisions %s", + ) outputNormalized = traits.Bool( - desc= - "Flag to warp and write the normalized images to output. In normalized images the image values are fit-scaled to be between 0 and the maximum storage type value.", - argstr="--outputNormalized ") + desc="Flag to warp and write the normalized images to output. In normalized images the image values are fit-scaled to be between 0 and the maximum storage type value.", + argstr="--outputNormalized ", + ) outputDebug = traits.Bool( - desc="Flag to write debugging images after each step.", - argstr="--outputDebug ") + desc="Flag to write debugging images after each step.", argstr="--outputDebug " + ) gradient_type = traits.Enum( "0", "1", "2", - desc= - "Type of gradient used for computing the demons force (0 is symmetrized, 1 is fixed image, 2 is moving image)", - argstr="--gradient_type %s") + desc="Type of gradient used for computing the demons force (0 is symmetrized, 1 is fixed image, 2 is moving image)", + argstr="--gradient_type %s", + ) upFieldSmoothing = traits.Float( desc="Smoothing sigma for the update field at each iteration", - argstr="--upFieldSmoothing %f") + argstr="--upFieldSmoothing %f", + ) max_step_length = traits.Float( desc="Maximum length of an update vector (0: no restriction)", - argstr="--max_step_length %f") + argstr="--max_step_length %f", + ) use_vanilla_dem = traits.Bool( - desc="Run vanilla demons algorithm", argstr="--use_vanilla_dem ") + desc="Run vanilla demons algorithm", argstr="--use_vanilla_dem " + ) gui = traits.Bool( - desc="Display intermediate image volumes for debugging", - argstr="--gui ") + desc="Display intermediate image volumes for debugging", argstr="--gui " + ) promptUser = traits.Bool( - desc= - "Prompt the user to hit enter each time an image is sent to the DebugImageViewer", - argstr="--promptUser ") + desc="Prompt the user to hit enter each time an image is sent to the DebugImageViewer", + argstr="--promptUser ", + ) numberOfBCHApproximationTerms = traits.Int( desc="Number of terms in the BCH expansion", - argstr="--numberOfBCHApproximationTerms %d") + argstr="--numberOfBCHApproximationTerms %d", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSDemonWarpOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: output resampled moving image (will have the same physical space as the fixedVolume).", - exists=True) + desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", + exists=True, + ) outputDisplacementFieldVolume = File( - desc= - "Output deformation field vector image (will have the same physical space as the fixedVolume).", - exists=True) + desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", + exists=True, + ) outputCheckerboardVolume = File( - desc= - "Genete a checkerboard image volume between the fixedVolume and the deformed 
movingVolume.", - exists=True) + desc="Generate a checkerboard image volume between the fixedVolume and the deformed movingVolume.", + exists=True, + ) class BRAINSDemonWarp(SEMLikeCommandLine): @@ -498,9 +536,9 @@ class BRAINSDemonWarp(SEMLikeCommandLine): output_spec = BRAINSDemonWarpOutputSpec _cmd = " BRAINSDemonWarp " _outputs_filenames = { - 'outputVolume': 'outputVolume.nii', - 'outputCheckerboardVolume': 'outputCheckerboardVolume.nii', - 'outputDisplacementFieldVolume': 'outputDisplacementFieldVolume.nrrd' + "outputVolume": "outputVolume.nii", + "outputCheckerboardVolume": "outputCheckerboardVolume.nii", + "outputDisplacementFieldVolume": "outputDisplacementFieldVolume.nrrd", } _redirect_x = False @@ -509,39 +547,47 @@ class BRAINSTransformFromFiducialsInputSpec(CommandLineInputSpec): fixedLandmarks = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), desc="Ordered list of landmarks in the fixed image", - argstr="--fixedLandmarks %s...") + argstr="--fixedLandmarks %s...", + ) movingLandmarks = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), desc="Ordered list of landmarks in the moving image", - argstr="--movingLandmarks %s...") + argstr="--movingLandmarks %s...", + ) saveTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Save the transform that results from registration", - argstr="--saveTransform %s") + argstr="--saveTransform %s", + ) transformType = traits.Enum( "Translation", "Rigid", "Similarity", desc="Type of transform to produce", - argstr="--transformType %s") + argstr="--transformType %s", + ) fixedLandmarksFile = File( desc="An fcsv formatted file with a list of landmark points.", exists=True, - argstr="--fixedLandmarksFile %s") + argstr="--fixedLandmarksFile %s", + ) movingLandmarksFile = File( desc="An fcsv formatted file with a list of landmark points.", exists=True, - argstr="--movingLandmarksFile %s") + argstr="--movingLandmarksFile %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSTransformFromFiducialsOutputSpec(TraitedSpec): saveTransform = File( - desc="Save the transform that results from registration", exists=True) + desc="Save the transform that results from registration", exists=True + ) class BRAINSTransformFromFiducials(SEMLikeCommandLine): @@ -564,5 +610,5 @@ class BRAINSTransformFromFiducials(SEMLikeCommandLine): input_spec = BRAINSTransformFromFiducialsInputSpec output_spec = BRAINSTransformFromFiducialsOutputSpec _cmd = " BRAINSTransformFromFiducials " - _outputs_filenames = {'saveTransform': 'saveTransform.h5'} + _outputs_filenames = {"saveTransform": "saveTransform.h5"} _redirect_x = False diff --git a/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSDemonWarp.py b/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSDemonWarp.py index efe6d50fce..4136c8105d 100644 --- a/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSDemonWarp.py +++ b/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSDemonWarp.py @@ -4,113 +4,77 @@ def test_BRAINSDemonWarp_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), arrayOfPyramidLevelIterations=dict( - argstr='--arrayOfPyramidLevelIterations %s', - sep=',', + argstr="--arrayOfPyramidLevelIterations %s", sep=",", ), - backgroundFillValue=dict(argstr='--backgroundFillValue %d', ), + backgroundFillValue=dict(argstr="--backgroundFillValue %d",), 
checkerboardPatternSubdivisions=dict( - argstr='--checkerboardPatternSubdivisions %s', - sep=',', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixedBinaryVolume=dict( - argstr='--fixedBinaryVolume %s', - extensions=None, - ), - fixedVolume=dict( - argstr='--fixedVolume %s', - extensions=None, - ), - gradient_type=dict(argstr='--gradient_type %s', ), - gui=dict(argstr='--gui ', ), - histogramMatch=dict(argstr='--histogramMatch ', ), + argstr="--checkerboardPatternSubdivisions %s", sep=",", + ), + environ=dict(nohash=True, usedefault=True,), + fixedBinaryVolume=dict(argstr="--fixedBinaryVolume %s", extensions=None,), + fixedVolume=dict(argstr="--fixedVolume %s", extensions=None,), + gradient_type=dict(argstr="--gradient_type %s",), + gui=dict(argstr="--gui ",), + histogramMatch=dict(argstr="--histogramMatch ",), initializeWithDisplacementField=dict( - argstr='--initializeWithDisplacementField %s', - extensions=None, + argstr="--initializeWithDisplacementField %s", extensions=None, ), initializeWithTransform=dict( - argstr='--initializeWithTransform %s', - extensions=None, - ), - inputPixelType=dict(argstr='--inputPixelType %s', ), - interpolationMode=dict(argstr='--interpolationMode %s', ), - lowerThresholdForBOBF=dict(argstr='--lowerThresholdForBOBF %d', ), - maskProcessingMode=dict(argstr='--maskProcessingMode %s', ), - max_step_length=dict(argstr='--max_step_length %f', ), - medianFilterSize=dict( - argstr='--medianFilterSize %s', - sep=',', - ), - minimumFixedPyramid=dict( - argstr='--minimumFixedPyramid %s', - sep=',', - ), - minimumMovingPyramid=dict( - argstr='--minimumMovingPyramid %s', - sep=',', - ), - movingBinaryVolume=dict( - argstr='--movingBinaryVolume %s', - extensions=None, - ), - movingVolume=dict( - argstr='--movingVolume %s', - extensions=None, - ), - neighborhoodForBOBF=dict( - argstr='--neighborhoodForBOBF %s', - sep=',', - ), + argstr="--initializeWithTransform %s", extensions=None, + ), + inputPixelType=dict(argstr="--inputPixelType %s",), + interpolationMode=dict(argstr="--interpolationMode %s",), + lowerThresholdForBOBF=dict(argstr="--lowerThresholdForBOBF %d",), + maskProcessingMode=dict(argstr="--maskProcessingMode %s",), + max_step_length=dict(argstr="--max_step_length %f",), + medianFilterSize=dict(argstr="--medianFilterSize %s", sep=",",), + minimumFixedPyramid=dict(argstr="--minimumFixedPyramid %s", sep=",",), + minimumMovingPyramid=dict(argstr="--minimumMovingPyramid %s", sep=",",), + movingBinaryVolume=dict(argstr="--movingBinaryVolume %s", extensions=None,), + movingVolume=dict(argstr="--movingVolume %s", extensions=None,), + neighborhoodForBOBF=dict(argstr="--neighborhoodForBOBF %s", sep=",",), numberOfBCHApproximationTerms=dict( - argstr='--numberOfBCHApproximationTerms %d', ), - numberOfHistogramBins=dict(argstr='--numberOfHistogramBins %d', ), - numberOfMatchPoints=dict(argstr='--numberOfMatchPoints %d', ), - numberOfPyramidLevels=dict(argstr='--numberOfPyramidLevels %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + argstr="--numberOfBCHApproximationTerms %d", + ), + numberOfHistogramBins=dict(argstr="--numberOfHistogramBins %d",), + numberOfMatchPoints=dict(argstr="--numberOfMatchPoints %d",), + numberOfPyramidLevels=dict(argstr="--numberOfPyramidLevels %d",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), outputCheckerboardVolume=dict( - argstr='--outputCheckerboardVolume %s', - hash_files=False, + argstr="--outputCheckerboardVolume %s", hash_files=False, ), - outputDebug=dict(argstr='--outputDebug ', ), + 
outputDebug=dict(argstr="--outputDebug ",), outputDisplacementFieldPrefix=dict( - argstr='--outputDisplacementFieldPrefix %s', ), - outputDisplacementFieldVolume=dict( - argstr='--outputDisplacementFieldVolume %s', - hash_files=False, - ), - outputNormalized=dict(argstr='--outputNormalized ', ), - outputPixelType=dict(argstr='--outputPixelType %s', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, + argstr="--outputDisplacementFieldPrefix %s", ), - promptUser=dict(argstr='--promptUser ', ), - registrationFilterType=dict(argstr='--registrationFilterType %s', ), - seedForBOBF=dict( - argstr='--seedForBOBF %s', - sep=',', - ), - smoothDisplacementFieldSigma=dict( - argstr='--smoothDisplacementFieldSigma %f', ), - upFieldSmoothing=dict(argstr='--upFieldSmoothing %f', ), - upperThresholdForBOBF=dict(argstr='--upperThresholdForBOBF %d', ), - use_vanilla_dem=dict(argstr='--use_vanilla_dem ', ), + outputDisplacementFieldVolume=dict( + argstr="--outputDisplacementFieldVolume %s", hash_files=False, + ), + outputNormalized=dict(argstr="--outputNormalized ",), + outputPixelType=dict(argstr="--outputPixelType %s",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + promptUser=dict(argstr="--promptUser ",), + registrationFilterType=dict(argstr="--registrationFilterType %s",), + seedForBOBF=dict(argstr="--seedForBOBF %s", sep=",",), + smoothDisplacementFieldSigma=dict(argstr="--smoothDisplacementFieldSigma %f",), + upFieldSmoothing=dict(argstr="--upFieldSmoothing %f",), + upperThresholdForBOBF=dict(argstr="--upperThresholdForBOBF %d",), + use_vanilla_dem=dict(argstr="--use_vanilla_dem ",), ) inputs = BRAINSDemonWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSDemonWarp_outputs(): output_map = dict( - outputCheckerboardVolume=dict(extensions=None, ), - outputDisplacementFieldVolume=dict(extensions=None, ), - outputVolume=dict(extensions=None, ), + outputCheckerboardVolume=dict(extensions=None,), + outputDisplacementFieldVolume=dict(extensions=None,), + outputVolume=dict(extensions=None,), ) outputs = BRAINSDemonWarp.output_spec() diff --git a/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSFit.py b/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSFit.py index 81422f2296..683d34694b 100644 --- a/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSFit.py +++ b/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSFit.py @@ -4,159 +4,101 @@ def test_BRAINSFit_inputs(): input_map = dict( - ROIAutoClosingSize=dict(argstr='--ROIAutoClosingSize %f', ), - ROIAutoDilateSize=dict(argstr='--ROIAutoDilateSize %f', ), - args=dict(argstr='%s', ), - backgroundFillValue=dict(argstr='--backgroundFillValue %f', ), - bsplineTransform=dict( - argstr='--bsplineTransform %s', - hash_files=False, - ), + ROIAutoClosingSize=dict(argstr="--ROIAutoClosingSize %f",), + ROIAutoDilateSize=dict(argstr="--ROIAutoDilateSize %f",), + args=dict(argstr="%s",), + backgroundFillValue=dict(argstr="--backgroundFillValue %f",), + bsplineTransform=dict(argstr="--bsplineTransform %s", hash_files=False,), costFunctionConvergenceFactor=dict( - argstr='--costFunctionConvergenceFactor %f', ), - costMetric=dict(argstr='--costMetric %s', ), - debugLevel=dict(argstr='--debugLevel %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - failureExitCode=dict(argstr='--failureExitCode %d', ), - fixedBinaryVolume=dict( - 
argstr='--fixedBinaryVolume %s', - extensions=None, - ), - fixedVolume=dict( - argstr='--fixedVolume %s', - extensions=None, - ), - fixedVolume2=dict( - argstr='--fixedVolume2 %s', - extensions=None, - ), - fixedVolumeTimeIndex=dict(argstr='--fixedVolumeTimeIndex %d', ), - gui=dict(argstr='--gui ', ), - histogramMatch=dict(argstr='--histogramMatch ', ), - initialTransform=dict( - argstr='--initialTransform %s', - extensions=None, - ), + argstr="--costFunctionConvergenceFactor %f", + ), + costMetric=dict(argstr="--costMetric %s",), + debugLevel=dict(argstr="--debugLevel %d",), + environ=dict(nohash=True, usedefault=True,), + failureExitCode=dict(argstr="--failureExitCode %d",), + fixedBinaryVolume=dict(argstr="--fixedBinaryVolume %s", extensions=None,), + fixedVolume=dict(argstr="--fixedVolume %s", extensions=None,), + fixedVolume2=dict(argstr="--fixedVolume2 %s", extensions=None,), + fixedVolumeTimeIndex=dict(argstr="--fixedVolumeTimeIndex %d",), + gui=dict(argstr="--gui ",), + histogramMatch=dict(argstr="--histogramMatch ",), + initialTransform=dict(argstr="--initialTransform %s", extensions=None,), initializeRegistrationByCurrentGenericTransform=dict( - argstr='--initializeRegistrationByCurrentGenericTransform ', ), - initializeTransformMode=dict(argstr='--initializeTransformMode %s', ), - interpolationMode=dict(argstr='--interpolationMode %s', ), - linearTransform=dict( - argstr='--linearTransform %s', - hash_files=False, - ), - logFileReport=dict( - argstr='--logFileReport %s', - hash_files=False, - ), - maskInferiorCutOffFromCenter=dict( - argstr='--maskInferiorCutOffFromCenter %f', ), - maskProcessingMode=dict(argstr='--maskProcessingMode %s', ), - maxBSplineDisplacement=dict(argstr='--maxBSplineDisplacement %f', ), - maximumNumberOfCorrections=dict( - argstr='--maximumNumberOfCorrections %d', ), - maximumNumberOfEvaluations=dict( - argstr='--maximumNumberOfEvaluations %d', ), - maximumStepLength=dict(argstr='--maximumStepLength %f', ), - medianFilterSize=dict( - argstr='--medianFilterSize %s', - sep=',', - ), - metricSamplingStrategy=dict(argstr='--metricSamplingStrategy %s', ), - minimumStepLength=dict( - argstr='--minimumStepLength %s', - sep=',', - ), - movingBinaryVolume=dict( - argstr='--movingBinaryVolume %s', - extensions=None, - ), - movingVolume=dict( - argstr='--movingVolume %s', - extensions=None, - ), - movingVolume2=dict( - argstr='--movingVolume2 %s', - extensions=None, - ), - movingVolumeTimeIndex=dict(argstr='--movingVolumeTimeIndex %d', ), - numberOfHistogramBins=dict(argstr='--numberOfHistogramBins %d', ), - numberOfIterations=dict( - argstr='--numberOfIterations %s', - sep=',', - ), - numberOfMatchPoints=dict(argstr='--numberOfMatchPoints %d', ), - numberOfSamples=dict(argstr='--numberOfSamples %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + argstr="--initializeRegistrationByCurrentGenericTransform ", + ), + initializeTransformMode=dict(argstr="--initializeTransformMode %s",), + interpolationMode=dict(argstr="--interpolationMode %s",), + linearTransform=dict(argstr="--linearTransform %s", hash_files=False,), + logFileReport=dict(argstr="--logFileReport %s", hash_files=False,), + maskInferiorCutOffFromCenter=dict(argstr="--maskInferiorCutOffFromCenter %f",), + maskProcessingMode=dict(argstr="--maskProcessingMode %s",), + maxBSplineDisplacement=dict(argstr="--maxBSplineDisplacement %f",), + maximumNumberOfCorrections=dict(argstr="--maximumNumberOfCorrections %d",), + maximumNumberOfEvaluations=dict(argstr="--maximumNumberOfEvaluations %d",), + 
maximumStepLength=dict(argstr="--maximumStepLength %f",), + medianFilterSize=dict(argstr="--medianFilterSize %s", sep=",",), + metricSamplingStrategy=dict(argstr="--metricSamplingStrategy %s",), + minimumStepLength=dict(argstr="--minimumStepLength %s", sep=",",), + movingBinaryVolume=dict(argstr="--movingBinaryVolume %s", extensions=None,), + movingVolume=dict(argstr="--movingVolume %s", extensions=None,), + movingVolume2=dict(argstr="--movingVolume2 %s", extensions=None,), + movingVolumeTimeIndex=dict(argstr="--movingVolumeTimeIndex %d",), + numberOfHistogramBins=dict(argstr="--numberOfHistogramBins %d",), + numberOfIterations=dict(argstr="--numberOfIterations %s", sep=",",), + numberOfMatchPoints=dict(argstr="--numberOfMatchPoints %d",), + numberOfSamples=dict(argstr="--numberOfSamples %d",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), outputFixedVolumeROI=dict( - argstr='--outputFixedVolumeROI %s', - hash_files=False, + argstr="--outputFixedVolumeROI %s", hash_files=False, ), outputMovingVolumeROI=dict( - argstr='--outputMovingVolumeROI %s', - hash_files=False, - ), - outputTransform=dict( - argstr='--outputTransform %s', - hash_files=False, - ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - outputVolumePixelType=dict(argstr='--outputVolumePixelType %s', ), - projectedGradientTolerance=dict( - argstr='--projectedGradientTolerance %f', ), - promptUser=dict(argstr='--promptUser ', ), - relaxationFactor=dict(argstr='--relaxationFactor %f', ), - removeIntensityOutliers=dict(argstr='--removeIntensityOutliers %f', ), - reproportionScale=dict(argstr='--reproportionScale %f', ), - samplingPercentage=dict(argstr='--samplingPercentage %f', ), - scaleOutputValues=dict(argstr='--scaleOutputValues ', ), - skewScale=dict(argstr='--skewScale %f', ), - splineGridSize=dict( - argstr='--splineGridSize %s', - sep=',', - ), + argstr="--outputMovingVolumeROI %s", hash_files=False, + ), + outputTransform=dict(argstr="--outputTransform %s", hash_files=False,), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + outputVolumePixelType=dict(argstr="--outputVolumePixelType %s",), + projectedGradientTolerance=dict(argstr="--projectedGradientTolerance %f",), + promptUser=dict(argstr="--promptUser ",), + relaxationFactor=dict(argstr="--relaxationFactor %f",), + removeIntensityOutliers=dict(argstr="--removeIntensityOutliers %f",), + reproportionScale=dict(argstr="--reproportionScale %f",), + samplingPercentage=dict(argstr="--samplingPercentage %f",), + scaleOutputValues=dict(argstr="--scaleOutputValues ",), + skewScale=dict(argstr="--skewScale %f",), + splineGridSize=dict(argstr="--splineGridSize %s", sep=",",), strippedOutputTransform=dict( - argstr='--strippedOutputTransform %s', - hash_files=False, - ), - transformType=dict( - argstr='--transformType %s', - sep=',', - ), - translationScale=dict(argstr='--translationScale %f', ), - useAffine=dict(argstr='--useAffine ', ), - useBSpline=dict(argstr='--useBSpline ', ), - useComposite=dict(argstr='--useComposite ', ), - useROIBSpline=dict(argstr='--useROIBSpline ', ), - useRigid=dict(argstr='--useRigid ', ), - useScaleSkewVersor3D=dict(argstr='--useScaleSkewVersor3D ', ), - useScaleVersor3D=dict(argstr='--useScaleVersor3D ', ), - useSyN=dict(argstr='--useSyN ', ), - writeOutputTransformInFloat=dict( - argstr='--writeOutputTransformInFloat ', ), - writeTransformOnFailure=dict(argstr='--writeTransformOnFailure ', ), + argstr="--strippedOutputTransform %s", hash_files=False, + ), + 
transformType=dict(argstr="--transformType %s", sep=",",), + translationScale=dict(argstr="--translationScale %f",), + useAffine=dict(argstr="--useAffine ",), + useBSpline=dict(argstr="--useBSpline ",), + useComposite=dict(argstr="--useComposite ",), + useROIBSpline=dict(argstr="--useROIBSpline ",), + useRigid=dict(argstr="--useRigid ",), + useScaleSkewVersor3D=dict(argstr="--useScaleSkewVersor3D ",), + useScaleVersor3D=dict(argstr="--useScaleVersor3D ",), + useSyN=dict(argstr="--useSyN ",), + writeOutputTransformInFloat=dict(argstr="--writeOutputTransformInFloat ",), + writeTransformOnFailure=dict(argstr="--writeTransformOnFailure ",), ) inputs = BRAINSFit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSFit_outputs(): output_map = dict( - bsplineTransform=dict(extensions=None, ), - linearTransform=dict(extensions=None, ), - logFileReport=dict(extensions=None, ), - outputFixedVolumeROI=dict(extensions=None, ), - outputMovingVolumeROI=dict(extensions=None, ), - outputTransform=dict(extensions=None, ), - outputVolume=dict(extensions=None, ), - strippedOutputTransform=dict(extensions=None, ), + bsplineTransform=dict(extensions=None,), + linearTransform=dict(extensions=None,), + logFileReport=dict(extensions=None,), + outputFixedVolumeROI=dict(extensions=None,), + outputMovingVolumeROI=dict(extensions=None,), + outputTransform=dict(extensions=None,), + outputVolume=dict(extensions=None,), + strippedOutputTransform=dict(extensions=None,), ) outputs = BRAINSFit.output_spec() diff --git a/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSResample.py b/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSResample.py index 092689442d..2d94f19a5f 100644 --- a/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSResample.py +++ b/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSResample.py @@ -4,48 +4,29 @@ def test_BRAINSResample_inputs(): input_map = dict( - args=dict(argstr='%s', ), - defaultValue=dict(argstr='--defaultValue %f', ), - deformationVolume=dict( - argstr='--deformationVolume %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - gridSpacing=dict( - argstr='--gridSpacing %s', - sep=',', - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - interpolationMode=dict(argstr='--interpolationMode %s', ), - inverseTransform=dict(argstr='--inverseTransform ', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - pixelType=dict(argstr='--pixelType %s', ), - referenceVolume=dict( - argstr='--referenceVolume %s', - extensions=None, - ), - warpTransform=dict( - argstr='--warpTransform %s', - extensions=None, - ), + args=dict(argstr="%s",), + defaultValue=dict(argstr="--defaultValue %f",), + deformationVolume=dict(argstr="--deformationVolume %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), + gridSpacing=dict(argstr="--gridSpacing %s", sep=",",), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + interpolationMode=dict(argstr="--interpolationMode %s",), + inverseTransform=dict(argstr="--inverseTransform ",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + pixelType=dict(argstr="--pixelType %s",), + referenceVolume=dict(argstr="--referenceVolume %s", extensions=None,), + 
warpTransform=dict(argstr="--warpTransform %s", extensions=None,), ) inputs = BRAINSResample.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSResample_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = BRAINSResample.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSResize.py b/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSResize.py index 98280ec104..db477b2593 100644 --- a/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSResize.py +++ b/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSResize.py @@ -4,29 +4,22 @@ def test_BRAINSResize_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - pixelType=dict(argstr='--pixelType %s', ), - scaleFactor=dict(argstr='--scaleFactor %f', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + pixelType=dict(argstr="--pixelType %s",), + scaleFactor=dict(argstr="--scaleFactor %f",), ) inputs = BRAINSResize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSResize_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = BRAINSResize.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSTransformFromFiducials.py b/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSTransformFromFiducials.py index 5fea3e44e3..4e5c6ae239 100644 --- a/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSTransformFromFiducials.py +++ b/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSTransformFromFiducials.py @@ -4,35 +4,25 @@ def test_BRAINSTransformFromFiducials_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixedLandmarks=dict(argstr='--fixedLandmarks %s...', ), - fixedLandmarksFile=dict( - argstr='--fixedLandmarksFile %s', - extensions=None, - ), - movingLandmarks=dict(argstr='--movingLandmarks %s...', ), - movingLandmarksFile=dict( - argstr='--movingLandmarksFile %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - saveTransform=dict( - argstr='--saveTransform %s', - hash_files=False, - ), - transformType=dict(argstr='--transformType %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + fixedLandmarks=dict(argstr="--fixedLandmarks %s...",), + fixedLandmarksFile=dict(argstr="--fixedLandmarksFile %s", extensions=None,), + movingLandmarks=dict(argstr="--movingLandmarks %s...",), + movingLandmarksFile=dict(argstr="--movingLandmarksFile %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + saveTransform=dict(argstr="--saveTransform %s", hash_files=False,), + transformType=dict(argstr="--transformType %s",), 
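
Similarly, a hedged sketch of driving BRAINSTransformFromFiducials, whose trait test appears in this hunk. The landmark coordinates and output file name below are made up for illustration; transformType must be one of the Enum values Translation, Rigid, or Similarity defined earlier in this diff.

    from nipype.interfaces.semtools.registration.specialized import (
        BRAINSTransformFromFiducials,
    )

    fid = BRAINSTransformFromFiducials()
    fid.inputs.fixedLandmarks = [[0.0, 0.0, 0.0], [10.0, 0.0, 0.0]]   # one x,y,z triple per landmark
    fid.inputs.movingLandmarks = [[1.0, 0.0, 0.0], [11.0, 0.0, 0.0]]  # must pair with fixedLandmarks
    fid.inputs.transformType = "Rigid"         # --transformType Rigid
    fid.inputs.saveTransform = "landmarks.h5"  # matches the saveTransform.h5 default in _outputs_filenames
    print(fid.cmdline)  # .run() would require the external BRAINSTransformFromFiducials executable
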
) inputs = BRAINSTransformFromFiducials.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSTransformFromFiducials_outputs(): - output_map = dict(saveTransform=dict(extensions=None, ), ) + output_map = dict(saveTransform=dict(extensions=None,),) outputs = BRAINSTransformFromFiducials.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/registration/tests/test_auto_VBRAINSDemonWarp.py b/nipype/interfaces/semtools/registration/tests/test_auto_VBRAINSDemonWarp.py index fb0a3dd88e..098360c1e3 100644 --- a/nipype/interfaces/semtools/registration/tests/test_auto_VBRAINSDemonWarp.py +++ b/nipype/interfaces/semtools/registration/tests/test_auto_VBRAINSDemonWarp.py @@ -4,111 +4,78 @@ def test_VBRAINSDemonWarp_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), arrayOfPyramidLevelIterations=dict( - argstr='--arrayOfPyramidLevelIterations %s', - sep=',', + argstr="--arrayOfPyramidLevelIterations %s", sep=",", ), - backgroundFillValue=dict(argstr='--backgroundFillValue %d', ), + backgroundFillValue=dict(argstr="--backgroundFillValue %d",), checkerboardPatternSubdivisions=dict( - argstr='--checkerboardPatternSubdivisions %s', - sep=',', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixedBinaryVolume=dict( - argstr='--fixedBinaryVolume %s', - extensions=None, - ), - fixedVolume=dict(argstr='--fixedVolume %s...', ), - gradient_type=dict(argstr='--gradient_type %s', ), - gui=dict(argstr='--gui ', ), - histogramMatch=dict(argstr='--histogramMatch ', ), + argstr="--checkerboardPatternSubdivisions %s", sep=",", + ), + environ=dict(nohash=True, usedefault=True,), + fixedBinaryVolume=dict(argstr="--fixedBinaryVolume %s", extensions=None,), + fixedVolume=dict(argstr="--fixedVolume %s...",), + gradient_type=dict(argstr="--gradient_type %s",), + gui=dict(argstr="--gui ",), + histogramMatch=dict(argstr="--histogramMatch ",), initializeWithDisplacementField=dict( - argstr='--initializeWithDisplacementField %s', - extensions=None, + argstr="--initializeWithDisplacementField %s", extensions=None, ), initializeWithTransform=dict( - argstr='--initializeWithTransform %s', - extensions=None, - ), - inputPixelType=dict(argstr='--inputPixelType %s', ), - interpolationMode=dict(argstr='--interpolationMode %s', ), - lowerThresholdForBOBF=dict(argstr='--lowerThresholdForBOBF %d', ), - makeBOBF=dict(argstr='--makeBOBF ', ), - max_step_length=dict(argstr='--max_step_length %f', ), - medianFilterSize=dict( - argstr='--medianFilterSize %s', - sep=',', - ), - minimumFixedPyramid=dict( - argstr='--minimumFixedPyramid %s', - sep=',', - ), - minimumMovingPyramid=dict( - argstr='--minimumMovingPyramid %s', - sep=',', - ), - movingBinaryVolume=dict( - argstr='--movingBinaryVolume %s', - extensions=None, - ), - movingVolume=dict(argstr='--movingVolume %s...', ), - neighborhoodForBOBF=dict( - argstr='--neighborhoodForBOBF %s', - sep=',', - ), + argstr="--initializeWithTransform %s", extensions=None, + ), + inputPixelType=dict(argstr="--inputPixelType %s",), + interpolationMode=dict(argstr="--interpolationMode %s",), + lowerThresholdForBOBF=dict(argstr="--lowerThresholdForBOBF %d",), + makeBOBF=dict(argstr="--makeBOBF ",), + max_step_length=dict(argstr="--max_step_length %f",), + medianFilterSize=dict(argstr="--medianFilterSize %s", sep=",",), + minimumFixedPyramid=dict(argstr="--minimumFixedPyramid %s", sep=",",), + 
minimumMovingPyramid=dict(argstr="--minimumMovingPyramid %s", sep=",",), + movingBinaryVolume=dict(argstr="--movingBinaryVolume %s", extensions=None,), + movingVolume=dict(argstr="--movingVolume %s...",), + neighborhoodForBOBF=dict(argstr="--neighborhoodForBOBF %s", sep=",",), numberOfBCHApproximationTerms=dict( - argstr='--numberOfBCHApproximationTerms %d', ), - numberOfHistogramBins=dict(argstr='--numberOfHistogramBins %d', ), - numberOfMatchPoints=dict(argstr='--numberOfMatchPoints %d', ), - numberOfPyramidLevels=dict(argstr='--numberOfPyramidLevels %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + argstr="--numberOfBCHApproximationTerms %d", + ), + numberOfHistogramBins=dict(argstr="--numberOfHistogramBins %d",), + numberOfMatchPoints=dict(argstr="--numberOfMatchPoints %d",), + numberOfPyramidLevels=dict(argstr="--numberOfPyramidLevels %d",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), outputCheckerboardVolume=dict( - argstr='--outputCheckerboardVolume %s', - hash_files=False, + argstr="--outputCheckerboardVolume %s", hash_files=False, ), - outputDebug=dict(argstr='--outputDebug ', ), + outputDebug=dict(argstr="--outputDebug ",), outputDisplacementFieldPrefix=dict( - argstr='--outputDisplacementFieldPrefix %s', ), - outputDisplacementFieldVolume=dict( - argstr='--outputDisplacementFieldVolume %s', - hash_files=False, - ), - outputNormalized=dict(argstr='--outputNormalized ', ), - outputPixelType=dict(argstr='--outputPixelType %s', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - promptUser=dict(argstr='--promptUser ', ), - registrationFilterType=dict(argstr='--registrationFilterType %s', ), - seedForBOBF=dict( - argstr='--seedForBOBF %s', - sep=',', - ), - smoothDisplacementFieldSigma=dict( - argstr='--smoothDisplacementFieldSigma %f', ), - upFieldSmoothing=dict(argstr='--upFieldSmoothing %f', ), - upperThresholdForBOBF=dict(argstr='--upperThresholdForBOBF %d', ), - use_vanilla_dem=dict(argstr='--use_vanilla_dem ', ), - weightFactors=dict( - argstr='--weightFactors %s', - sep=',', + argstr="--outputDisplacementFieldPrefix %s", ), + outputDisplacementFieldVolume=dict( + argstr="--outputDisplacementFieldVolume %s", hash_files=False, + ), + outputNormalized=dict(argstr="--outputNormalized ",), + outputPixelType=dict(argstr="--outputPixelType %s",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + promptUser=dict(argstr="--promptUser ",), + registrationFilterType=dict(argstr="--registrationFilterType %s",), + seedForBOBF=dict(argstr="--seedForBOBF %s", sep=",",), + smoothDisplacementFieldSigma=dict(argstr="--smoothDisplacementFieldSigma %f",), + upFieldSmoothing=dict(argstr="--upFieldSmoothing %f",), + upperThresholdForBOBF=dict(argstr="--upperThresholdForBOBF %d",), + use_vanilla_dem=dict(argstr="--use_vanilla_dem ",), + weightFactors=dict(argstr="--weightFactors %s", sep=",",), ) inputs = VBRAINSDemonWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_VBRAINSDemonWarp_outputs(): output_map = dict( - outputCheckerboardVolume=dict(extensions=None, ), - outputDisplacementFieldVolume=dict(extensions=None, ), - outputVolume=dict(extensions=None, ), + outputCheckerboardVolume=dict(extensions=None,), + outputDisplacementFieldVolume=dict(extensions=None,), + outputVolume=dict(extensions=None,), ) outputs = VBRAINSDemonWarp.output_spec() diff --git 
a/nipype/interfaces/semtools/segmentation/__init__.py b/nipype/interfaces/semtools/segmentation/__init__.py index e5ea4e2776..0cc6090203 100644 --- a/nipype/interfaces/semtools/segmentation/__init__.py +++ b/nipype/interfaces/semtools/segmentation/__init__.py @@ -1,5 +1,11 @@ # -*- coding: utf-8 -*- from .specialized import ( - BRAINSCut, BRAINSROIAuto, BRAINSConstellationDetector, - BRAINSCreateLabelMapFromProbabilityMaps, BinaryMaskEditorBasedOnLandmarks, - BRAINSMultiSTAPLE, BRAINSABC, ESLR) + BRAINSCut, + BRAINSROIAuto, + BRAINSConstellationDetector, + BRAINSCreateLabelMapFromProbabilityMaps, + BinaryMaskEditorBasedOnLandmarks, + BRAINSMultiSTAPLE, + BRAINSABC, + ESLR, +) diff --git a/nipype/interfaces/semtools/segmentation/specialized.py b/nipype/interfaces/semtools/segmentation/specialized.py index fa08b8e260..483b5470fe 100644 --- a/nipype/interfaces/semtools/segmentation/specialized.py +++ b/nipype/interfaces/semtools/segmentation/specialized.py @@ -5,66 +5,78 @@ import os -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class BRAINSCutInputSpec(CommandLineInputSpec): netConfiguration = File( - desc= - "XML File defining BRAINSCut parameters. OLD NAME. PLEASE USE modelConfigurationFilename instead.", + desc="XML File defining BRAINSCut parameters. OLD NAME. PLEASE USE modelConfigurationFilename instead.", exists=True, - argstr="--netConfiguration %s") + argstr="--netConfiguration %s", + ) modelConfigurationFilename = File( desc="XML File defining BRAINSCut parameters", exists=True, - argstr="--modelConfigurationFilename %s") + argstr="--modelConfigurationFilename %s", + ) trainModelStartIndex = traits.Int( - desc="Starting iteration for training", - argstr="--trainModelStartIndex %d") + desc="Starting iteration for training", argstr="--trainModelStartIndex %d" + ) verbose = traits.Int( - desc="print out some debugging information", argstr="--verbose %d") + desc="print out some debugging information", argstr="--verbose %d" + ) multiStructureThreshold = traits.Bool( desc="multiStructureThreshold module to deal with overlaping area", - argstr="--multiStructureThreshold ") + argstr="--multiStructureThreshold ", + ) histogramEqualization = traits.Bool( - desc= - "A Histogram Equalization process could be added to the creating/applying process from Subject To Atlas. Default is false, which genreate input vectors without Histogram Equalization. ", - argstr="--histogramEqualization ") + desc="A Histogram Equalization process could be added to the creating/applying process from Subject To Atlas. Default is false, which generates input vectors without Histogram Equalization. ", + argstr="--histogramEqualization ", + ) computeSSEOn = traits.Bool( - desc= - "compute Sum of Square Error (SSE) along the trained model until the number of iteration given in the modelConfigurationFilename file", - argstr="--computeSSEOn ") + desc="compute Sum of Square Error (SSE) along the trained model until the number of iteration given in the modelConfigurationFilename file", + argstr="--computeSSEOn ", + ) generateProbability = traits.Bool( - desc="Generate probability map", argstr="--generateProbability ") + desc="Generate probability map", argstr="--generateProbability " + ) createVectors = traits.Bool( - desc="create vectors for training neural net", - argstr="--createVectors ") - trainModel = traits.Bool( - desc="train the neural net", argstr="--trainModel ") + desc="create vectors for training neural net", argstr="--createVectors " + ) + trainModel = traits.Bool(desc="train the neural net", argstr="--trainModel ") NoTrainingVectorShuffling = traits.Bool( desc="If this flag is on, there will be no shuffling.", - argstr="--NoTrainingVectorShuffling ") - applyModel = traits.Bool( - desc="apply the neural net", argstr="--applyModel ") + argstr="--NoTrainingVectorShuffling ", + ) + applyModel = traits.Bool(desc="apply the neural net", argstr="--applyModel ") validate = traits.Bool( - desc= - "validate data set.Just need for the first time run ( This is for validation of xml file and not working yet )", - argstr="--validate ") + desc="validate data set. Just need for the first time run ( This is for validation of xml file and not working yet )", + argstr="--validate ", + ) method = traits.Enum("RandomForest", "ANN", argstr="--method %s") numberOfTrees = traits.Int( - desc= - " Random tree: number of trees. This is to be used when only one model with specified depth wish to be created. ", - argstr="--numberOfTrees %d") + desc=" Random tree: number of trees. This is to be used when only one model with specified depth wishes to be created. ", + argstr="--numberOfTrees %d", + ) randomTreeDepth = traits.Int( - desc= - " Random tree depth. This is to be used when only one model with specified depth wish to be created. ", - argstr="--randomTreeDepth %d") + desc=" Random tree depth. This is to be used when only one model with specified depth wishes to be created. ", + argstr="--randomTreeDepth %d", + ) modelFilename = traits.Str( - desc= - " model file name given from user (not by xml configuration file) ", - argstr="--modelFilename %s") + desc=" model file name given from user (not by xml configuration file) ", + argstr="--modelFilename %s", + )
", + argstr="--randomTreeDepth %d", + ) modelFilename = traits.Str( - desc= - " model file name given from user (not by xml configuration file) ", - argstr="--modelFilename %s") + desc=" model file name given from user (not by xml configuration file) ", + argstr="--modelFilename %s", + ) class BRAINSCutOutputSpec(TraitedSpec): @@ -97,41 +109,45 @@ class BRAINSROIAutoInputSpec(CommandLineInputSpec): inputVolume = File( desc="The input image for finding the largest region filled mask.", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) outputROIMaskVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="The ROI automatically found from the input image.", - argstr="--outputROIMaskVolume %s") + argstr="--outputROIMaskVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "The inputVolume with optional [maskOutput|cropOutput] to the region of the brain mask.", - argstr="--outputVolume %s") + desc="The inputVolume with optional [maskOutput|cropOutput] to the region of the brain mask.", + argstr="--outputVolume %s", + ) maskOutput = traits.Bool( - desc="The inputVolume multiplied by the ROI mask.", - argstr="--maskOutput ") + desc="The inputVolume multiplied by the ROI mask.", argstr="--maskOutput " + ) cropOutput = traits.Bool( desc="The inputVolume cropped to the region of the ROI mask.", - argstr="--cropOutput ") + argstr="--cropOutput ", + ) otsuPercentileThreshold = traits.Float( desc="Parameter to the Otsu threshold algorithm.", - argstr="--otsuPercentileThreshold %f") + argstr="--otsuPercentileThreshold %f", + ) thresholdCorrectionFactor = traits.Float( - desc= - "A factor to scale the Otsu algorithm's result threshold, in case clipping mangles the image.", - argstr="--thresholdCorrectionFactor %f") + desc="A factor to scale the Otsu algorithm's result threshold, in case clipping mangles the image.", + argstr="--thresholdCorrectionFactor %f", + ) closingSize = traits.Float( - desc= - "The Closing Size (in millimeters) for largest connected filled mask. This value is divided by image spacing and rounded to the next largest voxel number.", - argstr="--closingSize %f") + desc="The Closing Size (in millimeters) for largest connected filled mask. This value is divided by image spacing and rounded to the next largest voxel number.", + argstr="--closingSize %f", + ) ROIAutoDilateSize = traits.Float( - desc= - "This flag is only relavent when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. At setting of 10mm has been shown to help regularize a BSpline registration type so that there is some background constraints to match the edges of the head better.", - argstr="--ROIAutoDilateSize %f") + desc="This flag is only relavent when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. 
At setting of 10mm has been shown to help regularize a BSpline registration type so that there is some background constraints to match the edges of the head better.", + argstr="--ROIAutoDilateSize %f", + ) outputVolumePixelType = traits.Enum( "float", "short", @@ -139,21 +155,23 @@ class BRAINSROIAutoInputSpec(CommandLineInputSpec): "int", "uint", "uchar", - desc= - "The output image Pixel Type is the scalar datatype for representation of the Output Volume.", - argstr="--outputVolumePixelType %s") + desc="The output image Pixel Type is the scalar datatype for representation of the Output Volume.", + argstr="--outputVolumePixelType %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSROIAutoOutputSpec(TraitedSpec): outputROIMaskVolume = File( - desc="The ROI automatically found from the input image.", exists=True) + desc="The ROI automatically found from the input image.", exists=True + ) outputVolume = File( - desc= - "The inputVolume with optional [maskOutput|cropOutput] to the region of the brain mask.", - exists=True) + desc="The inputVolume with optional [maskOutput|cropOutput] to the region of the brain mask.", + exists=True, + ) class BRAINSROIAuto(SEMLikeCommandLine): @@ -177,119 +195,122 @@ class BRAINSROIAuto(SEMLikeCommandLine): output_spec = BRAINSROIAutoOutputSpec _cmd = " BRAINSROIAuto " _outputs_filenames = { - 'outputVolume': 'outputVolume.nii', - 'outputROIMaskVolume': 'outputROIMaskVolume.nii' + "outputVolume": "outputVolume.nii", + "outputROIMaskVolume": "outputROIMaskVolume.nii", } _redirect_x = False class BRAINSConstellationDetectorInputSpec(CommandLineInputSpec): houghEyeDetectorMode = traits.Int( - desc= - ", This flag controls the mode of Hough eye detector. By default, value of 1 is for T1W images, while the value of 0 is for T2W and PD images., ", - argstr="--houghEyeDetectorMode %d") + desc=", This flag controls the mode of Hough eye detector. By default, value of 1 is for T1W images, while the value of 0 is for T2W and PD images., ", + argstr="--houghEyeDetectorMode %d", + ) inputTemplateModel = File( desc="User-specified template model., ", exists=True, - argstr="--inputTemplateModel %s") + argstr="--inputTemplateModel %s", + ) LLSModel = File( desc="Linear least squares model filename in HD5 format", exists=True, - argstr="--LLSModel %s") + argstr="--LLSModel %s", + ) inputVolume = File( desc="Input image in which to find ACPC points", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "ACPC-aligned output image with the same voxels, but updated origin, and direction cosign so that the AC point would fall at the physical location (0.0,0.0,0.0), and the mid-sagital plane is the plane where physical L/R coordinate is 0.0.", - argstr="--outputVolume %s") + desc="ACPC-aligned output image with the same voxels, but updated origin, and direction cosign so that the AC point would fall at the physical location (0.0,0.0,0.0), and the mid-sagital plane is the plane where physical L/R coordinate is 0.0.", + argstr="--outputVolume %s", + ) outputResampledVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "ACPC-aligned output image in a resampled unifor space. Currently this is a 1mm, 256^3, Identity direction image.", - argstr="--outputResampledVolume %s") + desc="ACPC-aligned output image in a resampled unifor space. 
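The _outputs_filenames mapping above is what makes the traits.Either(Bool, File) output traits usable: assigning True asks SEMLikeCommandLine to fill in the listed default filename. A hedged sketch of BRAINSROIAuto, assuming the executable is installed and the (hypothetical) input file exists:

from nipype.interfaces.semtools.segmentation.specialized import BRAINSROIAuto

roi = BRAINSROIAuto()
roi.inputs.inputVolume = "subject_T1.nii.gz"  # hypothetical; exists=True requires a real file
roi.inputs.outputROIMaskVolume = True         # True -> "outputROIMaskVolume.nii" from _outputs_filenames
roi.inputs.cropOutput = True
print(roi.cmdline)   # inspect the assembled command line
# result = roi.run() # would execute BRAINSROIAuto and collect the outputs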
class BRAINSConstellationDetectorInputSpec(CommandLineInputSpec): houghEyeDetectorMode = traits.Int( - desc= - ", This flag controls the mode of Hough eye detector. By default, value of 1 is for T1W images, while the value of 0 is for T2W and PD images., ", - argstr="--houghEyeDetectorMode %d") + desc=", This flag controls the mode of Hough eye detector. By default, value of 1 is for T1W images, while the value of 0 is for T2W and PD images., ", + argstr="--houghEyeDetectorMode %d", + ) inputTemplateModel = File( desc="User-specified template model., ", exists=True, - argstr="--inputTemplateModel %s") + argstr="--inputTemplateModel %s", + ) LLSModel = File( desc="Linear least squares model filename in HD5 format", exists=True, - argstr="--LLSModel %s") + argstr="--LLSModel %s", + ) inputVolume = File( desc="Input image in which to find ACPC points", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "ACPC-aligned output image with the same voxels, but updated origin, and direction cosign so that the AC point would fall at the physical location (0.0,0.0,0.0), and the mid-sagital plane is the plane where physical L/R coordinate is 0.0.", - argstr="--outputVolume %s") + desc="ACPC-aligned output image with the same voxels, but updated origin, and direction cosine so that the AC point would fall at the physical location (0.0,0.0,0.0), and the mid-sagittal plane is the plane where physical L/R coordinate is 0.0.", + argstr="--outputVolume %s", + ) outputResampledVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "ACPC-aligned output image in a resampled unifor space. Currently this is a 1mm, 256^3, Identity direction image.", - argstr="--outputResampledVolume %s") + desc="ACPC-aligned output image in a resampled uniform space. Currently this is a 1mm, 256^3, Identity direction image.", + argstr="--outputResampledVolume %s", + ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "The filename for the original space to ACPC alignment to be written (in .h5 format)., ", - argstr="--outputTransform %s") + desc="The filename for the original space to ACPC alignment to be written (in .h5 format)., ", + argstr="--outputTransform %s", + ) outputLandmarksInInputSpace = traits.Either( traits.Bool, File(), hash_files=False, - desc= - ", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (.fcsv) with the landmarks in the original image space (the detected RP, AC, PC, and VN4) in it to be written., ", - argstr="--outputLandmarksInInputSpace %s") + desc=", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (.fcsv) with the landmarks in the original image space (the detected RP, AC, PC, and VN4) in it to be written., ", + argstr="--outputLandmarksInInputSpace %s", + ) outputLandmarksInACPCAlignedSpace = traits.Either( traits.Bool, File(), hash_files=False, - desc= - ", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (.fcsv) with the landmarks in the output image space (the detected RP, AC, PC, and VN4) in it to be written., ", - argstr="--outputLandmarksInACPCAlignedSpace %s") + desc=", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (.fcsv) with the landmarks in the output image space (the detected RP, AC, PC, and VN4) in it to be written., ", + argstr="--outputLandmarksInACPCAlignedSpace %s", + ) outputMRML = traits.Either( traits.Bool, File(), hash_files=False, - desc= - ", The filename for the new subject-specific scene definition file in the same format produced by Slicer3 (in .mrml format). Only the components that were specified by the user on command line would be generated. Compatible components include inputVolume, outputVolume, outputLandmarksInInputSpace, outputLandmarksInACPCAlignedSpace, and outputTransform., ", - argstr="--outputMRML %s") + desc=", The filename for the new subject-specific scene definition file in the same format produced by Slicer3 (in .mrml format). Only the components that were specified by the user on command line would be generated. Compatible components include inputVolume, outputVolume, outputLandmarksInInputSpace, outputLandmarksInACPCAlignedSpace, and outputTransform., ", + argstr="--outputMRML %s", + ) outputVerificationScript = traits.Either( traits.Bool, File(), hash_files=False, - desc= - ", The filename for the Slicer3 script that verifies the aligned landmarks against the aligned image file. This will happen only in conjunction with saveOutputLandmarks and an outputVolume., ", - argstr="--outputVerificationScript %s") + desc=", The filename for the Slicer3 script that verifies the aligned landmarks against the aligned image file. This will happen only in conjunction with saveOutputLandmarks and an outputVolume., ", + argstr="--outputVerificationScript %s", + ) mspQualityLevel = traits.Int( - desc= - ", Flag cotrols how agressive the MSP is estimated. 0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds), NOTE: -1= Prealigned so no estimate!., ", - argstr="--mspQualityLevel %d") + desc=", Flag controls how aggressive the MSP is estimated. 
0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds), NOTE: -1= Prealigned so no estimate!., ", + argstr="--mspQualityLevel %d", + ) otsuPercentileThreshold = traits.Float( - desc= - ", This is a parameter to FindLargestForegroundFilledMask, which is employed when acLowerBound is set and an outputUntransformedClippedVolume is requested., ", - argstr="--otsuPercentileThreshold %f") + desc=", This is a parameter to FindLargestForegroundFilledMask, which is employed when acLowerBound is set and an outputUntransformedClippedVolume is requested., ", + argstr="--otsuPercentileThreshold %f", + ) acLowerBound = traits.Float( - desc= - ", When generating a resampled output image, replace the image with the BackgroundFillValue everywhere below the plane This Far in physical units (millimeters) below (inferior to) the AC point (as found by the model.) The oversize default was chosen to have no effect. Based on visualizing a thousand masks in the IPIG study, we recommend a limit no smaller than 80.0 mm., ", - argstr="--acLowerBound %f") + desc=", When generating a resampled output image, replace the image with the BackgroundFillValue everywhere below the plane This Far in physical units (millimeters) below (inferior to) the AC point (as found by the model.) The oversize default was chosen to have no effect. Based on visualizing a thousand masks in the IPIG study, we recommend a limit no smaller than 80.0 mm., ", + argstr="--acLowerBound %f", + ) cutOutHeadInOutputVolume = traits.Bool( - desc= - ", Flag to cut out just the head tissue when producing an (un)transformed clipped volume., ", - argstr="--cutOutHeadInOutputVolume ") + desc=", Flag to cut out just the head tissue when producing an (un)transformed clipped volume., ", + argstr="--cutOutHeadInOutputVolume ", + ) outputUntransformedClippedVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Output image in which to store neck-clipped input image, with the use of --acLowerBound and maybe --cutOutHeadInUntransformedVolume.", - argstr="--outputUntransformedClippedVolume %s") + desc="Output image in which to store neck-clipped input image, with the use of --acLowerBound and maybe --cutOutHeadInUntransformedVolume.", + argstr="--outputUntransformedClippedVolume %s", + ) rescaleIntensities = traits.Bool( - desc= - ", Flag to turn on rescaling image intensities on input., ", - argstr="--rescaleIntensities ") + desc=", Flag to turn on rescaling image intensities on input., ", + argstr="--rescaleIntensities ", + ) trimRescaledIntensities = traits.Float( - desc= - ", Turn on clipping the rescaled image one-tailed on input. Units of standard deviations above the mean. Very large values are very permissive. Non-positive value turns clipping off. Defaults to removing 0.00001 of a normal tail above the mean., ", - argstr="--trimRescaledIntensities %f") + desc=", Turn on clipping the rescaled image one-tailed on input. Units of standard deviations above the mean. Very large values are very permissive. Non-positive value turns clipping off. Defaults to removing 0.00001 of a normal tail above the mean., ", + argstr="--trimRescaledIntensities %f", + ) rescaleIntensitiesOutputRange = InputMultiPath( traits.Int, - desc= - ", This pair of integers gives the lower and upper bounds on the signal portion of the output image. 
Out-of-field voxels are taken from BackgroundFillValue., ", + desc=", This pair of integers gives the lower and upper bounds on the signal portion of the output image. Out-of-field voxels are taken from BackgroundFillValue., ", sep=",", - argstr="--rescaleIntensitiesOutputRange %s") + argstr="--rescaleIntensitiesOutputRange %s", + ) BackgroundFillValue = traits.Str( - desc= - "Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.", - argstr="--BackgroundFillValue %s") + desc="Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.", + argstr="--BackgroundFillValue %s", + ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", @@ -301,141 +322,146 @@ class BRAINSConstellationDetectorInputSpec(CommandLineInputSpec): "Welch", "Lanczos", "Blackman", - desc= - "Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", - argstr="--interpolationMode %s") + desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", + argstr="--interpolationMode %s", + ) forceACPoint = InputMultiPath( traits.Float, - desc= - ", Use this flag to manually specify the AC point from the original image on the command line., ", + desc=", Use this flag to manually specify the AC point from the original image on the command line., ", sep=",", - argstr="--forceACPoint %s") + argstr="--forceACPoint %s", + ) forcePCPoint = InputMultiPath( traits.Float, - desc= - ", Use this flag to manually specify the PC point from the original image on the command line., ", + desc=", Use this flag to manually specify the PC point from the original image on the command line., ", sep=",", - argstr="--forcePCPoint %s") + argstr="--forcePCPoint %s", + ) forceVN4Point = InputMultiPath( traits.Float, - desc= - ", Use this flag to manually specify the VN4 point from the original image on the command line., ", + desc=", Use this flag to manually specify the VN4 point from the original image on the command line., ", sep=",", - argstr="--forceVN4Point %s") + argstr="--forceVN4Point %s", + ) forceRPPoint = InputMultiPath( traits.Float, - desc= - ", Use this flag to manually specify the RP point from the original image on the command line., ", + desc=", Use this flag to manually specify the RP point from the original image on the command line., ", sep=",", - argstr="--forceRPPoint %s") + argstr="--forceRPPoint %s", + ) inputLandmarksEMSP = File( - desc= - ", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (in .fcsv) with the landmarks in the estimated MSP aligned space to be loaded. The detector will only process landmarks not enlisted on the file., ", + desc=", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (in .fcsv) with the landmarks in the estimated MSP aligned space to be loaded. 
The detector will only process landmarks not enlisted on the file., ", + desc=", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (in .fcsv) with the landmarks in the estimated MSP aligned space to be loaded. The detector will only process landmarks not listed in the file., ", exists=True, - argstr="--inputLandmarksEMSP %s") + argstr="--inputLandmarksEMSP %s", + ) forceHoughEyeDetectorReportFailure = traits.Bool( - desc= - ", Flag indicates whether the Hough eye detector should report failure, ", - argstr="--forceHoughEyeDetectorReportFailure ") + desc=", Flag indicates whether the Hough eye detector should report failure, ", + argstr="--forceHoughEyeDetectorReportFailure ", + ) rmpj = traits.Float( - desc= - ", Search radius for MPJ in unit of mm, ", - argstr="--rmpj %f") + desc=", Search radius for MPJ in unit of mm, ", + argstr="--rmpj %f", + ) rac = traits.Float( desc=", Search radius for AC in unit of mm, ", - argstr="--rac %f") + argstr="--rac %f", + ) rpc = traits.Float( desc=", Search radius for PC in unit of mm, ", - argstr="--rpc %f") + argstr="--rpc %f", + ) rVN4 = traits.Float( - desc= - ", Search radius for VN4 in unit of mm, ", - argstr="--rVN4 %f") + desc=", Search radius for VN4 in unit of mm, ", + argstr="--rVN4 %f", + ) debug = traits.Bool( - desc= - ", Show internal debugging information., ", - argstr="--debug ") + desc=", Show internal debugging information., ", + argstr="--debug ", + ) verbose = traits.Bool( desc=", Show more verbose output, ", - argstr="--verbose ") + argstr="--verbose ", + ) writeBranded2DImage = traits.Either( traits.Bool, File(), hash_files=False, - desc= - ", The filename for the 2D .png branded midline debugging image. This will happen only in conjunction with requesting an outputVolume., ", - argstr="--writeBranded2DImage %s") + desc=", The filename for the 2D .png branded midline debugging image. This will happen only in conjunction with requesting an outputVolume., ", + argstr="--writeBranded2DImage %s", + ) resultsDir = traits.Either( traits.Bool, Directory(), hash_files=False, - desc= - ", The directory for the debuging images to be written., ", - argstr="--resultsDir %s") + desc=", The directory for the debugging images to be written., ", + argstr="--resultsDir %s", + ) writedebuggingImagesLevel = traits.Int( - desc= - ", This flag controls if debugging images are produced. By default value of 0 is no images. Anything greater than zero will be increasing level of debugging images., ", - argstr="--writedebuggingImagesLevel %d") + desc=", This flag controls if debugging images are produced. By default value of 0 is no images. Anything greater than zero will increase the level of debugging images., ", + argstr="--writedebuggingImagesLevel %d", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) atlasVolume = File( desc="Atlas volume image to be used for BRAINSFit registration", exists=True, - argstr="--atlasVolume %s") + argstr="--atlasVolume %s", + ) atlasLandmarks = File( - desc= - "Atlas landmarks to be used for BRAINSFit registration initialization, ", + desc="Atlas landmarks to be used for BRAINSFit registration initialization, ", exists=True, - argstr="--atlasLandmarks %s") + argstr="--atlasLandmarks %s", + ) atlasLandmarkWeights = File( - desc= - "Weights associated with atlas landmarks to be used for BRAINSFit registration initialization, ", + desc="Weights associated with atlas landmarks to be used for BRAINSFit registration initialization, ", exists=True, - argstr="--atlasLandmarkWeights %s") + argstr="--atlasLandmarkWeights %s", + ) class BRAINSConstellationDetectorOutputSpec(TraitedSpec): outputVolume = File( - desc= - "ACPC-aligned output image with the same voxels, but updated origin, and direction cosign so that the AC point would fall at the physical location (0.0,0.0,0.0), and the mid-sagital plane is the plane where physical L/R coordinate is 0.0.", - exists=True) + desc="ACPC-aligned output image with the same voxels, but updated origin, and direction cosine so that the AC point would fall at the physical location (0.0,0.0,0.0), and the mid-sagittal plane is the plane where physical L/R coordinate is 0.0.", + exists=True, + ) outputResampledVolume = File( - desc= - "ACPC-aligned output image in a resampled unifor space. Currently this is a 1mm, 256^3, Identity direction image.", - exists=True) + desc="ACPC-aligned output image in a resampled uniform space. Currently this is a 1mm, 256^3, Identity direction image.", + exists=True, + ) outputTransform = File( - desc= - "The filename for the original space to ACPC alignment to be written (in .h5 format)., ", - exists=True) + desc="The filename for the original space to ACPC alignment to be written (in .h5 format)., ", + exists=True, + ) outputLandmarksInInputSpace = File( - desc= - ", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (.fcsv) with the landmarks in the original image space (the detected RP, AC, PC, and VN4) in it to be written., ", - exists=True) + desc=", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (.fcsv) with the landmarks in the original image space (the detected RP, AC, PC, and VN4) in it to be written., ", + exists=True, + ) outputLandmarksInACPCAlignedSpace = File( - desc= - ", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (.fcsv) with the landmarks in the output image space (the detected RP, AC, PC, and VN4) in it to be written., ", - exists=True) + desc=", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (.fcsv) with the landmarks in the output image space (the detected RP, AC, PC, and VN4) in it to be written., ", + exists=True, + ) outputMRML = File( - desc= - ", The filename for the new subject-specific scene definition file in the same format produced by Slicer3 (in .mrml format). Only the components that were specified by the user on command line would be generated. Compatible components include inputVolume, outputVolume, outputLandmarksInInputSpace, outputLandmarksInACPCAlignedSpace, and outputTransform., ", - exists=True) + desc=", The filename for the new subject-specific scene definition file in the same format produced by Slicer3 (in .mrml format). Only the components that were specified by the user on command line would be generated. Compatible components include inputVolume, outputVolume, outputLandmarksInInputSpace, outputLandmarksInACPCAlignedSpace, and outputTransform., ", + exists=True, + ) outputVerificationScript = File( - desc= - ", The filename for the Slicer3 script that verifies the aligned landmarks against the aligned image file. This will happen only in conjunction with saveOutputLandmarks and an outputVolume., ", - exists=True) + desc=", The filename for the Slicer3 script that verifies the aligned landmarks against the aligned image file. This will happen only in conjunction with saveOutputLandmarks and an outputVolume., ", + exists=True, + ) outputUntransformedClippedVolume = File( - desc= - "Output image in which to store neck-clipped input image, with the use of --acLowerBound and maybe --cutOutHeadInUntransformedVolume.", - exists=True) + desc="Output image in which to store neck-clipped input image, with the use of --acLowerBound and maybe --cutOutHeadInUntransformedVolume.", + exists=True, + ) writeBranded2DImage = File( - desc= - ", The filename for the 2D .png branded midline debugging image. This will happen only in conjunction with requesting an outputVolume., ", - exists=True) + desc=", The filename for the 2D .png branded midline debugging image. This will happen only in conjunction with requesting an outputVolume., ", + exists=True, + ) resultsDir = Directory( - desc= - ", The directory for the debuging images to be written., ", - exists=True) + desc=", The directory for the debugging images to be written., ", + exists=True, + ) class BRAINSConstellationDetector(SEMLikeCommandLine): @@ -455,26 +481,16 @@ class BRAINSConstellationDetector(SEMLikeCommandLine): output_spec = BRAINSConstellationDetectorOutputSpec _cmd = " BRAINSConstellationDetector " _outputs_filenames = { - 'outputVolume': - 'outputVolume.nii.gz', - 'outputMRML': - 'outputMRML.mrml', - 'resultsDir': - 'resultsDir', - 'outputResampledVolume': - 'outputResampledVolume.nii.gz', - 'outputTransform': - 'outputTransform.h5', - 'writeBranded2DImage': - 'writeBranded2DImage.png', - 'outputLandmarksInACPCAlignedSpace': - 'outputLandmarksInACPCAlignedSpace.fcsv', - 'outputLandmarksInInputSpace': - 'outputLandmarksInInputSpace.fcsv', - 'outputUntransformedClippedVolume': - 'outputUntransformedClippedVolume.nii.gz', - 'outputVerificationScript': - 'outputVerificationScript.sh' + "outputVolume": "outputVolume.nii.gz", + "outputMRML": "outputMRML.mrml", + "resultsDir": "resultsDir", + "outputResampledVolume": "outputResampledVolume.nii.gz", + "outputTransform": "outputTransform.h5", + "writeBranded2DImage": "writeBranded2DImage.png", + "outputLandmarksInACPCAlignedSpace": "outputLandmarksInACPCAlignedSpace.fcsv", + "outputLandmarksInInputSpace": "outputLandmarksInInputSpace.fcsv", + "outputUntransformedClippedVolume": "outputUntransformedClippedVolume.nii.gz", + "outputVerificationScript": "outputVerificationScript.sh", } _redirect_x = False
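BRAINSConstellationDetector is the most heavily parameterized interface in this hunk, so a short usage sketch may help. It is not part of the diff; the input filename is hypothetical, and the BRAINSConstellationDetector executable is assumed to be installed:

from nipype.interfaces.semtools.segmentation.specialized import BRAINSConstellationDetector

bcd = BRAINSConstellationDetector()
bcd.inputs.inputVolume = "subject_T1.nii.gz"  # hypothetical; exists=True requires a real file
bcd.inputs.outputVolume = True      # -> outputVolume.nii.gz via _outputs_filenames
bcd.inputs.outputTransform = True   # -> outputTransform.h5
bcd.inputs.mspQualityLevel = 2      # per the desc above, "great estimate (22 seconds)"
print(bcd.cmdline)  # inspect the assembled command line without running the tool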
argstr="--inputProbabilityVolume %s...") + argstr="--inputProbabilityVolume %s...", + ) priorLabelCodes = InputMultiPath( traits.Int, - desc= - "A list of PriorLabelCode values used for coding the output label images", + desc="A list of PriorLabelCode values used for coding the output label images", sep=",", - argstr="--priorLabelCodes %s") + argstr="--priorLabelCodes %s", + ) foregroundPriors = InputMultiPath( traits.Int, desc="A list: For each Prior Label, 1 if foreground, 0 if background", sep=",", - argstr="--foregroundPriors %s") + argstr="--foregroundPriors %s", + ) nonAirRegionMask = File( - desc= - "a mask representing the \'NonAirRegion\' -- Just force pixels in this region to zero", + desc="a mask representing the 'NonAirRegion' -- Just force pixels in this region to zero", exists=True, - argstr="--nonAirRegionMask %s") + argstr="--nonAirRegionMask %s", + ) inclusionThreshold = traits.Float( - desc="tolerance for inclusion", argstr="--inclusionThreshold %f") + desc="tolerance for inclusion", argstr="--inclusionThreshold %f" + ) dirtyLabelVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="the labels prior to cleaning", - argstr="--dirtyLabelVolume %s") + argstr="--dirtyLabelVolume %s", + ) cleanLabelVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="the foreground labels volume", - argstr="--cleanLabelVolume %s") + argstr="--cleanLabelVolume %s", + ) class BRAINSCreateLabelMapFromProbabilityMapsOutputSpec(TraitedSpec): @@ -534,8 +555,8 @@ class BRAINSCreateLabelMapFromProbabilityMaps(SEMLikeCommandLine): output_spec = BRAINSCreateLabelMapFromProbabilityMapsOutputSpec _cmd = " BRAINSCreateLabelMapFromProbabilityMaps " _outputs_filenames = { - 'dirtyLabelVolume': 'dirtyLabelVolume.nii', - 'cleanLabelVolume': 'cleanLabelVolume.nii' + "dirtyLabelVolume": "dirtyLabelVolume.nii", + "cleanLabelVolume": "cleanLabelVolume.nii", } _redirect_x = False @@ -544,47 +565,50 @@ class BinaryMaskEditorBasedOnLandmarksInputSpec(CommandLineInputSpec): inputBinaryVolume = File( desc="Input binary image in which to be edited", exists=True, - argstr="--inputBinaryVolume %s") + argstr="--inputBinaryVolume %s", + ) outputBinaryVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output binary image in which to be edited", - argstr="--outputBinaryVolume %s") + argstr="--outputBinaryVolume %s", + ) inputLandmarksFilename = File( - desc= - " The filename for the landmark definition file in the same format produced by Slicer3 (.fcsv). ", + desc=" The filename for the landmark definition file in the same format produced by Slicer3 (.fcsv). ", exists=True, - argstr="--inputLandmarksFilename %s") + argstr="--inputLandmarksFilename %s", + ) inputLandmarkNames = InputMultiPath( traits.Str, - desc= - " A target input landmark name to be edited. This should be listed in the inputLandmakrFilename Given. ", + desc=" A target input landmark name to be edited. This should be listed in the inputLandmakrFilename Given. ", sep=",", - argstr="--inputLandmarkNames %s") + argstr="--inputLandmarkNames %s", + ) setCutDirectionForLandmark = InputMultiPath( traits.Str, - desc= - "Setting the cutting out direction of the input binary image to the one of anterior, posterior, left, right, superior or posterior. (ENUMERATION: ANTERIOR, POSTERIOR, LEFT, RIGHT, SUPERIOR, POSTERIOR) ", + desc="Setting the cutting out direction of the input binary image to the one of anterior, posterior, left, right, superior or posterior. 
(ENUMERATION: ANTERIOR, POSTERIOR, LEFT, RIGHT, SUPERIOR, POSTERIOR) ", sep=",", - argstr="--setCutDirectionForLandmark %s") + argstr="--setCutDirectionForLandmark %s", + ) setCutDirectionForObliquePlane = InputMultiPath( traits.Str, - desc= - "If this is true, the mask will be thresholded out to the direction of inferior, posterior, and/or left. Default behavrior is that cutting out to the direction of superior, anterior and/or right. ", + desc="If this is true, the mask will be thresholded out to the direction of inferior, posterior, and/or left. Default behavrior is that cutting out to the direction of superior, anterior and/or right. ", sep=",", - argstr="--setCutDirectionForObliquePlane %s") + argstr="--setCutDirectionForObliquePlane %s", + ) inputLandmarkNamesForObliquePlane = InputMultiPath( traits.Str, - desc= - " Three subset landmark names of inputLandmarksFilename for a oblique plane computation. The plane computed for binary volume editing. ", + desc=" Three subset landmark names of inputLandmarksFilename for a oblique plane computation. The plane computed for binary volume editing. ", sep=",", - argstr="--inputLandmarkNamesForObliquePlane %s") + argstr="--inputLandmarkNamesForObliquePlane %s", + ) class BinaryMaskEditorBasedOnLandmarksOutputSpec(TraitedSpec): outputBinaryVolume = File( - desc="Output binary image in which to be edited", exists=True) + desc="Output binary image in which to be edited", exists=True + ) class BinaryMaskEditorBasedOnLandmarks(SEMLikeCommandLine): @@ -601,50 +625,56 @@ class BinaryMaskEditorBasedOnLandmarks(SEMLikeCommandLine): input_spec = BinaryMaskEditorBasedOnLandmarksInputSpec output_spec = BinaryMaskEditorBasedOnLandmarksOutputSpec _cmd = " BinaryMaskEditorBasedOnLandmarks " - _outputs_filenames = {'outputBinaryVolume': 'outputBinaryVolume.nii'} + _outputs_filenames = {"outputBinaryVolume": "outputBinaryVolume.nii"} _redirect_x = False class BRAINSMultiSTAPLEInputSpec(CommandLineInputSpec): inputCompositeT1Volume = File( - desc= - "Composite T1, all label maps transofrmed into the space for this image.", + desc="Composite T1, all label maps transofrmed into the space for this image.", exists=True, - argstr="--inputCompositeT1Volume %s") + argstr="--inputCompositeT1Volume %s", + ) inputLabelVolume = InputMultiPath( File(exists=True), desc="The list of proobabilityimages.", - argstr="--inputLabelVolume %s...") + argstr="--inputLabelVolume %s...", + ) inputTransform = InputMultiPath( File(exists=True), desc="transforms to apply to label volumes", - argstr="--inputTransform %s...") + argstr="--inputTransform %s...", + ) labelForUndecidedPixels = traits.Int( - desc="Label for undecided pixels", - argstr="--labelForUndecidedPixels %d") + desc="Label for undecided pixels", argstr="--labelForUndecidedPixels %d" + ) resampledVolumePrefix = traits.Str( desc="if given, write out resampled volumes with this prefix", - argstr="--resampledVolumePrefix %s") + argstr="--resampledVolumePrefix %s", + ) skipResampling = traits.Bool( - desc="Omit resampling images into reference space", - argstr="--skipResampling ") + desc="Omit resampling images into reference space", argstr="--skipResampling " + ) outputMultiSTAPLE = traits.Either( traits.Bool, File(), hash_files=False, desc="the MultiSTAPLE average of input label volumes", - argstr="--outputMultiSTAPLE %s") + argstr="--outputMultiSTAPLE %s", + ) outputConfusionMatrix = traits.Either( traits.Bool, File(), hash_files=False, desc="Confusion Matrix", - argstr="--outputConfusionMatrix %s") + 
argstr="--outputConfusionMatrix %s", + ) class BRAINSMultiSTAPLEOutputSpec(TraitedSpec): outputMultiSTAPLE = File( - desc="the MultiSTAPLE average of input label volumes", exists=True) + desc="the MultiSTAPLE average of input label volumes", exists=True + ) outputConfusionMatrix = File(desc="Confusion Matrix", exists=True) @@ -661,8 +691,8 @@ class BRAINSMultiSTAPLE(SEMLikeCommandLine): output_spec = BRAINSMultiSTAPLEOutputSpec _cmd = " BRAINSMultiSTAPLE " _outputs_filenames = { - 'outputMultiSTAPLE': 'outputMultiSTAPLE.nii', - 'outputConfusionMatrix': 'outputConfusionMatrixh5|mat|txt' + "outputMultiSTAPLE": "outputMultiSTAPLE.nii", + "outputConfusionMatrix": "outputConfusionMatrixh5|mat|txt", } _redirect_x = False @@ -671,90 +701,97 @@ class BRAINSABCInputSpec(CommandLineInputSpec): inputVolumes = InputMultiPath( File(exists=True), desc="The list of input image files to be segmented.", - argstr="--inputVolumes %s...") + argstr="--inputVolumes %s...", + ) atlasDefinition = File( desc="Contains all parameters for Atlas", exists=True, - argstr="--atlasDefinition %s") + argstr="--atlasDefinition %s", + ) restoreState = File( desc="The initial state for the registration process", exists=True, - argstr="--restoreState %s") + argstr="--restoreState %s", + ) saveState = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "(optional) Filename to which save the final state of the registration", - argstr="--saveState %s") + desc="(optional) Filename to which save the final state of the registration", + argstr="--saveState %s", + ) inputVolumeTypes = InputMultiPath( traits.Str, desc="The list of input image types corresponding to the inputVolumes.", sep=",", - argstr="--inputVolumeTypes %s") + argstr="--inputVolumeTypes %s", + ) outputDir = traits.Either( traits.Bool, Directory(), hash_files=False, desc="Ouput directory", - argstr="--outputDir %s") + argstr="--outputDir %s", + ) atlasToSubjectTransformType = traits.Enum( "Identity", "Rigid", "Affine", "BSpline", "SyN", - desc= - " What type of linear transform type do you want to use to register the atlas to the reference subject image.", - argstr="--atlasToSubjectTransformType %s") + desc=" What type of linear transform type do you want to use to register the atlas to the reference subject image.", + argstr="--atlasToSubjectTransformType %s", + ) atlasToSubjectTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="The transform from atlas to the subject", - argstr="--atlasToSubjectTransform %s") + argstr="--atlasToSubjectTransform %s", + ) atlasToSubjectInitialTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="The initial transform from atlas to the subject", - argstr="--atlasToSubjectInitialTransform %s") + argstr="--atlasToSubjectInitialTransform %s", + ) subjectIntermodeTransformType = traits.Enum( "Identity", "Rigid", "Affine", "BSpline", - desc= - " What type of linear transform type do you want to use to register the atlas to the reference subject image.", - argstr="--subjectIntermodeTransformType %s") + desc=" What type of linear transform type do you want to use to register the atlas to the reference subject image.", + argstr="--subjectIntermodeTransformType %s", + ) outputVolumes = traits.Either( traits.Bool, - InputMultiPath(File(), ), + InputMultiPath(File(),), hash_files=False, - desc= - "Corrected Output Images: should specify the same number of images as inputVolume, if only one element is given, then it is used as a file pattern where %s is replaced by the imageVolumeType, and %d by 
the index list location.", - argstr="--outputVolumes %s...") + desc="Corrected Output Images: should specify the same number of images as inputVolume, if only one element is given, then it is used as a file pattern where %s is replaced by the imageVolumeType, and %d by the index list location.", + argstr="--outputVolumes %s...", + ) outputLabels = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Label Image", - argstr="--outputLabels %s") + argstr="--outputLabels %s", + ) outputDirtyLabels = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Dirty Label Image", - argstr="--outputDirtyLabels %s") + argstr="--outputDirtyLabels %s", + ) posteriorTemplate = traits.Str( desc="filename template for Posterior output files", - argstr="--posteriorTemplate %s") + argstr="--posteriorTemplate %s", + ) outputFormat = traits.Enum( - "NIFTI", - "Meta", - "Nrrd", - desc="Output format", - argstr="--outputFormat %s") + "NIFTI", "Meta", "Nrrd", desc="Output format", argstr="--outputFormat %s" + ) interpolationMode = traits.Enum( "BSpline", "NearestNeighbor", @@ -766,93 +803,97 @@ class BRAINSABCInputSpec(CommandLineInputSpec): "Welch", "Lanczos", "Blackman", - desc= - "Type of interpolation to be used when applying transform to moving volume. Options are Linear, NearestNeighbor, BSpline, WindowedSinc, or ResampleInPlace. The ResampleInPlace option will create an image with the same discrete voxel values and will adjust the origin and direction of the physical space interpretation.", - argstr="--interpolationMode %s") - maxIterations = traits.Int( - desc="Filter iterations", argstr="--maxIterations %d") + desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, NearestNeighbor, BSpline, WindowedSinc, or ResampleInPlace. The ResampleInPlace option will create an image with the same discrete voxel values and will adjust the origin and direction of the physical space interpretation.", + argstr="--interpolationMode %s", + ) + maxIterations = traits.Int(desc="Filter iterations", argstr="--maxIterations %d") medianFilterSize = InputMultiPath( traits.Int, - desc= - "The radius for the optional MedianImageFilter preprocessing in all 3 directions.", + desc="The radius for the optional MedianImageFilter preprocessing in all 3 directions.", sep=",", - argstr="--medianFilterSize %s") + argstr="--medianFilterSize %s", + ) filterIteration = traits.Int( - desc="Filter iterations", argstr="--filterIteration %d") + desc="Filter iterations", argstr="--filterIteration %d" + ) filterTimeStep = traits.Float( - desc= - "Filter time step should be less than (PixelSpacing/(1^(DIM+1)), value is set to negative, then allow automatic setting of this value. ", - argstr="--filterTimeStep %f") + desc="Filter time step should be less than (PixelSpacing/(1^(DIM+1)), value is set to negative, then allow automatic setting of this value. 
", + argstr="--filterTimeStep %f", + ) filterMethod = traits.Enum( "None", "CurvatureFlow", "GradientAnisotropicDiffusion", "Median", desc="Filter method for preprocessing of registration", - argstr="--filterMethod %s") - maxBiasDegree = traits.Int( - desc="Maximum bias degree", argstr="--maxBiasDegree %d") + argstr="--filterMethod %s", + ) + maxBiasDegree = traits.Int(desc="Maximum bias degree", argstr="--maxBiasDegree %d") useKNN = traits.Bool( - desc="Use the KNN stage of estimating posteriors.", argstr="--useKNN ") + desc="Use the KNN stage of estimating posteriors.", argstr="--useKNN " + ) purePlugsThreshold = traits.Float( - desc= - "If this threshold value is greater than zero, only pure samples are used to compute the distributions in EM classification, and only pure samples are used for KNN training. The default value is set to 0, that means not using pure plugs. However, a value of 0.2 is suggested if you want to activate using pure plugs option.", - argstr="--purePlugsThreshold %f") + desc="If this threshold value is greater than zero, only pure samples are used to compute the distributions in EM classification, and only pure samples are used for KNN training. The default value is set to 0, that means not using pure plugs. However, a value of 0.2 is suggested if you want to activate using pure plugs option.", + argstr="--purePlugsThreshold %f", + ) numberOfSubSamplesInEachPlugArea = InputMultiPath( traits.Int, - desc= - "Number of continous index samples taken at each direction of lattice space for each plug volume.", + desc="Number of continous index samples taken at each direction of lattice space for each plug volume.", sep=",", - argstr="--numberOfSubSamplesInEachPlugArea %s") + argstr="--numberOfSubSamplesInEachPlugArea %s", + ) atlasWarpingOff = traits.Bool( - desc="Deformable registration of atlas to subject", - argstr="--atlasWarpingOff ") + desc="Deformable registration of atlas to subject", argstr="--atlasWarpingOff " + ) gridSize = InputMultiPath( traits.Int, desc="Grid size for atlas warping with BSplines", sep=",", - argstr="--gridSize %s") + argstr="--gridSize %s", + ) defaultSuffix = traits.Str(argstr="--defaultSuffix %s") implicitOutputs = traits.Either( traits.Bool, - InputMultiPath(File(), ), + InputMultiPath(File(),), hash_files=False, - desc= - "Outputs to be made available to NiPype. Needed because not all BRAINSABC outputs have command line arguments.", - argstr="--implicitOutputs %s...") + desc="Outputs to be made available to NiPype. Needed because not all BRAINSABC outputs have command line arguments.", + argstr="--implicitOutputs %s...", + ) debuglevel = traits.Int( - desc= - "Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", - argstr="--debuglevel %d") + desc="Display debug messages, and produce debug intermediate results. 
class BRAINSABCOutputSpec(TraitedSpec): saveState = File( - desc= - "(optional) Filename to which save the final state of the registration", - exists=True) + desc="(optional) Filename to which to save the final state of the registration", + exists=True, + ) outputDir = Directory(desc="Ouput directory", exists=True) atlasToSubjectTransform = File( - desc="The transform from atlas to the subject", exists=True) + desc="The transform from atlas to the subject", exists=True + ) atlasToSubjectInitialTransform = File( - desc="The initial transform from atlas to the subject", exists=True) + desc="The initial transform from atlas to the subject", exists=True + ) outputVolumes = OutputMultiPath( File(exists=True), - desc= - "Corrected Output Images: should specify the same number of images as inputVolume, if only one element is given, then it is used as a file pattern where %s is replaced by the imageVolumeType, and %d by the index list location." + desc="Corrected Output Images: should specify the same number of images as inputVolume, if only one element is given, then it is used as a file pattern where %s is replaced by the imageVolumeType, and %d by the index list location.", ) outputLabels = File(desc="Output Label Image", exists=True) outputDirtyLabels = File(desc="Output Dirty Label Image", exists=True) implicitOutputs = OutputMultiPath( File(exists=True), - desc= - "Outputs to be made available to NiPype. Needed because not all BRAINSABC outputs have command line arguments." + desc="Outputs to be made available to NiPype. 
Needed because not all BRAINSABC outputs have command line arguments.", ) @@ -869,44 +910,52 @@ class BRAINSABC(SEMLikeCommandLine): output_spec = BRAINSABCOutputSpec _cmd = " BRAINSABC " _outputs_filenames = { - 'saveState': 'saveState.h5', - 'outputLabels': 'outputLabels.nii.gz', - 'atlasToSubjectTransform': 'atlasToSubjectTransform.h5', - 'atlasToSubjectInitialTransform': 'atlasToSubjectInitialTransform.h5', - 'outputDirtyLabels': 'outputDirtyLabels.nii.gz', - 'outputVolumes': 'outputVolumes.nii.gz', - 'outputDir': 'outputDir', - 'implicitOutputs': 'implicitOutputs.nii.gz' + "saveState": "saveState.h5", + "outputLabels": "outputLabels.nii.gz", + "atlasToSubjectTransform": "atlasToSubjectTransform.h5", + "atlasToSubjectInitialTransform": "atlasToSubjectInitialTransform.h5", + "outputDirtyLabels": "outputDirtyLabels.nii.gz", + "outputVolumes": "outputVolumes.nii.gz", + "outputDir": "outputDir", + "implicitOutputs": "implicitOutputs.nii.gz", } _redirect_x = False class ESLRInputSpec(CommandLineInputSpec): inputVolume = File( - desc="Input Label Volume", exists=True, argstr="--inputVolume %s") + desc="Input Label Volume", exists=True, argstr="--inputVolume %s" + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Label Volume", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) low = traits.Int( - desc="The lower bound of the labels to be used.", argstr="--low %d") + desc="The lower bound of the labels to be used.", argstr="--low %d" + ) high = traits.Int( - desc="The higher bound of the labels to be used.", argstr="--high %d") + desc="The higher bound of the labels to be used.", argstr="--high %d" + ) closingSize = traits.Int( - desc="The closing size for hole filling.", argstr="--closingSize %d") + desc="The closing size for hole filling.", argstr="--closingSize %d" + ) openingSize = traits.Int( - desc="The opening size for hole filling.", argstr="--openingSize %d") + desc="The opening size for hole filling.", argstr="--openingSize %d" + ) safetySize = traits.Int( - desc="The safetySize size for the clipping region.", - argstr="--safetySize %d") + desc="The safetySize size for the clipping region.", argstr="--safetySize %d" + ) preserveOutside = traits.Bool( desc="For values outside the specified range, preserve those values.", - argstr="--preserveOutside ") + argstr="--preserveOutside ", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class ESLROutputSpec(TraitedSpec): @@ -925,5 +974,5 @@ class ESLR(SEMLikeCommandLine): input_spec = ESLRInputSpec output_spec = ESLROutputSpec _cmd = " ESLR " - _outputs_filenames = {'outputVolume': 'outputVolume.nii.gz'} + _outputs_filenames = {"outputVolume": "outputVolume.nii.gz"} _redirect_x = False diff --git a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSABC.py b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSABC.py index a6be21c287..e43cdf412c 100644 --- a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSABC.py +++ b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSABC.py @@ -4,103 +4,66 @@ def test_BRAINSABC_inputs(): input_map = dict( - args=dict(argstr='%s', ), - atlasDefinition=dict( - argstr='--atlasDefinition %s', - extensions=None, - ), + args=dict(argstr="%s",), + atlasDefinition=dict(argstr="--atlasDefinition %s", extensions=None,), atlasToSubjectInitialTransform=dict( - argstr='--atlasToSubjectInitialTransform %s', - 
hash_files=False, + argstr="--atlasToSubjectInitialTransform %s", hash_files=False, ), atlasToSubjectTransform=dict( - argstr='--atlasToSubjectTransform %s', - hash_files=False, - ), - atlasToSubjectTransformType=dict( - argstr='--atlasToSubjectTransformType %s', ), - atlasWarpingOff=dict(argstr='--atlasWarpingOff ', ), - debuglevel=dict(argstr='--debuglevel %d', ), - defaultSuffix=dict(argstr='--defaultSuffix %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - filterIteration=dict(argstr='--filterIteration %d', ), - filterMethod=dict(argstr='--filterMethod %s', ), - filterTimeStep=dict(argstr='--filterTimeStep %f', ), - gridSize=dict( - argstr='--gridSize %s', - sep=',', - ), - implicitOutputs=dict( - argstr='--implicitOutputs %s...', - hash_files=False, - ), - inputVolumeTypes=dict( - argstr='--inputVolumeTypes %s', - sep=',', - ), - inputVolumes=dict(argstr='--inputVolumes %s...', ), - interpolationMode=dict(argstr='--interpolationMode %s', ), - maxBiasDegree=dict(argstr='--maxBiasDegree %d', ), - maxIterations=dict(argstr='--maxIterations %d', ), - medianFilterSize=dict( - argstr='--medianFilterSize %s', - sep=',', + argstr="--atlasToSubjectTransform %s", hash_files=False, ), + atlasToSubjectTransformType=dict(argstr="--atlasToSubjectTransformType %s",), + atlasWarpingOff=dict(argstr="--atlasWarpingOff ",), + debuglevel=dict(argstr="--debuglevel %d",), + defaultSuffix=dict(argstr="--defaultSuffix %s",), + environ=dict(nohash=True, usedefault=True,), + filterIteration=dict(argstr="--filterIteration %d",), + filterMethod=dict(argstr="--filterMethod %s",), + filterTimeStep=dict(argstr="--filterTimeStep %f",), + gridSize=dict(argstr="--gridSize %s", sep=",",), + implicitOutputs=dict(argstr="--implicitOutputs %s...", hash_files=False,), + inputVolumeTypes=dict(argstr="--inputVolumeTypes %s", sep=",",), + inputVolumes=dict(argstr="--inputVolumes %s...",), + interpolationMode=dict(argstr="--interpolationMode %s",), + maxBiasDegree=dict(argstr="--maxBiasDegree %d",), + maxIterations=dict(argstr="--maxIterations %d",), + medianFilterSize=dict(argstr="--medianFilterSize %s", sep=",",), numberOfSubSamplesInEachPlugArea=dict( - argstr='--numberOfSubSamplesInEachPlugArea %s', - sep=',', - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputDir=dict( - argstr='--outputDir %s', - hash_files=False, - ), - outputDirtyLabels=dict( - argstr='--outputDirtyLabels %s', - hash_files=False, - ), - outputFormat=dict(argstr='--outputFormat %s', ), - outputLabels=dict( - argstr='--outputLabels %s', - hash_files=False, - ), - outputVolumes=dict( - argstr='--outputVolumes %s...', - hash_files=False, - ), - posteriorTemplate=dict(argstr='--posteriorTemplate %s', ), - purePlugsThreshold=dict(argstr='--purePlugsThreshold %f', ), - restoreState=dict( - argstr='--restoreState %s', - extensions=None, - ), - saveState=dict( - argstr='--saveState %s', - hash_files=False, + argstr="--numberOfSubSamplesInEachPlugArea %s", sep=",", ), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputDir=dict(argstr="--outputDir %s", hash_files=False,), + outputDirtyLabels=dict(argstr="--outputDirtyLabels %s", hash_files=False,), + outputFormat=dict(argstr="--outputFormat %s",), + outputLabels=dict(argstr="--outputLabels %s", hash_files=False,), + outputVolumes=dict(argstr="--outputVolumes %s...", hash_files=False,), + posteriorTemplate=dict(argstr="--posteriorTemplate %s",), + purePlugsThreshold=dict(argstr="--purePlugsThreshold %f",), + restoreState=dict(argstr="--restoreState %s", 
extensions=None,), + saveState=dict(argstr="--saveState %s", hash_files=False,), subjectIntermodeTransformType=dict( - argstr='--subjectIntermodeTransformType %s', ), - useKNN=dict(argstr='--useKNN ', ), - writeLess=dict(argstr='--writeLess ', ), + argstr="--subjectIntermodeTransformType %s", + ), + useKNN=dict(argstr="--useKNN ",), + writeLess=dict(argstr="--writeLess ",), ) inputs = BRAINSABC.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSABC_outputs(): output_map = dict( - atlasToSubjectInitialTransform=dict(extensions=None, ), - atlasToSubjectTransform=dict(extensions=None, ), + atlasToSubjectInitialTransform=dict(extensions=None,), + atlasToSubjectTransform=dict(extensions=None,), implicitOutputs=dict(), outputDir=dict(), - outputDirtyLabels=dict(extensions=None, ), - outputLabels=dict(extensions=None, ), + outputDirtyLabels=dict(extensions=None,), + outputLabels=dict(extensions=None,), outputVolumes=dict(), - saveState=dict(extensions=None, ), + saveState=dict(extensions=None,), ) outputs = BRAINSABC.output_spec() diff --git a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSConstellationDetector.py b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSConstellationDetector.py index 7f1f2b747b..a2eb766db3 100644 --- a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSConstellationDetector.py +++ b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSConstellationDetector.py @@ -4,137 +4,82 @@ def test_BRAINSConstellationDetector_inputs(): input_map = dict( - BackgroundFillValue=dict(argstr='--BackgroundFillValue %s', ), - LLSModel=dict( - argstr='--LLSModel %s', - extensions=None, - ), - acLowerBound=dict(argstr='--acLowerBound %f', ), - args=dict(argstr='%s', ), - atlasLandmarkWeights=dict( - argstr='--atlasLandmarkWeights %s', - extensions=None, - ), - atlasLandmarks=dict( - argstr='--atlasLandmarks %s', - extensions=None, - ), - atlasVolume=dict( - argstr='--atlasVolume %s', - extensions=None, - ), - cutOutHeadInOutputVolume=dict(argstr='--cutOutHeadInOutputVolume ', ), - debug=dict(argstr='--debug ', ), - environ=dict( - nohash=True, - usedefault=True, - ), - forceACPoint=dict( - argstr='--forceACPoint %s', - sep=',', - ), + BackgroundFillValue=dict(argstr="--BackgroundFillValue %s",), + LLSModel=dict(argstr="--LLSModel %s", extensions=None,), + acLowerBound=dict(argstr="--acLowerBound %f",), + args=dict(argstr="%s",), + atlasLandmarkWeights=dict(argstr="--atlasLandmarkWeights %s", extensions=None,), + atlasLandmarks=dict(argstr="--atlasLandmarks %s", extensions=None,), + atlasVolume=dict(argstr="--atlasVolume %s", extensions=None,), + cutOutHeadInOutputVolume=dict(argstr="--cutOutHeadInOutputVolume ",), + debug=dict(argstr="--debug ",), + environ=dict(nohash=True, usedefault=True,), + forceACPoint=dict(argstr="--forceACPoint %s", sep=",",), forceHoughEyeDetectorReportFailure=dict( - argstr='--forceHoughEyeDetectorReportFailure ', ), - forcePCPoint=dict( - argstr='--forcePCPoint %s', - sep=',', - ), - forceRPPoint=dict( - argstr='--forceRPPoint %s', - sep=',', - ), - forceVN4Point=dict( - argstr='--forceVN4Point %s', - sep=',', - ), - houghEyeDetectorMode=dict(argstr='--houghEyeDetectorMode %d', ), - inputLandmarksEMSP=dict( - argstr='--inputLandmarksEMSP %s', - extensions=None, - ), - inputTemplateModel=dict( - argstr='--inputTemplateModel %s', - extensions=None, - ), - inputVolume=dict( - 
argstr='--inputVolume %s', - extensions=None, - ), - interpolationMode=dict(argstr='--interpolationMode %s', ), - mspQualityLevel=dict(argstr='--mspQualityLevel %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - otsuPercentileThreshold=dict(argstr='--otsuPercentileThreshold %f', ), + argstr="--forceHoughEyeDetectorReportFailure ", + ), + forcePCPoint=dict(argstr="--forcePCPoint %s", sep=",",), + forceRPPoint=dict(argstr="--forceRPPoint %s", sep=",",), + forceVN4Point=dict(argstr="--forceVN4Point %s", sep=",",), + houghEyeDetectorMode=dict(argstr="--houghEyeDetectorMode %d",), + inputLandmarksEMSP=dict(argstr="--inputLandmarksEMSP %s", extensions=None,), + inputTemplateModel=dict(argstr="--inputTemplateModel %s", extensions=None,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + interpolationMode=dict(argstr="--interpolationMode %s",), + mspQualityLevel=dict(argstr="--mspQualityLevel %d",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + otsuPercentileThreshold=dict(argstr="--otsuPercentileThreshold %f",), outputLandmarksInACPCAlignedSpace=dict( - argstr='--outputLandmarksInACPCAlignedSpace %s', - hash_files=False, + argstr="--outputLandmarksInACPCAlignedSpace %s", hash_files=False, ), outputLandmarksInInputSpace=dict( - argstr='--outputLandmarksInInputSpace %s', - hash_files=False, - ), - outputMRML=dict( - argstr='--outputMRML %s', - hash_files=False, + argstr="--outputLandmarksInInputSpace %s", hash_files=False, ), + outputMRML=dict(argstr="--outputMRML %s", hash_files=False,), outputResampledVolume=dict( - argstr='--outputResampledVolume %s', - hash_files=False, - ), - outputTransform=dict( - argstr='--outputTransform %s', - hash_files=False, + argstr="--outputResampledVolume %s", hash_files=False, ), + outputTransform=dict(argstr="--outputTransform %s", hash_files=False,), outputUntransformedClippedVolume=dict( - argstr='--outputUntransformedClippedVolume %s', - hash_files=False, + argstr="--outputUntransformedClippedVolume %s", hash_files=False, ), outputVerificationScript=dict( - argstr='--outputVerificationScript %s', - hash_files=False, - ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, + argstr="--outputVerificationScript %s", hash_files=False, ), - rVN4=dict(argstr='--rVN4 %f', ), - rac=dict(argstr='--rac %f', ), - rescaleIntensities=dict(argstr='--rescaleIntensities ', ), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + rVN4=dict(argstr="--rVN4 %f",), + rac=dict(argstr="--rac %f",), + rescaleIntensities=dict(argstr="--rescaleIntensities ",), rescaleIntensitiesOutputRange=dict( - argstr='--rescaleIntensitiesOutputRange %s', - sep=',', - ), - resultsDir=dict( - argstr='--resultsDir %s', - hash_files=False, - ), - rmpj=dict(argstr='--rmpj %f', ), - rpc=dict(argstr='--rpc %f', ), - trimRescaledIntensities=dict(argstr='--trimRescaledIntensities %f', ), - verbose=dict(argstr='--verbose ', ), - writeBranded2DImage=dict( - argstr='--writeBranded2DImage %s', - hash_files=False, - ), - writedebuggingImagesLevel=dict( - argstr='--writedebuggingImagesLevel %d', ), + argstr="--rescaleIntensitiesOutputRange %s", sep=",", + ), + resultsDir=dict(argstr="--resultsDir %s", hash_files=False,), + rmpj=dict(argstr="--rmpj %f",), + rpc=dict(argstr="--rpc %f",), + trimRescaledIntensities=dict(argstr="--trimRescaledIntensities %f",), + verbose=dict(argstr="--verbose ",), + writeBranded2DImage=dict(argstr="--writeBranded2DImage %s", hash_files=False,), + 
writedebuggingImagesLevel=dict(argstr="--writedebuggingImagesLevel %d",), ) inputs = BRAINSConstellationDetector.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSConstellationDetector_outputs(): output_map = dict( - outputLandmarksInACPCAlignedSpace=dict(extensions=None, ), - outputLandmarksInInputSpace=dict(extensions=None, ), - outputMRML=dict(extensions=None, ), - outputResampledVolume=dict(extensions=None, ), - outputTransform=dict(extensions=None, ), - outputUntransformedClippedVolume=dict(extensions=None, ), - outputVerificationScript=dict(extensions=None, ), - outputVolume=dict(extensions=None, ), + outputLandmarksInACPCAlignedSpace=dict(extensions=None,), + outputLandmarksInInputSpace=dict(extensions=None,), + outputMRML=dict(extensions=None,), + outputResampledVolume=dict(extensions=None,), + outputTransform=dict(extensions=None,), + outputUntransformedClippedVolume=dict(extensions=None,), + outputVerificationScript=dict(extensions=None,), + outputVolume=dict(extensions=None,), resultsDir=dict(), - writeBranded2DImage=dict(extensions=None, ), + writeBranded2DImage=dict(extensions=None,), ) outputs = BRAINSConstellationDetector.output_spec() diff --git a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSCreateLabelMapFromProbabilityMaps.py b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSCreateLabelMapFromProbabilityMaps.py index 5d74d787b5..06bf0165ca 100644 --- a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSCreateLabelMapFromProbabilityMaps.py +++ b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSCreateLabelMapFromProbabilityMaps.py @@ -4,43 +4,27 @@ def test_BRAINSCreateLabelMapFromProbabilityMaps_inputs(): input_map = dict( - args=dict(argstr='%s', ), - cleanLabelVolume=dict( - argstr='--cleanLabelVolume %s', - hash_files=False, - ), - dirtyLabelVolume=dict( - argstr='--dirtyLabelVolume %s', - hash_files=False, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - foregroundPriors=dict( - argstr='--foregroundPriors %s', - sep=',', - ), - inclusionThreshold=dict(argstr='--inclusionThreshold %f', ), - inputProbabilityVolume=dict(argstr='--inputProbabilityVolume %s...', ), - nonAirRegionMask=dict( - argstr='--nonAirRegionMask %s', - extensions=None, - ), - priorLabelCodes=dict( - argstr='--priorLabelCodes %s', - sep=',', - ), + args=dict(argstr="%s",), + cleanLabelVolume=dict(argstr="--cleanLabelVolume %s", hash_files=False,), + dirtyLabelVolume=dict(argstr="--dirtyLabelVolume %s", hash_files=False,), + environ=dict(nohash=True, usedefault=True,), + foregroundPriors=dict(argstr="--foregroundPriors %s", sep=",",), + inclusionThreshold=dict(argstr="--inclusionThreshold %f",), + inputProbabilityVolume=dict(argstr="--inputProbabilityVolume %s...",), + nonAirRegionMask=dict(argstr="--nonAirRegionMask %s", extensions=None,), + priorLabelCodes=dict(argstr="--priorLabelCodes %s", sep=",",), ) inputs = BRAINSCreateLabelMapFromProbabilityMaps.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSCreateLabelMapFromProbabilityMaps_outputs(): output_map = dict( - cleanLabelVolume=dict(extensions=None, ), - dirtyLabelVolume=dict(extensions=None, ), + cleanLabelVolume=dict(extensions=None,), + dirtyLabelVolume=dict(extensions=None,), ) outputs = 
BRAINSCreateLabelMapFromProbabilityMaps.output_spec() diff --git a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSCut.py b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSCut.py index 4731029fb1..c72579c470 100644 --- a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSCut.py +++ b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSCut.py @@ -4,41 +4,35 @@ def test_BRAINSCut_inputs(): input_map = dict( - NoTrainingVectorShuffling=dict( - argstr='--NoTrainingVectorShuffling ', ), - applyModel=dict(argstr='--applyModel ', ), - args=dict(argstr='%s', ), - computeSSEOn=dict(argstr='--computeSSEOn ', ), - createVectors=dict(argstr='--createVectors ', ), - environ=dict( - nohash=True, - usedefault=True, - ), - generateProbability=dict(argstr='--generateProbability ', ), - histogramEqualization=dict(argstr='--histogramEqualization ', ), - method=dict(argstr='--method %s', ), + NoTrainingVectorShuffling=dict(argstr="--NoTrainingVectorShuffling ",), + applyModel=dict(argstr="--applyModel ",), + args=dict(argstr="%s",), + computeSSEOn=dict(argstr="--computeSSEOn ",), + createVectors=dict(argstr="--createVectors ",), + environ=dict(nohash=True, usedefault=True,), + generateProbability=dict(argstr="--generateProbability ",), + histogramEqualization=dict(argstr="--histogramEqualization ",), + method=dict(argstr="--method %s",), modelConfigurationFilename=dict( - argstr='--modelConfigurationFilename %s', - extensions=None, - ), - modelFilename=dict(argstr='--modelFilename %s', ), - multiStructureThreshold=dict(argstr='--multiStructureThreshold ', ), - netConfiguration=dict( - argstr='--netConfiguration %s', - extensions=None, + argstr="--modelConfigurationFilename %s", extensions=None, ), - numberOfTrees=dict(argstr='--numberOfTrees %d', ), - randomTreeDepth=dict(argstr='--randomTreeDepth %d', ), - trainModel=dict(argstr='--trainModel ', ), - trainModelStartIndex=dict(argstr='--trainModelStartIndex %d', ), - validate=dict(argstr='--validate ', ), - verbose=dict(argstr='--verbose %d', ), + modelFilename=dict(argstr="--modelFilename %s",), + multiStructureThreshold=dict(argstr="--multiStructureThreshold ",), + netConfiguration=dict(argstr="--netConfiguration %s", extensions=None,), + numberOfTrees=dict(argstr="--numberOfTrees %d",), + randomTreeDepth=dict(argstr="--randomTreeDepth %d",), + trainModel=dict(argstr="--trainModel ",), + trainModelStartIndex=dict(argstr="--trainModelStartIndex %d",), + validate=dict(argstr="--validate ",), + verbose=dict(argstr="--verbose %d",), ) inputs = BRAINSCut.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSCut_outputs(): output_map = dict() outputs = BRAINSCut.output_spec() diff --git a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSMultiSTAPLE.py b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSMultiSTAPLE.py index 85364618b8..7f91e4e803 100644 --- a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSMultiSTAPLE.py +++ b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSMultiSTAPLE.py @@ -4,38 +4,32 @@ def test_BRAINSMultiSTAPLE_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), inputCompositeT1Volume=dict( - argstr='--inputCompositeT1Volume %s', - extensions=None, + argstr="--inputCompositeT1Volume %s", 
extensions=None, ), - inputLabelVolume=dict(argstr='--inputLabelVolume %s...', ), - inputTransform=dict(argstr='--inputTransform %s...', ), - labelForUndecidedPixels=dict(argstr='--labelForUndecidedPixels %d', ), + inputLabelVolume=dict(argstr="--inputLabelVolume %s...",), + inputTransform=dict(argstr="--inputTransform %s...",), + labelForUndecidedPixels=dict(argstr="--labelForUndecidedPixels %d",), outputConfusionMatrix=dict( - argstr='--outputConfusionMatrix %s', - hash_files=False, - ), - outputMultiSTAPLE=dict( - argstr='--outputMultiSTAPLE %s', - hash_files=False, + argstr="--outputConfusionMatrix %s", hash_files=False, ), - resampledVolumePrefix=dict(argstr='--resampledVolumePrefix %s', ), - skipResampling=dict(argstr='--skipResampling ', ), + outputMultiSTAPLE=dict(argstr="--outputMultiSTAPLE %s", hash_files=False,), + resampledVolumePrefix=dict(argstr="--resampledVolumePrefix %s",), + skipResampling=dict(argstr="--skipResampling ",), ) inputs = BRAINSMultiSTAPLE.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSMultiSTAPLE_outputs(): output_map = dict( - outputConfusionMatrix=dict(extensions=None, ), - outputMultiSTAPLE=dict(extensions=None, ), + outputConfusionMatrix=dict(extensions=None,), + outputMultiSTAPLE=dict(extensions=None,), ) outputs = BRAINSMultiSTAPLE.output_spec() diff --git a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSROIAuto.py b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSROIAuto.py index c4b68f598b..69562cc9ce 100644 --- a/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSROIAuto.py +++ b/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSROIAuto.py @@ -4,42 +4,30 @@ def test_BRAINSROIAuto_inputs(): input_map = dict( - ROIAutoDilateSize=dict(argstr='--ROIAutoDilateSize %f', ), - args=dict(argstr='%s', ), - closingSize=dict(argstr='--closingSize %f', ), - cropOutput=dict(argstr='--cropOutput ', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - maskOutput=dict(argstr='--maskOutput ', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - otsuPercentileThreshold=dict(argstr='--otsuPercentileThreshold %f', ), - outputROIMaskVolume=dict( - argstr='--outputROIMaskVolume %s', - hash_files=False, - ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - outputVolumePixelType=dict(argstr='--outputVolumePixelType %s', ), - thresholdCorrectionFactor=dict( - argstr='--thresholdCorrectionFactor %f', ), + ROIAutoDilateSize=dict(argstr="--ROIAutoDilateSize %f",), + args=dict(argstr="%s",), + closingSize=dict(argstr="--closingSize %f",), + cropOutput=dict(argstr="--cropOutput ",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + maskOutput=dict(argstr="--maskOutput ",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + otsuPercentileThreshold=dict(argstr="--otsuPercentileThreshold %f",), + outputROIMaskVolume=dict(argstr="--outputROIMaskVolume %s", hash_files=False,), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + outputVolumePixelType=dict(argstr="--outputVolumePixelType %s",), + thresholdCorrectionFactor=dict(argstr="--thresholdCorrectionFactor %f",), ) inputs = BRAINSROIAuto.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSROIAuto_outputs(): output_map = dict( - outputROIMaskVolume=dict(extensions=None, ), - outputVolume=dict(extensions=None, ), + outputROIMaskVolume=dict(extensions=None,), outputVolume=dict(extensions=None,), ) outputs = BRAINSROIAuto.output_spec() diff --git a/nipype/interfaces/semtools/segmentation/tests/test_auto_BinaryMaskEditorBasedOnLandmarks.py b/nipype/interfaces/semtools/segmentation/tests/test_auto_BinaryMaskEditorBasedOnLandmarks.py index 859af9132b..fd28644cdc 100644 --- a/nipype/interfaces/semtools/segmentation/tests/test_auto_BinaryMaskEditorBasedOnLandmarks.py +++ b/nipype/interfaces/semtools/segmentation/tests/test_auto_BinaryMaskEditorBasedOnLandmarks.py @@ -4,38 +4,22 @@ def test_BinaryMaskEditorBasedOnLandmarks_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputBinaryVolume=dict( - argstr='--inputBinaryVolume %s', - extensions=None, - ), - inputLandmarkNames=dict( - argstr='--inputLandmarkNames %s', - sep=',', - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputBinaryVolume=dict(argstr="--inputBinaryVolume %s", extensions=None,), + inputLandmarkNames=dict(argstr="--inputLandmarkNames %s", sep=",",), inputLandmarkNamesForObliquePlane=dict( - argstr='--inputLandmarkNamesForObliquePlane %s', - sep=',', + argstr="--inputLandmarkNamesForObliquePlane %s", sep=",", ), inputLandmarksFilename=dict( - argstr='--inputLandmarksFilename %s', - extensions=None, - ), - outputBinaryVolume=dict( - argstr='--outputBinaryVolume %s', - hash_files=False, + argstr="--inputLandmarksFilename %s", extensions=None, ), + outputBinaryVolume=dict(argstr="--outputBinaryVolume %s", hash_files=False,), setCutDirectionForLandmark=dict( - argstr='--setCutDirectionForLandmark %s', - sep=',', + argstr="--setCutDirectionForLandmark %s", sep=",", ), setCutDirectionForObliquePlane=dict( - argstr='--setCutDirectionForObliquePlane %s', - sep=',', + argstr="--setCutDirectionForObliquePlane %s", sep=",", ), ) inputs = BinaryMaskEditorBasedOnLandmarks.input_spec() @@ -43,8 +27,10 @@ def test_BinaryMaskEditorBasedOnLandmarks_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BinaryMaskEditorBasedOnLandmarks_outputs(): - output_map = dict(outputBinaryVolume=dict(extensions=None, ), ) + output_map = dict(outputBinaryVolume=dict(extensions=None,),) outputs = BinaryMaskEditorBasedOnLandmarks.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/segmentation/tests/test_auto_ESLR.py b/nipype/interfaces/semtools/segmentation/tests/test_auto_ESLR.py index 8c006051a7..2fe0fc16ce 100644 --- a/nipype/interfaces/semtools/segmentation/tests/test_auto_ESLR.py +++ b/nipype/interfaces/semtools/segmentation/tests/test_auto_ESLR.py @@ -4,34 +4,27 @@ def test_ESLR_inputs(): input_map = dict( - args=dict(argstr='%s', ), - closingSize=dict(argstr='--closingSize %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - high=dict(argstr='--high %d', ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - low=dict(argstr='--low %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - openingSize=dict(argstr='--openingSize %d', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - preserveOutside=dict(argstr='--preserveOutside ', ), - 
safetySize=dict(argstr='--safetySize %d', ), + args=dict(argstr="%s",), + closingSize=dict(argstr="--closingSize %d",), + environ=dict(nohash=True, usedefault=True,), + high=dict(argstr="--high %d",), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + low=dict(argstr="--low %d",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + openingSize=dict(argstr="--openingSize %d",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + preserveOutside=dict(argstr="--preserveOutside ",), + safetySize=dict(argstr="--safetySize %d",), ) inputs = ESLR.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ESLR_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = ESLR.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/testing/featuredetection.py b/nipype/interfaces/semtools/testing/featuredetection.py index e8f332c0a6..19e5076b0a 100644 --- a/nipype/interfaces/semtools/testing/featuredetection.py +++ b/nipype/interfaces/semtools/testing/featuredetection.py @@ -3,16 +3,28 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class SphericalCoordinateGenerationInputSpec(CommandLineInputSpec): inputAtlasImage = File( - desc="Input atlas image", exists=True, argstr="--inputAtlasImage %s") + desc="Input atlas image", exists=True, argstr="--inputAtlasImage %s" + ) outputPath = traits.Str( - desc="Output path for rho, phi and theta images", - argstr="--outputPath %s") + desc="Output path for rho, phi and theta images", argstr="--outputPath %s" + ) class SphericalCoordinateGenerationOutputSpec(TraitedSpec): diff --git a/nipype/interfaces/semtools/testing/generateaveragelmkfile.py b/nipype/interfaces/semtools/testing/generateaveragelmkfile.py index bbb414c366..3995a9b73d 100644 --- a/nipype/interfaces/semtools/testing/generateaveragelmkfile.py +++ b/nipype/interfaces/semtools/testing/generateaveragelmkfile.py @@ -3,7 +3,18 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os @@ -12,21 +23,22 @@ class GenerateAverageLmkFileInputSpec(CommandLineInputSpec): traits.Str, desc="Input landmark files names (.fcsv or .wts)", sep=",", - argstr="--inputLandmarkFiles %s") + argstr="--inputLandmarkFiles %s", + ) outputLandmarkFile = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Ouput landmark file name that includes average values for landmarks (.fcsv or .wts)", - argstr="--outputLandmarkFile %s") + desc="Output landmark file 
name that includes average values for landmarks (.fcsv or .wts)", + argstr="--outputLandmarkFile %s", + ) class GenerateAverageLmkFileOutputSpec(TraitedSpec): outputLandmarkFile = File( - desc= - "Ouput landmark file name that includes average values for landmarks (.fcsv or .wts)", - exists=True) + desc="Output landmark file name that includes average values for landmarks (.fcsv or .wts)", + exists=True, + ) class GenerateAverageLmkFile(SEMLikeCommandLine): @@ -43,5 +55,5 @@ class GenerateAverageLmkFile(SEMLikeCommandLine): input_spec = GenerateAverageLmkFileInputSpec output_spec = GenerateAverageLmkFileOutputSpec _cmd = " GenerateAverageLmkFile " - _outputs_filenames = {'outputLandmarkFile': 'outputLandmarkFile'} + _outputs_filenames = {"outputLandmarkFile": "outputLandmarkFile"} _redirect_x = False diff --git a/nipype/interfaces/semtools/testing/landmarkscompare.py b/nipype/interfaces/semtools/testing/landmarkscompare.py index 872d6d0df0..066a92f24b 100644 --- a/nipype/interfaces/semtools/testing/landmarkscompare.py +++ b/nipype/interfaces/semtools/testing/landmarkscompare.py @@ -3,7 +3,18 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os @@ -11,15 +22,17 @@ class LandmarksCompareInputSpec(CommandLineInputSpec): inputLandmarkFile1 = File( desc="First input landmark file (.fcsv or .wts)", exists=True, - argstr="--inputLandmarkFile1 %s") + argstr="--inputLandmarkFile1 %s", + ) inputLandmarkFile2 = File( desc="Second input landmark file (.fcsv or .wts)", exists=True, - argstr="--inputLandmarkFile2 %s") + argstr="--inputLandmarkFile2 %s", + ) tolerance = traits.Float( - desc= - "The maximum error (in mm) allowed in each direction of a landmark", - argstr="--tolerance %f") + desc="The maximum error (in mm) allowed in each direction of a landmark", + argstr="--tolerance %f", + ) class LandmarksCompareOutputSpec(TraitedSpec): diff --git a/nipype/interfaces/semtools/tests/test_auto_DWICompare.py b/nipype/interfaces/semtools/tests/test_auto_DWICompare.py index 305d60b6aa..a9a270d6a5 100644 --- a/nipype/interfaces/semtools/tests/test_auto_DWICompare.py +++ b/nipype/interfaces/semtools/tests/test_auto_DWICompare.py @@ -4,25 +4,18 @@ def test_DWICompare_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume1=dict( - argstr='--inputVolume1 %s', - extensions=None, - ), - inputVolume2=dict( - argstr='--inputVolume2 %s', - extensions=None, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume1=dict(argstr="--inputVolume1 %s", extensions=None,), + inputVolume2=dict(argstr="--inputVolume2 %s", extensions=None,), ) inputs = DWICompare.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DWICompare_outputs(): output_map = dict() outputs = DWICompare.output_spec() diff --git a/nipype/interfaces/semtools/tests/test_auto_DWISimpleCompare.py b/nipype/interfaces/semtools/tests/test_auto_DWISimpleCompare.py index 9140141254..f8f099104d 
100644 --- a/nipype/interfaces/semtools/tests/test_auto_DWISimpleCompare.py +++ b/nipype/interfaces/semtools/tests/test_auto_DWISimpleCompare.py @@ -4,26 +4,19 @@ def test_DWISimpleCompare_inputs(): input_map = dict( - args=dict(argstr='%s', ), - checkDWIData=dict(argstr='--checkDWIData ', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume1=dict( - argstr='--inputVolume1 %s', - extensions=None, - ), - inputVolume2=dict( - argstr='--inputVolume2 %s', - extensions=None, - ), + args=dict(argstr="%s",), + checkDWIData=dict(argstr="--checkDWIData ",), + environ=dict(nohash=True, usedefault=True,), + inputVolume1=dict(argstr="--inputVolume1 %s", extensions=None,), + inputVolume2=dict(argstr="--inputVolume2 %s", extensions=None,), ) inputs = DWISimpleCompare.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DWISimpleCompare_outputs(): output_map = dict() outputs = DWISimpleCompare.output_spec() diff --git a/nipype/interfaces/semtools/tests/test_auto_GenerateCsfClippedFromClassifiedImage.py b/nipype/interfaces/semtools/tests/test_auto_GenerateCsfClippedFromClassifiedImage.py index 1cdeab73f7..99d8d5d226 100644 --- a/nipype/interfaces/semtools/tests/test_auto_GenerateCsfClippedFromClassifiedImage.py +++ b/nipype/interfaces/semtools/tests/test_auto_GenerateCsfClippedFromClassifiedImage.py @@ -4,27 +4,20 @@ def test_GenerateCsfClippedFromClassifiedImage_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputCassifiedVolume=dict( - argstr='--inputCassifiedVolume %s', - extensions=None, - ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputCassifiedVolume=dict(argstr="--inputCassifiedVolume %s", extensions=None,), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), ) inputs = GenerateCsfClippedFromClassifiedImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GenerateCsfClippedFromClassifiedImage_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = GenerateCsfClippedFromClassifiedImage.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/__init__.py b/nipype/interfaces/semtools/utilities/__init__.py index b59f373cf9..2209064909 100644 --- a/nipype/interfaces/semtools/utilities/__init__.py +++ b/nipype/interfaces/semtools/utilities/__init__.py @@ -1,10 +1,23 @@ # -*- coding: utf-8 -*- from .brains import ( - BRAINSConstellationModeler, landmarksConstellationWeights, - BRAINSTrimForegroundInDirection, BRAINSLmkTransform, BRAINSMush, - BRAINSTransformConvert, landmarksConstellationAligner, BRAINSEyeDetector, - BRAINSLinearModelerEPCA, BRAINSInitializedControlPoints, - CleanUpOverlapLabels, BRAINSClipInferior, - GenerateLabelMapFromProbabilityMap, BRAINSAlignMSP, - BRAINSLandmarkInitializer, insertMidACPCpoint, BRAINSSnapShotWriter, - JointHistogram, ShuffleVectorsModule, ImageRegionPlotter) + BRAINSConstellationModeler, + landmarksConstellationWeights, + BRAINSTrimForegroundInDirection, + BRAINSLmkTransform, + BRAINSMush, + BRAINSTransformConvert, + landmarksConstellationAligner, + BRAINSEyeDetector, + 
BRAINSLinearModelerEPCA, + BRAINSInitializedControlPoints, + CleanUpOverlapLabels, + BRAINSClipInferior, + GenerateLabelMapFromProbabilityMap, + BRAINSAlignMSP, + BRAINSLandmarkInitializer, + insertMidACPCpoint, + BRAINSSnapShotWriter, + JointHistogram, + ShuffleVectorsModule, + ImageRegionPlotter, +) diff --git a/nipype/interfaces/semtools/utilities/brains.py b/nipype/interfaces/semtools/utilities/brains.py index abc696b5d9..59a61a1137 100644 --- a/nipype/interfaces/semtools/utilities/brains.py +++ b/nipype/interfaces/semtools/utilities/brains.py @@ -5,82 +5,93 @@ import os -from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, - TraitedSpec, File, Directory, traits, isdefined, - InputMultiPath, OutputMultiPath) +from ...base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) class BRAINSConstellationModelerInputSpec(CommandLineInputSpec): verbose = traits.Bool( desc=", Show more verbose output, ", - argstr="--verbose ") + argstr="--verbose ", + ) inputTrainingList = File( - desc= - ", Setup file, giving all parameters for training up a template model for each landmark., ", + desc=", Setup file, giving all parameters for training up a template model for each landmark., ", exists=True, - argstr="--inputTrainingList %s") + argstr="--inputTrainingList %s", + ) outputModel = traits.Either( traits.Bool, File(), hash_files=False, - desc= - ", The full filename of the output model file., ", - argstr="--outputModel %s") + desc=", The full filename of the output model file., ", + argstr="--outputModel %s", + ) saveOptimizedLandmarks = traits.Bool( - desc= - ", Flag to make a new subject-specific landmark definition file in the same format produced by Slicer3 with the optimized landmark (the detected RP, AC, and PC) in it. Useful to tighten the variances in the ConstellationModeler., ", - argstr="--saveOptimizedLandmarks ") + desc=", Flag to make a new subject-specific landmark definition file in the same format produced by Slicer3 with the optimized landmark (the detected RP, AC, and PC) in it. Useful to tighten the variances in the ConstellationModeler., ", + argstr="--saveOptimizedLandmarks ", + ) optimizedLandmarksFilenameExtender = traits.Str( - desc= - ", If the trainingList is (indexFullPathName) and contains landmark data filenames [path]/[filename].fcsv , make the optimized landmarks filenames out of [path]/[filename](thisExtender) and the optimized version of the input trainingList out of (indexFullPathName)(thisExtender) , when you rewrite all the landmarks according to the saveOptimizedLandmarks flag., ", - argstr="--optimizedLandmarksFilenameExtender %s") + desc=", If the trainingList is (indexFullPathName) and contains landmark data filenames [path]/[filename].fcsv , make the optimized landmarks filenames out of [path]/[filename](thisExtender) and the optimized version of the input trainingList out of (indexFullPathName)(thisExtender) , when you rewrite all the landmarks according to the saveOptimizedLandmarks flag., ", + argstr="--optimizedLandmarksFilenameExtender %s", + ) resultsDir = traits.Either( traits.Bool, Directory(), hash_files=False, - desc= - ", The directory for the results to be written., ", - argstr="--resultsDir %s") + desc=", The directory for the results to be written., ", + argstr="--resultsDir %s", + ) mspQualityLevel = traits.Int( - desc= - ", Flag cotrols how agressive the MSP is estimated. 
0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds)., ", - argstr="--mspQualityLevel %d") + desc=", Flag controls how aggressively the MSP is estimated. 0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds)., ", + argstr="--mspQualityLevel %d", + ) rescaleIntensities = traits.Bool( - desc= - ", Flag to turn on rescaling image intensities on input., ", - argstr="--rescaleIntensities ") + desc=", Flag to turn on rescaling image intensities on input., ", + argstr="--rescaleIntensities ", + ) trimRescaledIntensities = traits.Float( - desc= - ", Turn on clipping the rescaled image one-tailed on input. Units of standard deviations above the mean. Very large values are very permissive. Non-positive value turns clipping off. Defaults to removing 0.00001 of a normal tail above the mean., ", - argstr="--trimRescaledIntensities %f") + desc=", Turn on clipping the rescaled image one-tailed on input. Units of standard deviations above the mean. Very large values are very permissive. Non-positive value turns clipping off. Defaults to removing 0.00001 of a normal tail above the mean., ", + argstr="--trimRescaledIntensities %f", + ) rescaleIntensitiesOutputRange = InputMultiPath( traits.Int, - desc= - ", This pair of integers gives the lower and upper bounds on the signal portion of the output image. Out-of-field voxels are taken from BackgroundFillValue., ", + desc=", This pair of integers gives the lower and upper bounds on the signal portion of the output image. Out-of-field voxels are taken from BackgroundFillValue., ", sep=",", - argstr="--rescaleIntensitiesOutputRange %s") + argstr="--rescaleIntensitiesOutputRange %s", + ) BackgroundFillValue = traits.Str( - desc= - "Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.", - argstr="--BackgroundFillValue %s") + desc="Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.", + argstr="--BackgroundFillValue %s", + ) writedebuggingImagesLevel = traits.Int( - desc= - ", This flag controls if debugging images are produced. By default value of 0 is no images. Anything greater than zero will be increasing level of debugging images., ", - argstr="--writedebuggingImagesLevel %d") + desc=", This flag controls if debugging images are produced. By default value of 0 is no images. 
Anything greater than zero will be increasing level of debugging images., ", + argstr="--writedebuggingImagesLevel %d", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSConstellationModelerOutputSpec(TraitedSpec): outputModel = File( - desc= - ", The full filename of the output model file., ", - exists=True) + desc=", The full filename of the output model file., ", + exists=True, + ) resultsDir = Directory( - desc= - ", The directory for the results to be written., ", - exists=True) + desc=", The directory for the results to be written., ", + exists=True, + ) class BRAINSConstellationModeler(SEMLikeCommandLine): @@ -95,41 +106,40 @@ class BRAINSConstellationModeler(SEMLikeCommandLine): input_spec = BRAINSConstellationModelerInputSpec output_spec = BRAINSConstellationModelerOutputSpec _cmd = " BRAINSConstellationModeler " - _outputs_filenames = { - 'outputModel': 'outputModel.mdl', - 'resultsDir': 'resultsDir' - } + _outputs_filenames = {"outputModel": "outputModel.mdl", "resultsDir": "resultsDir"} _redirect_x = False class landmarksConstellationWeightsInputSpec(CommandLineInputSpec): inputTrainingList = File( - desc= - ", Setup file, giving all parameters for training up a Weight list for landmark., ", + desc=", Setup file, giving all parameters for training up a Weight list for landmark., ", exists=True, - argstr="--inputTrainingList %s") + argstr="--inputTrainingList %s", + ) inputTemplateModel = File( desc="User-specified template model., ", exists=True, - argstr="--inputTemplateModel %s") + argstr="--inputTemplateModel %s", + ) LLSModel = File( desc="Linear least squares model filename in HD5 format", exists=True, - argstr="--LLSModel %s") + argstr="--LLSModel %s", + ) outputWeightsList = traits.Either( traits.Bool, File(), hash_files=False, - desc= - ", The filename of a csv file which is a list of landmarks and their corresponding weights., ", - argstr="--outputWeightsList %s") + desc=", The filename of a csv file which is a list of landmarks and their corresponding weights., ", + argstr="--outputWeightsList %s", + ) class landmarksConstellationWeightsOutputSpec(TraitedSpec): outputWeightsList = File( - desc= - ", The filename of a csv file which is a list of landmarks and their corresponding weights., ", - exists=True) + desc=", The filename of a csv file which is a list of landmarks and their corresponding weights., ", + exists=True, + ) class landmarksConstellationWeights(SEMLikeCommandLine): @@ -144,7 +154,7 @@ class landmarksConstellationWeights(SEMLikeCommandLine): input_spec = landmarksConstellationWeightsInputSpec output_spec = landmarksConstellationWeightsOutputSpec _cmd = " landmarksConstellationWeights " - _outputs_filenames = {'outputWeightsList': 'outputWeightsList.wts'} + _outputs_filenames = {"outputWeightsList": "outputWeightsList.wts"} _redirect_x = False @@ -152,44 +162,46 @@ class BRAINSTrimForegroundInDirectionInputSpec(CommandLineInputSpec): inputVolume = File( desc="Input image to trim off the neck (and also air-filling noise.)", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Output image with neck and air-filling noise trimmed isotropic image with AC at center of image.", - argstr="--outputVolume %s") + desc="Output image with neck and air-filling noise trimmed isotropic image with AC at center of image.", + 
argstr="--outputVolume %s", + ) directionCode = traits.Int( - desc= - ", This flag chooses which dimension to compare. The sign lets you flip direction., ", - argstr="--directionCode %d") + desc=", This flag chooses which dimension to compare. The sign lets you flip direction., ", + argstr="--directionCode %d", + ) otsuPercentileThreshold = traits.Float( - desc= - ", This is a parameter to FindLargestForegroundFilledMask, which is employed to trim off air-filling noise., ", - argstr="--otsuPercentileThreshold %f") + desc=", This is a parameter to FindLargestForegroundFilledMask, which is employed to trim off air-filling noise., ", + argstr="--otsuPercentileThreshold %f", + ) closingSize = traits.Int( - desc= - ", This is a parameter to FindLargestForegroundFilledMask, ", - argstr="--closingSize %d") + desc=", This is a parameter to FindLargestForegroundFilledMask, ", + argstr="--closingSize %d", + ) headSizeLimit = traits.Float( - desc= - ", Use this to vary from the command line our search for how much upper tissue is head for the center-of-mass calculation. Units are CCs, not cubic millimeters., ", - argstr="--headSizeLimit %f") + desc=", Use this to vary from the command line our search for how much upper tissue is head for the center-of-mass calculation. Units are CCs, not cubic millimeters., ", + argstr="--headSizeLimit %f", + ) BackgroundFillValue = traits.Str( - desc= - "Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.", - argstr="--BackgroundFillValue %s") + desc="Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.", + argstr="--BackgroundFillValue %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSTrimForegroundInDirectionOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Output image with neck and air-filling noise trimmed isotropic image with AC at center of image.", - exists=True) + desc="Output image with neck and air-filling noise trimmed isotropic image with AC at center of image.", + exists=True, + ) class BRAINSTrimForegroundInDirection(SEMLikeCommandLine): @@ -208,7 +220,7 @@ class BRAINSTrimForegroundInDirection(SEMLikeCommandLine): input_spec = BRAINSTrimForegroundInDirectionInputSpec output_spec = BRAINSTrimForegroundInDirectionOutputSpec _cmd = " BRAINSTrimForegroundInDirection " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False @@ -216,42 +228,51 @@ class BRAINSLmkTransformInputSpec(CommandLineInputSpec): inputMovingLandmarks = File( desc="Input Moving Landmark list file in fcsv, ", exists=True, - argstr="--inputMovingLandmarks %s") + argstr="--inputMovingLandmarks %s", + ) inputFixedLandmarks = File( desc="Input Fixed Landmark list file in fcsv, ", exists=True, - argstr="--inputFixedLandmarks %s") + argstr="--inputFixedLandmarks %s", + ) outputAffineTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="The filename for the estimated affine transform, ", - argstr="--outputAffineTransform %s") + argstr="--outputAffineTransform %s", + ) inputMovingVolume = File( desc="The filename of input moving volume", exists=True, - argstr="--inputMovingVolume %s") + argstr="--inputMovingVolume %s", + ) inputReferenceVolume = File( desc="The filename of the reference volume", exists=True, - 
argstr="--inputReferenceVolume %s") + argstr="--inputReferenceVolume %s", + ) outputResampledVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="The filename of the output resampled volume", - argstr="--outputResampledVolume %s") + argstr="--outputResampledVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSLmkTransformOutputSpec(TraitedSpec): outputAffineTransform = File( desc="The filename for the estimated affine transform, ", - exists=True) + exists=True, + ) outputResampledVolume = File( - desc="The filename of the output resampled volume", exists=True) + desc="The filename of the output resampled volume", exists=True + ) class BRAINSLmkTransform(SEMLikeCommandLine): @@ -271,8 +292,8 @@ class BRAINSLmkTransform(SEMLikeCommandLine): output_spec = BRAINSLmkTransformOutputSpec _cmd = " BRAINSLmkTransform " _outputs_filenames = { - 'outputResampledVolume': 'outputResampledVolume.nii', - 'outputAffineTransform': 'outputAffineTransform.h5' + "outputResampledVolume": "outputResampledVolume.nii", + "outputAffineTransform": "outputAffineTransform.h5", } _redirect_x = False @@ -281,82 +302,95 @@ class BRAINSMushInputSpec(CommandLineInputSpec): inputFirstVolume = File( desc="Input image (1) for mixture optimization", exists=True, - argstr="--inputFirstVolume %s") + argstr="--inputFirstVolume %s", + ) inputSecondVolume = File( desc="Input image (2) for mixture optimization", exists=True, - argstr="--inputSecondVolume %s") + argstr="--inputSecondVolume %s", + ) inputMaskVolume = File( desc="Input label image for mixture optimization", exists=True, - argstr="--inputMaskVolume %s") + argstr="--inputMaskVolume %s", + ) outputWeightsFile = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Weights File", - argstr="--outputWeightsFile %s") + argstr="--outputWeightsFile %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="The MUSH image produced from the T1 and T2 weighted images", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) outputMask = traits.Either( traits.Bool, File(), hash_files=False, desc="The brain volume mask generated from the MUSH image", - argstr="--outputMask %s") + argstr="--outputMask %s", + ) seed = InputMultiPath( traits.Int, desc="Seed Point for Brain Region Filling", sep=",", - argstr="--seed %s") + argstr="--seed %s", + ) desiredMean = traits.Float( desc="Desired mean within the mask for weighted sum of both images.", - argstr="--desiredMean %f") + argstr="--desiredMean %f", + ) desiredVariance = traits.Float( - desc= - "Desired variance within the mask for weighted sum of both images.", - argstr="--desiredVariance %f") + desc="Desired variance within the mask for weighted sum of both images.", + argstr="--desiredVariance %f", + ) lowerThresholdFactorPre = traits.Float( desc="Lower threshold factor for finding an initial brain mask", - argstr="--lowerThresholdFactorPre %f") + argstr="--lowerThresholdFactorPre %f", + ) upperThresholdFactorPre = traits.Float( desc="Upper threshold factor for finding an initial brain mask", - argstr="--upperThresholdFactorPre %f") + argstr="--upperThresholdFactorPre %f", + ) lowerThresholdFactor = traits.Float( desc="Lower threshold factor for defining the brain mask", - argstr="--lowerThresholdFactor %f") + argstr="--lowerThresholdFactor %f", + ) upperThresholdFactor = traits.Float( desc="Upper threshold factor for defining the 
brain mask", - argstr="--upperThresholdFactor %f") + argstr="--upperThresholdFactor %f", + ) boundingBoxSize = InputMultiPath( traits.Int, - desc= - "Size of the cubic bounding box mask used when no brain mask is present", + desc="Size of the cubic bounding box mask used when no brain mask is present", sep=",", - argstr="--boundingBoxSize %s") + argstr="--boundingBoxSize %s", + ) boundingBoxStart = InputMultiPath( traits.Int, - desc= - "XYZ point-coordinate for the start of the cubic bounding box mask used when no brain mask is present", + desc="XYZ point-coordinate for the start of the cubic bounding box mask used when no brain mask is present", sep=",", - argstr="--boundingBoxStart %s") + argstr="--boundingBoxStart %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSMushOutputSpec(TraitedSpec): outputWeightsFile = File(desc="Output Weights File", exists=True) outputVolume = File( - desc="The MUSH image produced from the T1 and T2 weighted images", - exists=True) + desc="The MUSH image produced from the T1 and T2 weighted images", exists=True + ) outputMask = File( - desc="The brain volume mask generated from the MUSH image", - exists=True) + desc="The brain volume mask generated from the MUSH image", exists=True + ) class BRAINSMush(SEMLikeCommandLine): @@ -382,9 +416,9 @@ class BRAINSMush(SEMLikeCommandLine): output_spec = BRAINSMushOutputSpec _cmd = " BRAINSMush " _outputs_filenames = { - 'outputMask': 'outputMask.nii.gz', - 'outputWeightsFile': 'outputWeightsFile.txt', - 'outputVolume': 'outputVolume.nii.gz' + "outputMask": "outputMask.nii.gz", + "outputWeightsFile": "outputWeightsFile.txt", + "outputVolume": "outputVolume.nii.gz", } _redirect_x = False @@ -399,22 +433,21 @@ class BRAINSTransformConvertInputSpec(CommandLineInputSpec): "ScaleSkewVersor", "DisplacementField", "Same", - desc= - "The target transformation type. Must be conversion-compatible with the input transform type", - argstr="--outputTransformType %s") + desc="The target transformation type. Must be conversion-compatible with the input transform type", + argstr="--outputTransformType %s", + ) outputPrecisionType = traits.Enum( "double", "float", - desc= - "Precision type of the output transform. It can be either single precision or double precision", - argstr="--outputPrecisionType %s") + desc="Precision type of the output transform. 
It can be either single precision or double precision", + argstr="--outputPrecisionType %s", + ) displacementVolume = traits.Either( - traits.Bool, - File(), - hash_files=False, - argstr="--displacementVolume %s") + traits.Bool, File(), hash_files=False, argstr="--displacementVolume %s" + ) outputTransform = traits.Either( - traits.Bool, File(), hash_files=False, argstr="--outputTransform %s") + traits.Bool, File(), hash_files=False, argstr="--outputTransform %s" + ) class BRAINSTransformConvertOutputSpec(TraitedSpec): @@ -443,8 +476,8 @@ class BRAINSTransformConvert(SEMLikeCommandLine): output_spec = BRAINSTransformConvertOutputSpec _cmd = " BRAINSTransformConvert " _outputs_filenames = { - 'displacementVolume': 'displacementVolume.nii', - 'outputTransform': 'outputTransform.mat' + "displacementVolume": "displacementVolume.nii", + "outputTransform": "outputTransform.mat", } _redirect_x = False @@ -453,18 +486,19 @@ class landmarksConstellationAlignerInputSpec(CommandLineInputSpec): inputLandmarksPaired = File( desc="Input landmark file (.fcsv)", exists=True, - argstr="--inputLandmarksPaired %s") + argstr="--inputLandmarksPaired %s", + ) outputLandmarksPaired = traits.Either( traits.Bool, File(), hash_files=False, desc="Output landmark file (.fcsv)", - argstr="--outputLandmarksPaired %s") + argstr="--outputLandmarksPaired %s", + ) class landmarksConstellationAlignerOutputSpec(TraitedSpec): - outputLandmarksPaired = File( - desc="Output landmark file (.fcsv)", exists=True) + outputLandmarksPaired = File(desc="Output landmark file (.fcsv)", exists=True) class landmarksConstellationAligner(SEMLikeCommandLine): @@ -481,24 +515,24 @@ class landmarksConstellationAligner(SEMLikeCommandLine): input_spec = landmarksConstellationAlignerInputSpec output_spec = landmarksConstellationAlignerOutputSpec _cmd = " landmarksConstellationAligner " - _outputs_filenames = {'outputLandmarksPaired': 'outputLandmarksPaired'} + _outputs_filenames = {"outputLandmarksPaired": "outputLandmarksPaired"} _redirect_x = False class BRAINSEyeDetectorInputSpec(CommandLineInputSpec): numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") - inputVolume = File( - desc="The input volume", exists=True, argstr="--inputVolume %s") + argstr="--numberOfThreads %d", + ) + inputVolume = File(desc="The input volume", exists=True, argstr="--inputVolume %s") outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="The output volume", - argstr="--outputVolume %s") - debugDir = traits.Str( - desc="A place for debug information", argstr="--debugDir %s") + argstr="--outputVolume %s", + ) + debugDir = traits.Str(desc="A place for debug information", argstr="--debugDir %s") class BRAINSEyeDetectorOutputSpec(TraitedSpec): @@ -519,7 +553,7 @@ class BRAINSEyeDetector(SEMLikeCommandLine): input_spec = BRAINSEyeDetectorInputSpec output_spec = BRAINSEyeDetectorOutputSpec _cmd = " BRAINSEyeDetector " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False @@ -527,10 +561,12 @@ class BRAINSLinearModelerEPCAInputSpec(CommandLineInputSpec): inputTrainingList = File( desc="Input Training Landmark List Filename, ", exists=True, - argstr="--inputTrainingList %s") + argstr="--inputTrainingList %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class 
BRAINSLinearModelerEPCAOutputSpec(TraitedSpec): @@ -558,31 +594,33 @@ class BRAINSLinearModelerEPCA(SEMLikeCommandLine): class BRAINSInitializedControlPointsInputSpec(CommandLineInputSpec): - inputVolume = File( - desc="Input Volume", exists=True, argstr="--inputVolume %s") + inputVolume = File(desc="Input Volume", exists=True, argstr="--inputVolume %s") outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Volume", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) splineGridSize = InputMultiPath( traits.Int, - desc= - "The number of subdivisions of the BSpline Grid to be centered on the image space. Each dimension must have at least 3 subdivisions for the BSpline to be correctly computed. ", + desc="The number of subdivisions of the BSpline Grid to be centered on the image space. Each dimension must have at least 3 subdivisions for the BSpline to be correctly computed. ", sep=",", - argstr="--splineGridSize %s") + argstr="--splineGridSize %s", + ) permuteOrder = InputMultiPath( traits.Int, - desc= - "The permutation order for the images. The default is 0,1,2 (i.e. no permutation)", + desc="The permutation order for the images. The default is 0,1,2 (i.e. no permutation)", sep=",", - argstr="--permuteOrder %s") + argstr="--permuteOrder %s", + ) outputLandmarksFile = traits.Str( - desc="Output filename", argstr="--outputLandmarksFile %s") + desc="Output filename", argstr="--outputLandmarksFile %s" + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSInitializedControlPointsOutputSpec(TraitedSpec): @@ -609,30 +647,29 @@ class BRAINSInitializedControlPoints(SEMLikeCommandLine): input_spec = BRAINSInitializedControlPointsInputSpec output_spec = BRAINSInitializedControlPointsOutputSpec _cmd = " BRAINSInitializedControlPoints " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class CleanUpOverlapLabelsInputSpec(CommandLineInputSpec): inputBinaryVolumes = InputMultiPath( File(exists=True), - desc= - "The list of binary images to be checked and cleaned up. Order is important. Binary volume given first always wins out. ", - argstr="--inputBinaryVolumes %s...") + desc="The list of binary images to be checked and cleaned up. Order is important. Binary volume given first always wins out. ", + argstr="--inputBinaryVolumes %s...", + ) outputBinaryVolumes = traits.Either( traits.Bool, - InputMultiPath(File(), ), + InputMultiPath(File(),), hash_files=False, - desc= - "The output label map images, with integer values in it. Each label value specified in the inputLabels is combined into this output label map volume", - argstr="--outputBinaryVolumes %s...") + desc="The output label map images, with integer values in it. Each label value specified in the inputLabels is combined into this output label map volume", + argstr="--outputBinaryVolumes %s...", + ) class CleanUpOverlapLabelsOutputSpec(TraitedSpec): outputBinaryVolumes = OutputMultiPath( File(exists=True), - desc= - "The output label map images, with integer values in it. Each label value specified in the inputLabels is combined into this output label map volume" + desc="The output label map images, with integer values in it. 
Each label value specified in the inputLabels is combined into this output label map volume", ) @@ -652,7 +689,7 @@ class CleanUpOverlapLabels(SEMLikeCommandLine): input_spec = CleanUpOverlapLabelsInputSpec output_spec = CleanUpOverlapLabelsOutputSpec _cmd = " CleanUpOverlapLabels " - _outputs_filenames = {'outputBinaryVolumes': 'outputBinaryVolumes.nii'} + _outputs_filenames = {"outputBinaryVolumes": "outputBinaryVolumes.nii"} _redirect_x = False @@ -660,32 +697,34 @@ class BRAINSClipInferiorInputSpec(CommandLineInputSpec): inputVolume = File( desc="Input image to make a clipped short int copy from.", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Output image, a short int copy of the upper portion of the input image, filled with BackgroundFillValue.", - argstr="--outputVolume %s") + desc="Output image, a short int copy of the upper portion of the input image, filled with BackgroundFillValue.", + argstr="--outputVolume %s", + ) acLowerBound = traits.Float( - desc= - ", When the input image to the output image, replace the image with the BackgroundFillValue everywhere below the plane This Far in physical units (millimeters) below (inferior to) the AC point (assumed to be the voxel field middle.) The oversize default was chosen to have no effect. Based on visualizing a thousand masks in the IPIG study, we recommend a limit no smaller than 80.0 mm., ", - argstr="--acLowerBound %f") + desc=", When the input image to the output image, replace the image with the BackgroundFillValue everywhere below the plane This Far in physical units (millimeters) below (inferior to) the AC point (assumed to be the voxel field middle.) The oversize default was chosen to have no effect. Based on visualizing a thousand masks in the IPIG study, we recommend a limit no smaller than 80.0 mm., ", + argstr="--acLowerBound %f", + ) BackgroundFillValue = traits.Str( - desc= - "Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.", - argstr="--BackgroundFillValue %s") + desc="Fill the background of image with specified short int value. 
Enter number or use BIGNEG for a large negative number.", + argstr="--BackgroundFillValue %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSClipInferiorOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Output image, a short int copy of the upper portion of the input image, filled with BackgroundFillValue.", - exists=True) + desc="Output image, a short int copy of the upper portion of the input image, filled with BackgroundFillValue.", + exists=True, + ) class BRAINSClipInferior(SEMLikeCommandLine): @@ -702,7 +741,7 @@ class BRAINSClipInferior(SEMLikeCommandLine): input_spec = BRAINSClipInferiorInputSpec output_spec = BRAINSClipInferiorOutputSpec _cmd = " BRAINSClipInferior " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False @@ -710,21 +749,25 @@ class GenerateLabelMapFromProbabilityMapInputSpec(CommandLineInputSpec): inputVolumes = InputMultiPath( File(exists=True), desc="The Input probaiblity images to be computed for lable maps", - argstr="--inputVolumes %s...") + argstr="--inputVolumes %s...", + ) outputLabelVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="The Input binary image for region of interest", - argstr="--outputLabelVolume %s") + argstr="--outputLabelVolume %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class GenerateLabelMapFromProbabilityMapOutputSpec(TraitedSpec): outputLabelVolume = File( - desc="The Input binary image for region of interest", exists=True) + desc="The Input binary image for region of interest", exists=True + ) class GenerateLabelMapFromProbabilityMap(SEMLikeCommandLine): @@ -743,7 +786,7 @@ class GenerateLabelMapFromProbabilityMap(SEMLikeCommandLine): input_spec = GenerateLabelMapFromProbabilityMapInputSpec output_spec = GenerateLabelMapFromProbabilityMapOutputSpec _cmd = " GenerateLabelMapFromProbabilityMap " - _outputs_filenames = {'outputLabelVolume': 'outputLabelVolume.nii.gz'} + _outputs_filenames = {"outputLabelVolume": "outputLabelVolume.nii.gz"} _redirect_x = False @@ -751,47 +794,51 @@ class BRAINSAlignMSPInputSpec(CommandLineInputSpec): inputVolume = File( desc=", The Image to be resampled, ", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) OutputresampleMSP = traits.Either( traits.Bool, File(), hash_files=False, desc=", The image to be output., ", - argstr="--OutputresampleMSP %s") + argstr="--OutputresampleMSP %s", + ) verbose = traits.Bool( - desc=", Show more verbose output, ", argstr="--verbose ") + desc=", Show more verbose output, ", argstr="--verbose " + ) resultsDir = traits.Either( traits.Bool, Directory(), hash_files=False, desc=", The directory for the results to be written., ", - argstr="--resultsDir %s") + argstr="--resultsDir %s", + ) writedebuggingImagesLevel = traits.Int( - desc= - ", This flag controls if debugging images are produced. By default value of 0 is no images. Anything greater than zero will be increasing level of debugging images., ", - argstr="--writedebuggingImagesLevel %d") + desc=", This flag controls if debugging images are produced. By default value of 0 is no images. 
@@ -710,21 +749,25 @@ class GenerateLabelMapFromProbabilityMapInputSpec(CommandLineInputSpec):
     inputVolumes = InputMultiPath(
         File(exists=True),
         desc="The Input probability images to be computed for label maps",
-        argstr="--inputVolumes %s...")
+        argstr="--inputVolumes %s...",
+    )
     outputLabelVolume = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
         desc="The Input binary image for region of interest",
-        argstr="--outputLabelVolume %s")
+        argstr="--outputLabelVolume %s",
+    )
     numberOfThreads = traits.Int(
         desc="Explicitly specify the maximum number of threads to use.",
-        argstr="--numberOfThreads %d")
+        argstr="--numberOfThreads %d",
+    )


 class GenerateLabelMapFromProbabilityMapOutputSpec(TraitedSpec):
     outputLabelVolume = File(
-        desc="The Input binary image for region of interest", exists=True)
+        desc="The Input binary image for region of interest", exists=True
+    )


 class GenerateLabelMapFromProbabilityMap(SEMLikeCommandLine):
@@ -743,7 +786,7 @@ class GenerateLabelMapFromProbabilityMap(SEMLikeCommandLine):
     input_spec = GenerateLabelMapFromProbabilityMapInputSpec
     output_spec = GenerateLabelMapFromProbabilityMapOutputSpec
     _cmd = " GenerateLabelMapFromProbabilityMap "
-    _outputs_filenames = {'outputLabelVolume': 'outputLabelVolume.nii.gz'}
+    _outputs_filenames = {"outputLabelVolume": "outputLabelVolume.nii.gz"}
     _redirect_x = False
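# --- Editor's note (illustration, not part of the diff) ---------------------
# Sketch of GenerateLabelMapFromProbabilityMap: each element of inputVolumes
# is one tissue-class probability image, and the trailing "..." in the argstr
# repeats the flag once per file (filenames hypothetical):
from nipype.interfaces.semtools.utilities.brains import GenerateLabelMapFromProbabilityMap

gen = GenerateLabelMapFromProbabilityMap()
gen.inputs.inputVolumes = ["csf_prob.nii.gz", "gm_prob.nii.gz", "wm_prob.nii.gz"]
gen.inputs.outputLabelVolume = True  # default outputLabelVolume.nii.gz, per _outputs_filenames
res = gen.run()
# -----------------------------------------------------------------------------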
@@ -751,47 +794,51 @@ class BRAINSAlignMSPInputSpec(CommandLineInputSpec):
     inputVolume = File(
         desc=", The Image to be resampled, ",
         exists=True,
-        argstr="--inputVolume %s")
+        argstr="--inputVolume %s",
+    )
     OutputresampleMSP = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
         desc=", The image to be output., ",
-        argstr="--OutputresampleMSP %s")
+        argstr="--OutputresampleMSP %s",
+    )
     verbose = traits.Bool(
-        desc=", Show more verbose output, ", argstr="--verbose ")
+        desc=", Show more verbose output, ", argstr="--verbose "
+    )
     resultsDir = traits.Either(
         traits.Bool,
         Directory(),
         hash_files=False,
         desc=", The directory for the results to be written., ",
-        argstr="--resultsDir %s")
+        argstr="--resultsDir %s",
+    )
     writedebuggingImagesLevel = traits.Int(
-        desc=
-        ", This flag controls if debugging images are produced. By default value of 0 is no images. Anything greater than zero will be increasing level of debugging images., ",
-        argstr="--writedebuggingImagesLevel %d")
+        desc=", This flag controls if debugging images are produced. By default value of 0 is no images. Anything greater than zero will be increasing level of debugging images., ",
+        argstr="--writedebuggingImagesLevel %d",
+    )
     mspQualityLevel = traits.Int(
-        desc=
-        ", Flag cotrols how agressive the MSP is estimated. 0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds)., ",
-        argstr="--mspQualityLevel %d")
+        desc=", Flag controls how aggressively the MSP is estimated. 0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds)., ",
+        argstr="--mspQualityLevel %d",
+    )
     rescaleIntensities = traits.Bool(
-        desc=
-        ", Flag to turn on rescaling image intensities on input., ",
-        argstr="--rescaleIntensities ")
+        desc=", Flag to turn on rescaling image intensities on input., ",
+        argstr="--rescaleIntensities ",
+    )
     trimRescaledIntensities = traits.Float(
-        desc=
-        ", Turn on clipping the rescaled image one-tailed on input. Units of standard deviations above the mean. Very large values are very permissive. Non-positive value turns clipping off. Defaults to removing 0.00001 of a normal tail above the mean., ",
-        argstr="--trimRescaledIntensities %f")
+        desc=", Turn on clipping the rescaled image one-tailed on input. Units of standard deviations above the mean. Very large values are very permissive. Non-positive value turns clipping off. Defaults to removing 0.00001 of a normal tail above the mean., ",
+        argstr="--trimRescaledIntensities %f",
+    )
     rescaleIntensitiesOutputRange = InputMultiPath(
         traits.Int,
-        desc=
-        ", This pair of integers gives the lower and upper bounds on the signal portion of the output image. Out-of-field voxels are taken from BackgroundFillValue., ",
+        desc=", This pair of integers gives the lower and upper bounds on the signal portion of the output image. Out-of-field voxels are taken from BackgroundFillValue., ",
         sep=",",
-        argstr="--rescaleIntensitiesOutputRange %s")
+        argstr="--rescaleIntensitiesOutputRange %s",
+    )
     BackgroundFillValue = traits.Str(
-        desc=
-        "Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.",
-        argstr="--BackgroundFillValue %s")
+        desc="Fill the background of the image with the specified short int value. Enter a number or use BIGNEG for a large negative number.",
+        argstr="--BackgroundFillValue %s",
+    )
     interpolationMode = traits.Enum(
         "NearestNeighbor",
         "Linear",
@@ -803,20 +850,23 @@ class BRAINSAlignMSPInputSpec(CommandLineInputSpec):
         "Welch",
         "Lanczos",
         "Blackman",
-        desc=
-        "Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc",
-        argstr="--interpolationMode %s")
+        desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc",
+        argstr="--interpolationMode %s",
+    )
     numberOfThreads = traits.Int(
         desc="Explicitly specify the maximum number of threads to use.",
-        argstr="--numberOfThreads %d")
+        argstr="--numberOfThreads %d",
+    )


 class BRAINSAlignMSPOutputSpec(TraitedSpec):
     OutputresampleMSP = File(
-        desc=", The image to be output., ", exists=True)
+        desc=", The image to be output., ", exists=True
+    )
     resultsDir = Directory(
         desc=", The directory for the results to be written., ",
-        exists=True)
+        exists=True,
+    )


 class BRAINSAlignMSP(SEMLikeCommandLine):
@@ -832,8 +882,8 @@ class BRAINSAlignMSP(SEMLikeCommandLine):
     output_spec = BRAINSAlignMSPOutputSpec
     _cmd = " BRAINSAlignMSP "
     _outputs_filenames = {
-        'OutputresampleMSP': 'OutputresampleMSP.nii',
-        'resultsDir': 'resultsDir'
+        "OutputresampleMSP": "OutputresampleMSP.nii",
+        "resultsDir": "resultsDir",
     }
     _redirect_x = False
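# --- Editor's note (illustration, not part of the diff) ---------------------
# Sketch of BRAINSAlignMSP, which estimates the mid-sagittal plane and writes
# a realigned copy of the input (paths hypothetical):
from nipype.interfaces.semtools.utilities.brains import BRAINSAlignMSP

msp = BRAINSAlignMSP()
msp.inputs.inputVolume = "T1.nii"        # hypothetical input
msp.inputs.mspQualityLevel = 2           # 0=quick ... 3=best, per the desc above
msp.inputs.interpolationMode = "Linear"  # must be one of the enum values above
msp.inputs.OutputresampleMSP = True      # default name: OutputresampleMSP.nii
res = msp.run()
# -----------------------------------------------------------------------------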
", exists=True, - argstr="--inputWeightFilename %s") + argstr="--inputWeightFilename %s", + ) outputTransformFilename = traits.Either( traits.Bool, File(), hash_files=False, desc="output transform file name (ex: ./outputTransform.mat) ", - argstr="--outputTransformFilename %s") + argstr="--outputTransformFilename %s", + ) class BRAINSLandmarkInitializerOutputSpec(TraitedSpec): outputTransformFilename = File( - desc="output transform file name (ex: ./outputTransform.mat) ", - exists=True) + desc="output transform file name (ex: ./outputTransform.mat) ", exists=True + ) class BRAINSLandmarkInitializer(SEMLikeCommandLine): @@ -884,21 +937,21 @@ class BRAINSLandmarkInitializer(SEMLikeCommandLine): input_spec = BRAINSLandmarkInitializerInputSpec output_spec = BRAINSLandmarkInitializerOutputSpec _cmd = " BRAINSLandmarkInitializer " - _outputs_filenames = {'outputTransformFilename': 'outputTransformFilename'} + _outputs_filenames = {"outputTransformFilename": "outputTransformFilename"} _redirect_x = False class insertMidACPCpointInputSpec(CommandLineInputSpec): inputLandmarkFile = File( - desc="Input landmark file (.fcsv)", - exists=True, - argstr="--inputLandmarkFile %s") + desc="Input landmark file (.fcsv)", exists=True, argstr="--inputLandmarkFile %s" + ) outputLandmarkFile = traits.Either( traits.Bool, File(), hash_files=False, desc="Output landmark file (.fcsv)", - argstr="--outputLandmarkFile %s") + argstr="--outputLandmarkFile %s", + ) class insertMidACPCpointOutputSpec(TraitedSpec): @@ -919,56 +972,56 @@ class insertMidACPCpoint(SEMLikeCommandLine): input_spec = insertMidACPCpointInputSpec output_spec = insertMidACPCpointOutputSpec _cmd = " insertMidACPCpoint " - _outputs_filenames = {'outputLandmarkFile': 'outputLandmarkFile'} + _outputs_filenames = {"outputLandmarkFile": "outputLandmarkFile"} _redirect_x = False class BRAINSSnapShotWriterInputSpec(CommandLineInputSpec): inputVolumes = InputMultiPath( File(exists=True), - desc= - "Input image volume list to be extracted as 2D image. Multiple input is possible. At least one input is required.", - argstr="--inputVolumes %s...") + desc="Input image volume list to be extracted as 2D image. Multiple input is possible. At least one input is required.", + argstr="--inputVolumes %s...", + ) inputBinaryVolumes = InputMultiPath( File(exists=True), - desc= - "Input mask (binary) volume list to be extracted as 2D image. Multiple input is possible.", - argstr="--inputBinaryVolumes %s...") + desc="Input mask (binary) volume list to be extracted as 2D image. Multiple input is possible.", + argstr="--inputBinaryVolumes %s...", + ) inputSliceToExtractInPhysicalPoint = InputMultiPath( traits.Float, - desc= - "2D slice number of input images. For autoWorkUp output, which AC-PC aligned, 0,0,0 will be the center.", + desc="2D slice number of input images. For autoWorkUp output, which AC-PC aligned, 0,0,0 will be the center.", sep=",", - argstr="--inputSliceToExtractInPhysicalPoint %s") + argstr="--inputSliceToExtractInPhysicalPoint %s", + ) inputSliceToExtractInIndex = InputMultiPath( traits.Int, - desc= - "2D slice number of input images. For size of 256*256*256 image, 128 is usually used.", + desc="2D slice number of input images. For size of 256*256*256 image, 128 is usually used.", sep=",", - argstr="--inputSliceToExtractInIndex %s") + argstr="--inputSliceToExtractInIndex %s", + ) inputSliceToExtractInPercent = InputMultiPath( traits.Int, - desc= - "2D slice number of input images. Percentage input from 0%-100%. (ex. 

 class BRAINSSnapShotWriterInputSpec(CommandLineInputSpec):
     inputVolumes = InputMultiPath(
         File(exists=True),
-        desc=
-        "Input image volume list to be extracted as 2D image. Multiple input is possible. At least one input is required.",
-        argstr="--inputVolumes %s...")
+        desc="Input image volume list to be extracted as 2D image. Multiple inputs are possible. At least one input is required.",
+        argstr="--inputVolumes %s...",
+    )
     inputBinaryVolumes = InputMultiPath(
         File(exists=True),
-        desc=
-        "Input mask (binary) volume list to be extracted as 2D image. Multiple input is possible.",
-        argstr="--inputBinaryVolumes %s...")
+        desc="Input mask (binary) volume list to be extracted as 2D image. Multiple inputs are possible.",
+        argstr="--inputBinaryVolumes %s...",
+    )
     inputSliceToExtractInPhysicalPoint = InputMultiPath(
         traits.Float,
-        desc=
-        "2D slice number of input images. For autoWorkUp output, which AC-PC aligned, 0,0,0 will be the center.",
+        desc="2D slice number of input images. For autoWorkUp output, which is AC-PC aligned, 0,0,0 will be the center.",
         sep=",",
-        argstr="--inputSliceToExtractInPhysicalPoint %s")
+        argstr="--inputSliceToExtractInPhysicalPoint %s",
+    )
     inputSliceToExtractInIndex = InputMultiPath(
         traits.Int,
-        desc=
-        "2D slice number of input images. For size of 256*256*256 image, 128 is usually used.",
+        desc="2D slice number of input images. For size of 256*256*256 image, 128 is usually used.",
         sep=",",
-        argstr="--inputSliceToExtractInIndex %s")
+        argstr="--inputSliceToExtractInIndex %s",
+    )
     inputSliceToExtractInPercent = InputMultiPath(
         traits.Int,
-        desc=
-        "2D slice number of input images. Percentage input from 0%-100%. (ex. --inputSliceToExtractInPercent 50,50,50",
+        desc="2D slice number of input images. Percentage input from 0%-100%. (ex. --inputSliceToExtractInPercent 50,50,50)",
         sep=",",
-        argstr="--inputSliceToExtractInPercent %s")
+        argstr="--inputSliceToExtractInPercent %s",
+    )
     inputPlaneDirection = InputMultiPath(
         traits.Int,
-        desc=
-        "Plane to display. In general, 0=saggital, 1=coronal, and 2=axial plane.",
+        desc="Plane to display. In general, 0=sagittal, 1=coronal, and 2=axial plane.",
         sep=",",
-        argstr="--inputPlaneDirection %s")
+        argstr="--inputPlaneDirection %s",
+    )
     outputFilename = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
         desc="2D file name of input images. Required.",
-        argstr="--outputFilename %s")
+        argstr="--outputFilename %s",
+    )


 class BRAINSSnapShotWriterOutputSpec(TraitedSpec):
-    outputFilename = File(
-        desc="2D file name of input images. Required.", exists=True)
+    outputFilename = File(desc="2D file name of input images. Required.", exists=True)


 class BRAINSSnapShotWriter(SEMLikeCommandLine):
@@ -989,7 +1042,7 @@ class BRAINSSnapShotWriter(SEMLikeCommandLine):
     input_spec = BRAINSSnapShotWriterInputSpec
     output_spec = BRAINSSnapShotWriterOutputSpec
     _cmd = " BRAINSSnapShotWriter "
-    _outputs_filenames = {'outputFilename': 'outputFilename'}
+    _outputs_filenames = {"outputFilename": "outputFilename"}
     _redirect_x = False
@@ -997,27 +1050,30 @@ class JointHistogramInputSpec(CommandLineInputSpec):
     inputVolumeInXAxis = File(
         desc="The Input image to be computed for statistics",
         exists=True,
-        argstr="--inputVolumeInXAxis %s")
+        argstr="--inputVolumeInXAxis %s",
+    )
     inputVolumeInYAxis = File(
         desc="The Input image to be computed for statistics",
         exists=True,
-        argstr="--inputVolumeInYAxis %s")
+        argstr="--inputVolumeInYAxis %s",
+    )
     inputMaskVolumeInXAxis = File(
-        desc=
-        "Input mask volume for inputVolumeInXAxis. Histogram will be computed just for the masked region",
+        desc="Input mask volume for inputVolumeInXAxis. Histogram will be computed just for the masked region",
         exists=True,
-        argstr="--inputMaskVolumeInXAxis %s")
+        argstr="--inputMaskVolumeInXAxis %s",
+    )
     inputMaskVolumeInYAxis = File(
-        desc=
-        "Input mask volume for inputVolumeInYAxis. Histogram will be computed just for the masked region",
+        desc="Input mask volume for inputVolumeInYAxis. Histogram will be computed just for the masked region",
        exists=True,
-        argstr="--inputMaskVolumeInYAxis %s")
+        argstr="--inputMaskVolumeInYAxis %s",
+    )
     outputJointHistogramImage = traits.Str(
-        desc=
-        " output joint histogram image file name. Histogram is usually 2D image. ",
-        argstr="--outputJointHistogramImage %s")
+        desc=" output joint histogram image file name. Histogram is usually 2D image. ",
+        argstr="--outputJointHistogramImage %s",
+    )
     verbose = traits.Bool(
-        desc=" print debugging information, ", argstr="--verbose ")
+        desc=" print debugging information, ", argstr="--verbose "
+    )


 class JointHistogramOutputSpec(TraitedSpec):
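# --- Editor's note (illustration, not part of the diff) ---------------------
# Sketch of BRAINSSnapShotWriter from the hunk above; note how sep="," joins
# each list-valued trait into a single comma-separated argument (paths
# hypothetical):
from nipype.interfaces.semtools.utilities.brains import BRAINSSnapShotWriter

snap = BRAINSSnapShotWriter()
snap.inputs.inputVolumes = ["T1_acpc.nii.gz"]
snap.inputs.inputPlaneDirection = [0, 1, 2]              # sagittal, coronal, axial
snap.inputs.inputSliceToExtractInPercent = [50, 50, 50]  # rendered as "50,50,50"
snap.inputs.outputFilename = "snapshot.png"
res = snap.run()
# -----------------------------------------------------------------------------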
@@ -1046,28 +1102,28 @@ class JointHistogram(SEMLikeCommandLine):

 class ShuffleVectorsModuleInputSpec(CommandLineInputSpec):
     inputVectorFileBaseName = File(
-        desc=
-        "input vector file name prefix. Usually end with .txt and header file has prost fix of .txt.hdr",
+        desc="input vector file name prefix. Usually ends with .txt, and the header file has a postfix of .txt.hdr",
         exists=True,
-        argstr="--inputVectorFileBaseName %s")
+        argstr="--inputVectorFileBaseName %s",
+    )
     outputVectorFileBaseName = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
-        desc=
-        "output vector file name prefix. Usually end with .txt and header file has prost fix of .txt.hdr",
-        argstr="--outputVectorFileBaseName %s")
+        desc="output vector file name prefix. Usually ends with .txt, and the header file has a postfix of .txt.hdr",
+        argstr="--outputVectorFileBaseName %s",
+    )
     resampleProportion = traits.Float(
-        desc=
-        "downsample size of 1 will be the same size as the input images, downsample size of 3 will throw 2/3 the vectors away.",
-        argstr="--resampleProportion %f")
+        desc="downsample size of 1 will be the same size as the input images, downsample size of 3 will throw 2/3 of the vectors away.",
+        argstr="--resampleProportion %f",
+    )


 class ShuffleVectorsModuleOutputSpec(TraitedSpec):
     outputVectorFileBaseName = File(
-        desc=
-        "output vector file name prefix. Usually end with .txt and header file has prost fix of .txt.hdr",
-        exists=True)
+        desc="output vector file name prefix. Usually ends with .txt, and the header file has a postfix of .txt.hdr",
+        exists=True,
+    )


 class ShuffleVectorsModule(SEMLikeCommandLine):
@@ -1088,9 +1144,7 @@ class ShuffleVectorsModule(SEMLikeCommandLine):
     input_spec = ShuffleVectorsModuleInputSpec
     output_spec = ShuffleVectorsModuleOutputSpec
     _cmd = " ShuffleVectorsModule "
-    _outputs_filenames = {
-        'outputVectorFileBaseName': 'outputVectorFileBaseName'
-    }
+    _outputs_filenames = {"outputVectorFileBaseName": "outputVectorFileBaseName"}
     _redirect_x = False
@@ -1098,32 +1152,38 @@ class ImageRegionPlotterInputSpec(CommandLineInputSpec):
     inputVolume1 = File(
         desc="The Input image to be computed for statistics",
         exists=True,
-        argstr="--inputVolume1 %s")
+        argstr="--inputVolume1 %s",
+    )
     inputVolume2 = File(
         desc="The Input image to be computed for statistics",
         exists=True,
-        argstr="--inputVolume2 %s")
+        argstr="--inputVolume2 %s",
+    )
     inputBinaryROIVolume = File(
         desc="The Input binary image for region of interest",
         exists=True,
-        argstr="--inputBinaryROIVolume %s")
+        argstr="--inputBinaryROIVolume %s",
+    )
     inputLabelVolume = File(
-        desc="The Label Image", exists=True, argstr="--inputLabelVolume %s")
+        desc="The Label Image", exists=True, argstr="--inputLabelVolume %s"
+    )
     numberOfHistogramBins = traits.Int(
-        desc=" the number of histogram levels",
-        argstr="--numberOfHistogramBins %d")
+        desc=" the number of histogram levels", argstr="--numberOfHistogramBins %d"
+    )
     outputJointHistogramData = traits.Str(
-        desc=" output data file name", argstr="--outputJointHistogramData %s")
+        desc=" output data file name", argstr="--outputJointHistogramData %s"
+    )
     useROIAUTO = traits.Bool(
-        desc=
-        " Use ROIAUTO to compute region of interest. This cannot be used with inputLabelVolume",
-        argstr="--useROIAUTO ")
+        desc=" Use ROIAUTO to compute region of interest. This cannot be used with inputLabelVolume",
+        argstr="--useROIAUTO ",
+    )
     useIntensityForHistogram = traits.Bool(
-        desc=
-        " Create Intensity Joint Histogram instead of Quantile Joint Histogram",
-        argstr="--useIntensityForHistogram ")
+        desc=" Create Intensity Joint Histogram instead of Quantile Joint Histogram",
+        argstr="--useIntensityForHistogram ",
+    )
     verbose = traits.Bool(
-        desc=" print debugging information, ", argstr="--verbose ")
+        desc=" print debugging information, ", argstr="--verbose "
+    )


 class ImageRegionPlotterOutputSpec(TraitedSpec):
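# --- Editor's note (illustration, not part of the diff) ---------------------
# Sketch of ShuffleVectorsModule: the base names refer to a .txt vector file
# plus its .txt.hdr header (filenames hypothetical):
from nipype.interfaces.semtools.utilities.brains import ShuffleVectorsModule

shuffle = ShuffleVectorsModule()
shuffle.inputs.inputVectorFileBaseName = "vectors.txt"  # expects vectors.txt(.hdr)
shuffle.inputs.outputVectorFileBaseName = True
shuffle.inputs.resampleProportion = 3.0  # keep ~1/3 of the vectors, per the desc
res = shuffle.run()
# -----------------------------------------------------------------------------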
@@ -1152,42 +1212,45 @@ class ImageRegionPlotter(SEMLikeCommandLine):

 class fcsv_to_hdf5InputSpec(CommandLineInputSpec):
     versionID = traits.Str(
-        desc=
-        ", Current version ID. It should be match with the version of BCD that will be using the output model file, ",
-        argstr="--versionID %s")
+        desc=", Current version ID. It should match the version of BCD that will be using the output model file, ",
+        argstr="--versionID %s",
+    )
     landmarksInformationFile = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
         desc=", name of HDF5 file to write matrices into, ",
-        argstr="--landmarksInformationFile %s")
+        argstr="--landmarksInformationFile %s",
+    )
     landmarkTypesList = File(
         desc=", file containing list of landmark types, ",
         exists=True,
-        argstr="--landmarkTypesList %s")
+        argstr="--landmarkTypesList %s",
+    )
     modelFile = traits.Either(
         traits.Bool,
         File(),
         hash_files=False,
-        desc=
-        ", name of HDF5 file containing BRAINSConstellationDetector Model file (LLSMatrices, LLSMeans and LLSSearchRadii), ",
-        argstr="--modelFile %s")
+        desc=", name of HDF5 file containing BRAINSConstellationDetector Model file (LLSMatrices, LLSMeans and LLSSearchRadii), ",
+        argstr="--modelFile %s",
+    )
     landmarkGlobPattern = traits.Str(
-        desc="Glob pattern to select fcsv files",
-        argstr="--landmarkGlobPattern %s")
+        desc="Glob pattern to select fcsv files", argstr="--landmarkGlobPattern %s"
+    )
     numberOfThreads = traits.Int(
         desc="Explicitly specify the maximum number of threads to use.",
-        argstr="--numberOfThreads %d")
+        argstr="--numberOfThreads %d",
+    )


 class fcsv_to_hdf5OutputSpec(TraitedSpec):
     landmarksInformationFile = File(
-        desc=", name of HDF5 file to write matrices into, ",
-        exists=True)
+        desc=", name of HDF5 file to write matrices into, ", exists=True
+    )
     modelFile = File(
-        desc=
-        ", name of HDF5 file containing BRAINSConstellationDetector Model file (LLSMatrices, LLSMeans and LLSSearchRadii), ",
-        exists=True)
+        desc=", name of HDF5 file containing BRAINSConstellationDetector Model file (LLSMatrices, LLSMeans and LLSSearchRadii), ",
+        exists=True,
+    )


 class fcsv_to_hdf5(SEMLikeCommandLine):
@@ -1203,8 +1266,8 @@ class fcsv_to_hdf5(SEMLikeCommandLine):
     output_spec = fcsv_to_hdf5OutputSpec
     _cmd = " fcsv_to_hdf5 "
     _outputs_filenames = {
-        'modelFile': 'modelFile',
-        'landmarksInformationFile': 'landmarksInformationFile.h5'
+        "modelFile": "modelFile",
+        "landmarksInformationFile": "landmarksInformationFile.h5",
    }
     _redirect_x = False
@@ -1213,38 +1276,38 @@ class FindCenterOfBrainInputSpec(CommandLineInputSpec):
     inputVolume = File(
         desc="The image in which to find the center.",
         exists=True,
-        argstr="--inputVolume %s")
+        argstr="--inputVolume %s",
+    )
     imageMask = File(exists=True, argstr="--imageMask %s")
     clippedImageMask = traits.Either(
-        traits.Bool, File(), hash_files=False, argstr="--clippedImageMask %s")
+        traits.Bool, File(), hash_files=False, argstr="--clippedImageMask %s"
+    )
     maximize = traits.Bool(argstr="--maximize ")
     axis = traits.Int(argstr="--axis %d")
-    otsuPercentileThreshold = traits.Float(
-        argstr="--otsuPercentileThreshold %f")
+    otsuPercentileThreshold = traits.Float(argstr="--otsuPercentileThreshold %f")
     closingSize = traits.Int(argstr="--closingSize %d")
     headSizeLimit = traits.Float(argstr="--headSizeLimit %f")
     headSizeEstimate = traits.Float(argstr="--headSizeEstimate %f")
     backgroundValue = traits.Int(argstr="--backgroundValue %d")
     generateDebugImages = traits.Bool(argstr="--generateDebugImages ")
     debugDistanceImage = traits.Either(
-        traits.Bool,
-        File(),
-        hash_files=False,
-        argstr="--debugDistanceImage %s")
+        traits.Bool, File(), hash_files=False, argstr="--debugDistanceImage %s"
+    )
     debugGridImage = traits.Either(
-        traits.Bool, File(),
hash_files=False, argstr="--debugGridImage %s") + traits.Bool, File(), hash_files=False, argstr="--debugGridImage %s" + ) debugAfterGridComputationsForegroundImage = traits.Either( traits.Bool, File(), hash_files=False, - argstr="--debugAfterGridComputationsForegroundImage %s") + argstr="--debugAfterGridComputationsForegroundImage %s", + ) debugClippedImageMask = traits.Either( - traits.Bool, - File(), - hash_files=False, - argstr="--debugClippedImageMask %s") + traits.Bool, File(), hash_files=False, argstr="--debugClippedImageMask %s" + ) debugTrimmedImage = traits.Either( - traits.Bool, File(), hash_files=False, argstr="--debugTrimmedImage %s") + traits.Bool, File(), hash_files=False, argstr="--debugTrimmedImage %s" + ) class FindCenterOfBrainOutputSpec(TraitedSpec): @@ -1277,17 +1340,11 @@ class FindCenterOfBrain(SEMLikeCommandLine): output_spec = FindCenterOfBrainOutputSpec _cmd = " FindCenterOfBrain " _outputs_filenames = { - 'debugClippedImageMask': - 'debugClippedImageMask.nii', - 'debugTrimmedImage': - 'debugTrimmedImage.nii', - 'debugDistanceImage': - 'debugDistanceImage.nii', - 'debugGridImage': - 'debugGridImage.nii', - 'clippedImageMask': - 'clippedImageMask.nii', - 'debugAfterGridComputationsForegroundImage': - 'debugAfterGridComputationsForegroundImage.nii' + "debugClippedImageMask": "debugClippedImageMask.nii", + "debugTrimmedImage": "debugTrimmedImage.nii", + "debugDistanceImage": "debugDistanceImage.nii", + "debugGridImage": "debugGridImage.nii", + "clippedImageMask": "clippedImageMask.nii", + "debugAfterGridComputationsForegroundImage": "debugAfterGridComputationsForegroundImage.nii", } _redirect_x = False diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSAlignMSP.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSAlignMSP.py index b5913daba3..2dabdd4b6d 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSAlignMSP.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSAlignMSP.py @@ -4,47 +4,32 @@ def test_BRAINSAlignMSP_inputs(): input_map = dict( - BackgroundFillValue=dict(argstr='--BackgroundFillValue %s', ), - OutputresampleMSP=dict( - argstr='--OutputresampleMSP %s', - hash_files=False, - ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - interpolationMode=dict(argstr='--interpolationMode %s', ), - mspQualityLevel=dict(argstr='--mspQualityLevel %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - rescaleIntensities=dict(argstr='--rescaleIntensities ', ), + BackgroundFillValue=dict(argstr="--BackgroundFillValue %s",), + OutputresampleMSP=dict(argstr="--OutputresampleMSP %s", hash_files=False,), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + interpolationMode=dict(argstr="--interpolationMode %s",), + mspQualityLevel=dict(argstr="--mspQualityLevel %d",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + rescaleIntensities=dict(argstr="--rescaleIntensities ",), rescaleIntensitiesOutputRange=dict( - argstr='--rescaleIntensitiesOutputRange %s', - sep=',', - ), - resultsDir=dict( - argstr='--resultsDir %s', - hash_files=False, + argstr="--rescaleIntensitiesOutputRange %s", sep=",", ), - trimRescaledIntensities=dict(argstr='--trimRescaledIntensities %f', ), - verbose=dict(argstr='--verbose ', ), - writedebuggingImagesLevel=dict( - argstr='--writedebuggingImagesLevel %d', ), + 
resultsDir=dict(argstr="--resultsDir %s", hash_files=False,), + trimRescaledIntensities=dict(argstr="--trimRescaledIntensities %f",), + verbose=dict(argstr="--verbose ",), + writedebuggingImagesLevel=dict(argstr="--writedebuggingImagesLevel %d",), ) inputs = BRAINSAlignMSP.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSAlignMSP_outputs(): - output_map = dict( - OutputresampleMSP=dict(extensions=None, ), - resultsDir=dict(), - ) + output_map = dict(OutputresampleMSP=dict(extensions=None,), resultsDir=dict(),) outputs = BRAINSAlignMSP.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSClipInferior.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSClipInferior.py index 39fb8fe023..76958e0b2f 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSClipInferior.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSClipInferior.py @@ -4,30 +4,23 @@ def test_BRAINSClipInferior_inputs(): input_map = dict( - BackgroundFillValue=dict(argstr='--BackgroundFillValue %s', ), - acLowerBound=dict(argstr='--acLowerBound %f', ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + BackgroundFillValue=dict(argstr="--BackgroundFillValue %s",), + acLowerBound=dict(argstr="--acLowerBound %f",), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), ) inputs = BRAINSClipInferior.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSClipInferior_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = BRAINSClipInferior.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSConstellationModeler.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSConstellationModeler.py index 0edcebc668..61129f62cc 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSConstellationModeler.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSConstellationModeler.py @@ -4,49 +4,35 @@ def test_BRAINSConstellationModeler_inputs(): input_map = dict( - BackgroundFillValue=dict(argstr='--BackgroundFillValue %s', ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputTrainingList=dict( - argstr='--inputTrainingList %s', - extensions=None, - ), - mspQualityLevel=dict(argstr='--mspQualityLevel %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + BackgroundFillValue=dict(argstr="--BackgroundFillValue %s",), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputTrainingList=dict(argstr="--inputTrainingList %s", extensions=None,), + mspQualityLevel=dict(argstr="--mspQualityLevel %d",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), 
optimizedLandmarksFilenameExtender=dict( - argstr='--optimizedLandmarksFilenameExtender %s', ), - outputModel=dict( - argstr='--outputModel %s', - hash_files=False, + argstr="--optimizedLandmarksFilenameExtender %s", ), - rescaleIntensities=dict(argstr='--rescaleIntensities ', ), + outputModel=dict(argstr="--outputModel %s", hash_files=False,), + rescaleIntensities=dict(argstr="--rescaleIntensities ",), rescaleIntensitiesOutputRange=dict( - argstr='--rescaleIntensitiesOutputRange %s', - sep=',', - ), - resultsDir=dict( - argstr='--resultsDir %s', - hash_files=False, + argstr="--rescaleIntensitiesOutputRange %s", sep=",", ), - saveOptimizedLandmarks=dict(argstr='--saveOptimizedLandmarks ', ), - trimRescaledIntensities=dict(argstr='--trimRescaledIntensities %f', ), - verbose=dict(argstr='--verbose ', ), - writedebuggingImagesLevel=dict( - argstr='--writedebuggingImagesLevel %d', ), + resultsDir=dict(argstr="--resultsDir %s", hash_files=False,), + saveOptimizedLandmarks=dict(argstr="--saveOptimizedLandmarks ",), + trimRescaledIntensities=dict(argstr="--trimRescaledIntensities %f",), + verbose=dict(argstr="--verbose ",), + writedebuggingImagesLevel=dict(argstr="--writedebuggingImagesLevel %d",), ) inputs = BRAINSConstellationModeler.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSConstellationModeler_outputs(): - output_map = dict( - outputModel=dict(extensions=None, ), - resultsDir=dict(), - ) + output_map = dict(outputModel=dict(extensions=None,), resultsDir=dict(),) outputs = BRAINSConstellationModeler.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSEyeDetector.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSEyeDetector.py index 9c1fe4a993..8ac3c8a21a 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSEyeDetector.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSEyeDetector.py @@ -4,29 +4,22 @@ def test_BRAINSEyeDetector_inputs(): input_map = dict( - args=dict(argstr='%s', ), - debugDir=dict(argstr='--debugDir %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + args=dict(argstr="%s",), + debugDir=dict(argstr="--debugDir %s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), ) inputs = BRAINSEyeDetector.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSEyeDetector_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = BRAINSEyeDetector.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSInitializedControlPoints.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSInitializedControlPoints.py index 66accefbee..cf20b5a37b 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSInitializedControlPoints.py +++ 
b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSInitializedControlPoints.py @@ -4,37 +4,24 @@ def test_BRAINSInitializedControlPoints_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputLandmarksFile=dict(argstr='--outputLandmarksFile %s', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - permuteOrder=dict( - argstr='--permuteOrder %s', - sep=',', - ), - splineGridSize=dict( - argstr='--splineGridSize %s', - sep=',', - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputLandmarksFile=dict(argstr="--outputLandmarksFile %s",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + permuteOrder=dict(argstr="--permuteOrder %s", sep=",",), + splineGridSize=dict(argstr="--splineGridSize %s", sep=",",), ) inputs = BRAINSInitializedControlPoints.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSInitializedControlPoints_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = BRAINSInitializedControlPoints.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLandmarkInitializer.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLandmarkInitializer.py index dd39f41295..cdb17c6232 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLandmarkInitializer.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLandmarkInitializer.py @@ -4,26 +4,17 @@ def test_BRAINSLandmarkInitializer_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), inputFixedLandmarkFilename=dict( - argstr='--inputFixedLandmarkFilename %s', - extensions=None, + argstr="--inputFixedLandmarkFilename %s", extensions=None, ), inputMovingLandmarkFilename=dict( - argstr='--inputMovingLandmarkFilename %s', - extensions=None, - ), - inputWeightFilename=dict( - argstr='--inputWeightFilename %s', - extensions=None, + argstr="--inputMovingLandmarkFilename %s", extensions=None, ), + inputWeightFilename=dict(argstr="--inputWeightFilename %s", extensions=None,), outputTransformFilename=dict( - argstr='--outputTransformFilename %s', - hash_files=False, + argstr="--outputTransformFilename %s", hash_files=False, ), ) inputs = BRAINSLandmarkInitializer.input_spec() @@ -31,8 +22,10 @@ def test_BRAINSLandmarkInitializer_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSLandmarkInitializer_outputs(): - output_map = dict(outputTransformFilename=dict(extensions=None, ), ) + output_map = dict(outputTransformFilename=dict(extensions=None,),) outputs = BRAINSLandmarkInitializer.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLinearModelerEPCA.py 
b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLinearModelerEPCA.py index 6692e342a0..a9fdee554b 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLinearModelerEPCA.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLinearModelerEPCA.py @@ -4,22 +4,18 @@ def test_BRAINSLinearModelerEPCA_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputTrainingList=dict( - argstr='--inputTrainingList %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputTrainingList=dict(argstr="--inputTrainingList %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), ) inputs = BRAINSLinearModelerEPCA.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSLinearModelerEPCA_outputs(): output_map = dict() outputs = BRAINSLinearModelerEPCA.output_spec() diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLmkTransform.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLmkTransform.py index 88b0846768..f459589580 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLmkTransform.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLmkTransform.py @@ -4,35 +4,18 @@ def test_BRAINSLmkTransform_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputFixedLandmarks=dict( - argstr='--inputFixedLandmarks %s', - extensions=None, - ), - inputMovingLandmarks=dict( - argstr='--inputMovingLandmarks %s', - extensions=None, - ), - inputMovingVolume=dict( - argstr='--inputMovingVolume %s', - extensions=None, - ), - inputReferenceVolume=dict( - argstr='--inputReferenceVolume %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputFixedLandmarks=dict(argstr="--inputFixedLandmarks %s", extensions=None,), + inputMovingLandmarks=dict(argstr="--inputMovingLandmarks %s", extensions=None,), + inputMovingVolume=dict(argstr="--inputMovingVolume %s", extensions=None,), + inputReferenceVolume=dict(argstr="--inputReferenceVolume %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), outputAffineTransform=dict( - argstr='--outputAffineTransform %s', - hash_files=False, + argstr="--outputAffineTransform %s", hash_files=False, ), outputResampledVolume=dict( - argstr='--outputResampledVolume %s', - hash_files=False, + argstr="--outputResampledVolume %s", hash_files=False, ), ) inputs = BRAINSLmkTransform.input_spec() @@ -40,10 +23,12 @@ def test_BRAINSLmkTransform_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSLmkTransform_outputs(): output_map = dict( - outputAffineTransform=dict(extensions=None, ), - outputResampledVolume=dict(extensions=None, ), + outputAffineTransform=dict(extensions=None,), + outputResampledVolume=dict(extensions=None,), ) outputs = BRAINSLmkTransform.output_spec() diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSMush.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSMush.py index a1bde9d454..b16829ef8f 100644 --- 
a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSMush.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSMush.py @@ -4,65 +4,37 @@ def test_BRAINSMush_inputs(): input_map = dict( - args=dict(argstr='%s', ), - boundingBoxSize=dict( - argstr='--boundingBoxSize %s', - sep=',', - ), - boundingBoxStart=dict( - argstr='--boundingBoxStart %s', - sep=',', - ), - desiredMean=dict(argstr='--desiredMean %f', ), - desiredVariance=dict(argstr='--desiredVariance %f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputFirstVolume=dict( - argstr='--inputFirstVolume %s', - extensions=None, - ), - inputMaskVolume=dict( - argstr='--inputMaskVolume %s', - extensions=None, - ), - inputSecondVolume=dict( - argstr='--inputSecondVolume %s', - extensions=None, - ), - lowerThresholdFactor=dict(argstr='--lowerThresholdFactor %f', ), - lowerThresholdFactorPre=dict(argstr='--lowerThresholdFactorPre %f', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputMask=dict( - argstr='--outputMask %s', - hash_files=False, - ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - outputWeightsFile=dict( - argstr='--outputWeightsFile %s', - hash_files=False, - ), - seed=dict( - argstr='--seed %s', - sep=',', - ), - upperThresholdFactor=dict(argstr='--upperThresholdFactor %f', ), - upperThresholdFactorPre=dict(argstr='--upperThresholdFactorPre %f', ), + args=dict(argstr="%s",), + boundingBoxSize=dict(argstr="--boundingBoxSize %s", sep=",",), + boundingBoxStart=dict(argstr="--boundingBoxStart %s", sep=",",), + desiredMean=dict(argstr="--desiredMean %f",), + desiredVariance=dict(argstr="--desiredVariance %f",), + environ=dict(nohash=True, usedefault=True,), + inputFirstVolume=dict(argstr="--inputFirstVolume %s", extensions=None,), + inputMaskVolume=dict(argstr="--inputMaskVolume %s", extensions=None,), + inputSecondVolume=dict(argstr="--inputSecondVolume %s", extensions=None,), + lowerThresholdFactor=dict(argstr="--lowerThresholdFactor %f",), + lowerThresholdFactorPre=dict(argstr="--lowerThresholdFactorPre %f",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputMask=dict(argstr="--outputMask %s", hash_files=False,), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + outputWeightsFile=dict(argstr="--outputWeightsFile %s", hash_files=False,), + seed=dict(argstr="--seed %s", sep=",",), + upperThresholdFactor=dict(argstr="--upperThresholdFactor %f",), + upperThresholdFactorPre=dict(argstr="--upperThresholdFactorPre %f",), ) inputs = BRAINSMush.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSMush_outputs(): output_map = dict( - outputMask=dict(extensions=None, ), - outputVolume=dict(extensions=None, ), - outputWeightsFile=dict(extensions=None, ), + outputMask=dict(extensions=None,), + outputVolume=dict(extensions=None,), + outputWeightsFile=dict(extensions=None,), ) outputs = BRAINSMush.output_spec() diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSSnapShotWriter.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSSnapShotWriter.py index 2118b58ed9..30bf49a6ad 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSSnapShotWriter.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSSnapShotWriter.py @@ -4,41 +4,31 @@ def test_BRAINSSnapShotWriter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - 
nohash=True, - usedefault=True, - ), - inputBinaryVolumes=dict(argstr='--inputBinaryVolumes %s...', ), - inputPlaneDirection=dict( - argstr='--inputPlaneDirection %s', - sep=',', - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputBinaryVolumes=dict(argstr="--inputBinaryVolumes %s...",), + inputPlaneDirection=dict(argstr="--inputPlaneDirection %s", sep=",",), inputSliceToExtractInIndex=dict( - argstr='--inputSliceToExtractInIndex %s', - sep=',', + argstr="--inputSliceToExtractInIndex %s", sep=",", ), inputSliceToExtractInPercent=dict( - argstr='--inputSliceToExtractInPercent %s', - sep=',', + argstr="--inputSliceToExtractInPercent %s", sep=",", ), inputSliceToExtractInPhysicalPoint=dict( - argstr='--inputSliceToExtractInPhysicalPoint %s', - sep=',', - ), - inputVolumes=dict(argstr='--inputVolumes %s...', ), - outputFilename=dict( - argstr='--outputFilename %s', - hash_files=False, + argstr="--inputSliceToExtractInPhysicalPoint %s", sep=",", ), + inputVolumes=dict(argstr="--inputVolumes %s...",), + outputFilename=dict(argstr="--outputFilename %s", hash_files=False,), ) inputs = BRAINSSnapShotWriter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSSnapShotWriter_outputs(): - output_map = dict(outputFilename=dict(extensions=None, ), ) + output_map = dict(outputFilename=dict(extensions=None,),) outputs = BRAINSSnapShotWriter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSTransformConvert.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSTransformConvert.py index 6abf0a9f3f..4316f4561b 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSTransformConvert.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSTransformConvert.py @@ -4,39 +4,26 @@ def test_BRAINSTransformConvert_inputs(): input_map = dict( - args=dict(argstr='%s', ), - displacementVolume=dict( - argstr='--displacementVolume %s', - hash_files=False, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputTransform=dict( - argstr='--inputTransform %s', - extensions=None, - ), - outputPrecisionType=dict(argstr='--outputPrecisionType %s', ), - outputTransform=dict( - argstr='--outputTransform %s', - hash_files=False, - ), - outputTransformType=dict(argstr='--outputTransformType %s', ), - referenceVolume=dict( - argstr='--referenceVolume %s', - extensions=None, - ), + args=dict(argstr="%s",), + displacementVolume=dict(argstr="--displacementVolume %s", hash_files=False,), + environ=dict(nohash=True, usedefault=True,), + inputTransform=dict(argstr="--inputTransform %s", extensions=None,), + outputPrecisionType=dict(argstr="--outputPrecisionType %s",), + outputTransform=dict(argstr="--outputTransform %s", hash_files=False,), + outputTransformType=dict(argstr="--outputTransformType %s",), + referenceVolume=dict(argstr="--referenceVolume %s", extensions=None,), ) inputs = BRAINSTransformConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSTransformConvert_outputs(): output_map = dict( - displacementVolume=dict(extensions=None, ), - outputTransform=dict(extensions=None, ), + displacementVolume=dict(extensions=None,), + outputTransform=dict(extensions=None,), ) outputs = BRAINSTransformConvert.output_spec() 
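# --- Editor's note (illustration, not part of the diff) ---------------------
# Every test_auto_*.py touched in this diff follows the same generated
# pattern: record the expected trait metadata in a dict and assert it against
# the live spec. Condensed, the check they all perform is:
def _check_trait_metadata(interface_class, expected_map):
    spec = interface_class.input_spec()
    for trait_name, metadata in expected_map.items():
        for metakey, value in metadata.items():
            # every recorded metadata value must match the trait definition
            assert getattr(spec.traits()[trait_name], metakey) == value
# -----------------------------------------------------------------------------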
diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSTrimForegroundInDirection.py b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSTrimForegroundInDirection.py index 2bf13debc4..ed3180746c 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSTrimForegroundInDirection.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSTrimForegroundInDirection.py @@ -4,33 +4,26 @@ def test_BRAINSTrimForegroundInDirection_inputs(): input_map = dict( - BackgroundFillValue=dict(argstr='--BackgroundFillValue %s', ), - args=dict(argstr='%s', ), - closingSize=dict(argstr='--closingSize %d', ), - directionCode=dict(argstr='--directionCode %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - headSizeLimit=dict(argstr='--headSizeLimit %f', ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - otsuPercentileThreshold=dict(argstr='--otsuPercentileThreshold %f', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), + BackgroundFillValue=dict(argstr="--BackgroundFillValue %s",), + args=dict(argstr="%s",), + closingSize=dict(argstr="--closingSize %d",), + directionCode=dict(argstr="--directionCode %d",), + environ=dict(nohash=True, usedefault=True,), + headSizeLimit=dict(argstr="--headSizeLimit %f",), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + otsuPercentileThreshold=dict(argstr="--otsuPercentileThreshold %f",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), ) inputs = BRAINSTrimForegroundInDirection.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSTrimForegroundInDirection_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = BRAINSTrimForegroundInDirection.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_CleanUpOverlapLabels.py b/nipype/interfaces/semtools/utilities/tests/test_auto_CleanUpOverlapLabels.py index fe56cb0e38..b7a9167092 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_CleanUpOverlapLabels.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_CleanUpOverlapLabels.py @@ -4,15 +4,11 @@ def test_CleanUpOverlapLabels_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputBinaryVolumes=dict(argstr='--inputBinaryVolumes %s...', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputBinaryVolumes=dict(argstr="--inputBinaryVolumes %s...",), outputBinaryVolumes=dict( - argstr='--outputBinaryVolumes %s...', - hash_files=False, + argstr="--outputBinaryVolumes %s...", hash_files=False, ), ) inputs = CleanUpOverlapLabels.input_spec() @@ -20,8 +16,10 @@ def test_CleanUpOverlapLabels_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CleanUpOverlapLabels_outputs(): - output_map = dict(outputBinaryVolumes=dict(), ) + output_map = dict(outputBinaryVolumes=dict(),) outputs = CleanUpOverlapLabels.output_spec() for key, metadata in list(output_map.items()): diff --git 
a/nipype/interfaces/semtools/utilities/tests/test_auto_FindCenterOfBrain.py b/nipype/interfaces/semtools/utilities/tests/test_auto_FindCenterOfBrain.py index 701eb2438e..bf2d9ab03d 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_FindCenterOfBrain.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_FindCenterOfBrain.py @@ -4,65 +4,44 @@ def test_FindCenterOfBrain_inputs(): input_map = dict( - args=dict(argstr='%s', ), - axis=dict(argstr='--axis %d', ), - backgroundValue=dict(argstr='--backgroundValue %d', ), - clippedImageMask=dict( - argstr='--clippedImageMask %s', - hash_files=False, - ), - closingSize=dict(argstr='--closingSize %d', ), + args=dict(argstr="%s",), + axis=dict(argstr="--axis %d",), + backgroundValue=dict(argstr="--backgroundValue %d",), + clippedImageMask=dict(argstr="--clippedImageMask %s", hash_files=False,), + closingSize=dict(argstr="--closingSize %d",), debugAfterGridComputationsForegroundImage=dict( - argstr='--debugAfterGridComputationsForegroundImage %s', - hash_files=False, + argstr="--debugAfterGridComputationsForegroundImage %s", hash_files=False, ), debugClippedImageMask=dict( - argstr='--debugClippedImageMask %s', - hash_files=False, - ), - debugDistanceImage=dict( - argstr='--debugDistanceImage %s', - hash_files=False, - ), - debugGridImage=dict( - argstr='--debugGridImage %s', - hash_files=False, - ), - debugTrimmedImage=dict( - argstr='--debugTrimmedImage %s', - hash_files=False, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - generateDebugImages=dict(argstr='--generateDebugImages ', ), - headSizeEstimate=dict(argstr='--headSizeEstimate %f', ), - headSizeLimit=dict(argstr='--headSizeLimit %f', ), - imageMask=dict( - argstr='--imageMask %s', - extensions=None, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - maximize=dict(argstr='--maximize ', ), - otsuPercentileThreshold=dict(argstr='--otsuPercentileThreshold %f', ), + argstr="--debugClippedImageMask %s", hash_files=False, + ), + debugDistanceImage=dict(argstr="--debugDistanceImage %s", hash_files=False,), + debugGridImage=dict(argstr="--debugGridImage %s", hash_files=False,), + debugTrimmedImage=dict(argstr="--debugTrimmedImage %s", hash_files=False,), + environ=dict(nohash=True, usedefault=True,), + generateDebugImages=dict(argstr="--generateDebugImages ",), + headSizeEstimate=dict(argstr="--headSizeEstimate %f",), + headSizeLimit=dict(argstr="--headSizeLimit %f",), + imageMask=dict(argstr="--imageMask %s", extensions=None,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + maximize=dict(argstr="--maximize ",), + otsuPercentileThreshold=dict(argstr="--otsuPercentileThreshold %f",), ) inputs = FindCenterOfBrain.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FindCenterOfBrain_outputs(): output_map = dict( - clippedImageMask=dict(extensions=None, ), - debugAfterGridComputationsForegroundImage=dict(extensions=None, ), - debugClippedImageMask=dict(extensions=None, ), - debugDistanceImage=dict(extensions=None, ), - debugGridImage=dict(extensions=None, ), - debugTrimmedImage=dict(extensions=None, ), + clippedImageMask=dict(extensions=None,), + debugAfterGridComputationsForegroundImage=dict(extensions=None,), + debugClippedImageMask=dict(extensions=None,), + debugDistanceImage=dict(extensions=None,), + debugGridImage=dict(extensions=None,), + debugTrimmedImage=dict(extensions=None,), ) outputs = 
FindCenterOfBrain.output_spec() diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_GenerateLabelMapFromProbabilityMap.py b/nipype/interfaces/semtools/utilities/tests/test_auto_GenerateLabelMapFromProbabilityMap.py index f2a89bbe6a..289473e902 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_GenerateLabelMapFromProbabilityMap.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_GenerateLabelMapFromProbabilityMap.py @@ -4,25 +4,21 @@ def test_GenerateLabelMapFromProbabilityMap_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolumes=dict(argstr='--inputVolumes %s...', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputLabelVolume=dict( - argstr='--outputLabelVolume %s', - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolumes=dict(argstr="--inputVolumes %s...",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputLabelVolume=dict(argstr="--outputLabelVolume %s", hash_files=False,), ) inputs = GenerateLabelMapFromProbabilityMap.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GenerateLabelMapFromProbabilityMap_outputs(): - output_map = dict(outputLabelVolume=dict(extensions=None, ), ) + output_map = dict(outputLabelVolume=dict(extensions=None,),) outputs = GenerateLabelMapFromProbabilityMap.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_ImageRegionPlotter.py b/nipype/interfaces/semtools/utilities/tests/test_auto_ImageRegionPlotter.py index e51aa52502..7d9f72aedb 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_ImageRegionPlotter.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_ImageRegionPlotter.py @@ -4,39 +4,25 @@ def test_ImageRegionPlotter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputBinaryROIVolume=dict( - argstr='--inputBinaryROIVolume %s', - extensions=None, - ), - inputLabelVolume=dict( - argstr='--inputLabelVolume %s', - extensions=None, - ), - inputVolume1=dict( - argstr='--inputVolume1 %s', - extensions=None, - ), - inputVolume2=dict( - argstr='--inputVolume2 %s', - extensions=None, - ), - numberOfHistogramBins=dict(argstr='--numberOfHistogramBins %d', ), - outputJointHistogramData=dict( - argstr='--outputJointHistogramData %s', ), - useIntensityForHistogram=dict(argstr='--useIntensityForHistogram ', ), - useROIAUTO=dict(argstr='--useROIAUTO ', ), - verbose=dict(argstr='--verbose ', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputBinaryROIVolume=dict(argstr="--inputBinaryROIVolume %s", extensions=None,), + inputLabelVolume=dict(argstr="--inputLabelVolume %s", extensions=None,), + inputVolume1=dict(argstr="--inputVolume1 %s", extensions=None,), + inputVolume2=dict(argstr="--inputVolume2 %s", extensions=None,), + numberOfHistogramBins=dict(argstr="--numberOfHistogramBins %d",), + outputJointHistogramData=dict(argstr="--outputJointHistogramData %s",), + useIntensityForHistogram=dict(argstr="--useIntensityForHistogram ",), + useROIAUTO=dict(argstr="--useROIAUTO ",), + verbose=dict(argstr="--verbose ",), ) inputs = ImageRegionPlotter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value + + def test_ImageRegionPlotter_outputs(): output_map = dict() outputs = ImageRegionPlotter.output_spec() diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_JointHistogram.py b/nipype/interfaces/semtools/utilities/tests/test_auto_JointHistogram.py index ec583132b6..6ac46c3695 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_JointHistogram.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_JointHistogram.py @@ -4,36 +4,26 @@ def test_JointHistogram_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), inputMaskVolumeInXAxis=dict( - argstr='--inputMaskVolumeInXAxis %s', - extensions=None, + argstr="--inputMaskVolumeInXAxis %s", extensions=None, ), inputMaskVolumeInYAxis=dict( - argstr='--inputMaskVolumeInYAxis %s', - extensions=None, - ), - inputVolumeInXAxis=dict( - argstr='--inputVolumeInXAxis %s', - extensions=None, - ), - inputVolumeInYAxis=dict( - argstr='--inputVolumeInYAxis %s', - extensions=None, + argstr="--inputMaskVolumeInYAxis %s", extensions=None, ), - outputJointHistogramImage=dict( - argstr='--outputJointHistogramImage %s', ), - verbose=dict(argstr='--verbose ', ), + inputVolumeInXAxis=dict(argstr="--inputVolumeInXAxis %s", extensions=None,), + inputVolumeInYAxis=dict(argstr="--inputVolumeInYAxis %s", extensions=None,), + outputJointHistogramImage=dict(argstr="--outputJointHistogramImage %s",), + verbose=dict(argstr="--verbose ",), ) inputs = JointHistogram.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_JointHistogram_outputs(): output_map = dict() outputs = JointHistogram.output_spec() diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_ShuffleVectorsModule.py b/nipype/interfaces/semtools/utilities/tests/test_auto_ShuffleVectorsModule.py index 3343270e9d..f3541344a0 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_ShuffleVectorsModule.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_ShuffleVectorsModule.py @@ -4,28 +4,25 @@ def test_ShuffleVectorsModule_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), inputVectorFileBaseName=dict( - argstr='--inputVectorFileBaseName %s', - extensions=None, + argstr="--inputVectorFileBaseName %s", extensions=None, ), outputVectorFileBaseName=dict( - argstr='--outputVectorFileBaseName %s', - hash_files=False, + argstr="--outputVectorFileBaseName %s", hash_files=False, ), - resampleProportion=dict(argstr='--resampleProportion %f', ), + resampleProportion=dict(argstr="--resampleProportion %f",), ) inputs = ShuffleVectorsModule.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ShuffleVectorsModule_outputs(): - output_map = dict(outputVectorFileBaseName=dict(extensions=None, ), ) + output_map = dict(outputVectorFileBaseName=dict(extensions=None,),) outputs = ShuffleVectorsModule.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_fcsv_to_hdf5.py b/nipype/interfaces/semtools/utilities/tests/test_auto_fcsv_to_hdf5.py index 
afd2d091ce..f64c302328 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_fcsv_to_hdf5.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_fcsv_to_hdf5.py @@ -4,36 +4,28 @@ def test_fcsv_to_hdf5_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - landmarkGlobPattern=dict(argstr='--landmarkGlobPattern %s', ), - landmarkTypesList=dict( - argstr='--landmarkTypesList %s', - extensions=None, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + landmarkGlobPattern=dict(argstr="--landmarkGlobPattern %s",), + landmarkTypesList=dict(argstr="--landmarkTypesList %s", extensions=None,), landmarksInformationFile=dict( - argstr='--landmarksInformationFile %s', - hash_files=False, - ), - modelFile=dict( - argstr='--modelFile %s', - hash_files=False, + argstr="--landmarksInformationFile %s", hash_files=False, ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - versionID=dict(argstr='--versionID %s', ), + modelFile=dict(argstr="--modelFile %s", hash_files=False,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + versionID=dict(argstr="--versionID %s",), ) inputs = fcsv_to_hdf5.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_fcsv_to_hdf5_outputs(): output_map = dict( - landmarksInformationFile=dict(extensions=None, ), - modelFile=dict(extensions=None, ), + landmarksInformationFile=dict(extensions=None,), + modelFile=dict(extensions=None,), ) outputs = fcsv_to_hdf5.output_spec() diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_insertMidACPCpoint.py b/nipype/interfaces/semtools/utilities/tests/test_auto_insertMidACPCpoint.py index fa2335fed6..7e02bfc9e1 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_insertMidACPCpoint.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_insertMidACPCpoint.py @@ -4,27 +4,20 @@ def test_insertMidACPCpoint_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputLandmarkFile=dict( - argstr='--inputLandmarkFile %s', - extensions=None, - ), - outputLandmarkFile=dict( - argstr='--outputLandmarkFile %s', - hash_files=False, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputLandmarkFile=dict(argstr="--inputLandmarkFile %s", extensions=None,), + outputLandmarkFile=dict(argstr="--outputLandmarkFile %s", hash_files=False,), ) inputs = insertMidACPCpoint.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_insertMidACPCpoint_outputs(): - output_map = dict(outputLandmarkFile=dict(extensions=None, ), ) + output_map = dict(outputLandmarkFile=dict(extensions=None,),) outputs = insertMidACPCpoint.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_landmarksConstellationAligner.py b/nipype/interfaces/semtools/utilities/tests/test_auto_landmarksConstellationAligner.py index 098cdfa613..79fedd5b68 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_landmarksConstellationAligner.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_landmarksConstellationAligner.py @@ -4,18 +4,11 @@ def test_landmarksConstellationAligner_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( 
- nohash=True, - usedefault=True, - ), - inputLandmarksPaired=dict( - argstr='--inputLandmarksPaired %s', - extensions=None, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputLandmarksPaired=dict(argstr="--inputLandmarksPaired %s", extensions=None,), outputLandmarksPaired=dict( - argstr='--outputLandmarksPaired %s', - hash_files=False, + argstr="--outputLandmarksPaired %s", hash_files=False, ), ) inputs = landmarksConstellationAligner.input_spec() @@ -23,8 +16,10 @@ def test_landmarksConstellationAligner_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_landmarksConstellationAligner_outputs(): - output_map = dict(outputLandmarksPaired=dict(extensions=None, ), ) + output_map = dict(outputLandmarksPaired=dict(extensions=None,),) outputs = landmarksConstellationAligner.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/semtools/utilities/tests/test_auto_landmarksConstellationWeights.py b/nipype/interfaces/semtools/utilities/tests/test_auto_landmarksConstellationWeights.py index 702f8b6266..5bd27aa957 100644 --- a/nipype/interfaces/semtools/utilities/tests/test_auto_landmarksConstellationWeights.py +++ b/nipype/interfaces/semtools/utilities/tests/test_auto_landmarksConstellationWeights.py @@ -4,35 +4,22 @@ def test_landmarksConstellationWeights_inputs(): input_map = dict( - LLSModel=dict( - argstr='--LLSModel %s', - extensions=None, - ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputTemplateModel=dict( - argstr='--inputTemplateModel %s', - extensions=None, - ), - inputTrainingList=dict( - argstr='--inputTrainingList %s', - extensions=None, - ), - outputWeightsList=dict( - argstr='--outputWeightsList %s', - hash_files=False, - ), + LLSModel=dict(argstr="--LLSModel %s", extensions=None,), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputTemplateModel=dict(argstr="--inputTemplateModel %s", extensions=None,), + inputTrainingList=dict(argstr="--inputTrainingList %s", extensions=None,), + outputWeightsList=dict(argstr="--outputWeightsList %s", hash_files=False,), ) inputs = landmarksConstellationWeights.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_landmarksConstellationWeights_outputs(): - output_map = dict(outputWeightsList=dict(extensions=None, ), ) + output_map = dict(outputWeightsList=dict(extensions=None,),) outputs = landmarksConstellationWeights.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/__init__.py b/nipype/interfaces/slicer/__init__.py index 5198d81be3..bef4698d03 100644 --- a/nipype/interfaces/slicer/__init__.py +++ b/nipype/interfaces/slicer/__init__.py @@ -3,8 +3,14 @@ from .segmentation import * from .filtering import * from .utilities import EMSegmentTransformToNewFormat -from .surface import (MergeModels, ModelToLabelMap, GrayscaleModelMaker, - ProbeVolumeWithModel, LabelMapSmoothing, ModelMaker) +from .surface import ( + MergeModels, + ModelToLabelMap, + GrayscaleModelMaker, + ProbeVolumeWithModel, + LabelMapSmoothing, + ModelMaker, +) from .quantification import * from .legacy import * from .registration import * diff --git a/nipype/interfaces/slicer/converters.py b/nipype/interfaces/slicer/converters.py index e93b994110..cc477e99d0 
100644 --- a/nipype/interfaces/slicer/converters.py +++ b/nipype/interfaces/slicer/converters.py @@ -3,7 +3,18 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os @@ -11,36 +22,40 @@ class DicomToNrrdConverterInputSpec(CommandLineInputSpec): inputDicomDirectory = Directory( desc="Directory holding Dicom series", exists=True, - argstr="--inputDicomDirectory %s") + argstr="--inputDicomDirectory %s", + ) outputDirectory = traits.Either( traits.Bool, Directory(), hash_files=False, desc="Directory holding the output NRRD format", - argstr="--outputDirectory %s") + argstr="--outputDirectory %s", + ) outputVolume = traits.Str( - desc="Output filename (.nhdr or .nrrd)", argstr="--outputVolume %s") + desc="Output filename (.nhdr or .nrrd)", argstr="--outputVolume %s" + ) smallGradientThreshold = traits.Float( - desc= - "If a gradient magnitude is greater than 0 and less than smallGradientThreshold, then DicomToNrrdConverter will display an error message and quit, unless the useBMatrixGradientDirections option is set.", - argstr="--smallGradientThreshold %f") + desc="If a gradient magnitude is greater than 0 and less than smallGradientThreshold, then DicomToNrrdConverter will display an error message and quit, unless the useBMatrixGradientDirections option is set.", + argstr="--smallGradientThreshold %f", + ) writeProtocolGradientsFile = traits.Bool( - desc= - "Write the protocol gradients to a file suffixed by \'.txt\' as they were specified in the procol by multiplying each diffusion gradient direction by the measurement frame. This file is for debugging purposes only, the format is not fixed, and will likely change as debugging of new dicom formats is necessary.", - argstr="--writeProtocolGradientsFile ") + desc="Write the protocol gradients to a file suffixed by '.txt' as they were specified in the protocol by multiplying each diffusion gradient direction by the measurement frame. This file is for debugging purposes only, the format is not fixed, and will likely change as debugging of new dicom formats is necessary.", + argstr="--writeProtocolGradientsFile ", + ) useIdentityMeaseurementFrame = traits.Bool( - desc= - "Adjust all the gradients so that the measurement frame is an identity matrix.", - argstr="--useIdentityMeaseurementFrame ") + desc="Adjust all the gradients so that the measurement frame is an identity matrix.", + argstr="--useIdentityMeaseurementFrame ", + ) useBMatrixGradientDirections = traits.Bool( - desc= - "Fill the nhdr header with the gradient directions and bvalues computed out of the BMatrix. Only changes behavior for Siemens data.", - argstr="--useBMatrixGradientDirections ") + desc="Fill the nhdr header with the gradient directions and bvalues computed out of the BMatrix. 
Only changes behavior for Siemens data.", + argstr="--useBMatrixGradientDirections ", + ) class DicomToNrrdConverterOutputSpec(TraitedSpec): outputDirectory = Directory( - desc="Directory holding the output NRRD format", exists=True) + desc="Directory holding the output NRRD format", exists=True + ) class DicomToNrrdConverter(SEMLikeCommandLine): @@ -65,19 +80,19 @@ class DicomToNrrdConverter(SEMLikeCommandLine): input_spec = DicomToNrrdConverterInputSpec output_spec = DicomToNrrdConverterOutputSpec _cmd = "DicomToNrrdConverter " - _outputs_filenames = {'outputDirectory': 'outputDirectory'} + _outputs_filenames = {"outputDirectory": "outputDirectory"} class OrientScalarVolumeInputSpec(CommandLineInputSpec): - inputVolume1 = File( - position=-2, desc="Input volume 1", exists=True, argstr="%s") + inputVolume1 = File(position=-2, desc="Input volume 1", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="The oriented volume", - argstr="%s") + argstr="%s", + ) orientation = traits.Enum( "Axial", "Coronal", @@ -131,7 +146,8 @@ class OrientScalarVolumeInputSpec(CommandLineInputSpec): "AIL", "ASL", desc="Orientation choices", - argstr="--orientation %s") + argstr="--orientation %s", + ) class OrientScalarVolumeOutputSpec(TraitedSpec): @@ -158,4 +174,4 @@ class OrientScalarVolume(SEMLikeCommandLine): input_spec = OrientScalarVolumeInputSpec output_spec = OrientScalarVolumeOutputSpec _cmd = "OrientScalarVolume " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/diffusion/__init__.py b/nipype/interfaces/slicer/diffusion/__init__.py index f6081f6c0c..d7f3089de5 100644 --- a/nipype/interfaces/slicer/diffusion/__init__.py +++ b/nipype/interfaces/slicer/diffusion/__init__.py @@ -1,5 +1,12 @@ # -*- coding: utf-8 -*- from .diffusion import ( - ResampleDTIVolume, DWIRicianLMMSEFilter, TractographyLabelMapSeeding, - DWIJointRicianLMMSEFilter, DiffusionWeightedVolumeMasking, DTIimport, - DWIToDTIEstimation, DiffusionTensorScalarMeasurements, DTIexport) + ResampleDTIVolume, + DWIRicianLMMSEFilter, + TractographyLabelMapSeeding, + DWIJointRicianLMMSEFilter, + DiffusionWeightedVolumeMasking, + DTIimport, + DWIToDTIEstimation, + DiffusionTensorScalarMeasurements, + DTIexport, +) diff --git a/nipype/interfaces/slicer/diffusion/diffusion.py b/nipype/interfaces/slicer/diffusion/diffusion.py index a088d25f8a..8fc0f0c6c9 100644 --- a/nipype/interfaces/slicer/diffusion/diffusion.py +++ b/nipype/interfaces/slicer/diffusion/diffusion.py @@ -3,138 +3,156 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class ResampleDTIVolumeInputSpec(CommandLineInputSpec): inputVolume = File( - position=-2, - desc="Input volume to be resampled", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be resampled", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Resampled Volume", - argstr="%s") + argstr="%s", + ) Reference = File( 
desc="Reference Volume (spacing,size,orientation,origin)", exists=True, - argstr="--Reference %s") + argstr="--Reference %s", + ) transformationFile = File(exists=True, argstr="--transformationFile %s") defField = File( - desc= - "File containing the deformation field (3D vector image containing vectors with 3 components)", + desc="File containing the deformation field (3D vector image containing vectors with 3 components)", exists=True, - argstr="--defField %s") + argstr="--defField %s", + ) hfieldtype = traits.Enum( "displacement", "h-Field", desc="Set if the deformation field is an -Field", - argstr="--hfieldtype %s") + argstr="--hfieldtype %s", + ) interpolation = traits.Enum( "linear", "nn", "ws", "bs", - desc= - "Sampling algorithm (linear , nn (nearest neighborhoor), ws (WindowedSinc), bs (BSpline) )", - argstr="--interpolation %s") + desc="Sampling algorithm (linear , nn (nearest neighborhoor), ws (WindowedSinc), bs (BSpline) )", + argstr="--interpolation %s", + ) correction = traits.Enum( "zero", "none", "abs", "nearest", - desc= - "Correct the tensors if computed tensor is not semi-definite positive", - argstr="--correction %s") + desc="Correct the tensors if computed tensor is not semi-definite positive", + argstr="--correction %s", + ) transform_tensor_method = traits.Enum( "PPD", "FS", - desc= - "Chooses between 2 methods to transform the tensors: Finite Strain (FS), faster but less accurate, or Preservation of the Principal Direction (PPD)", - argstr="--transform_tensor_method %s") + desc="Chooses between 2 methods to transform the tensors: Finite Strain (FS), faster but less accurate, or Preservation of the Principal Direction (PPD)", + argstr="--transform_tensor_method %s", + ) transform_order = traits.Enum( "input-to-output", "output-to-input", desc="Select in what order the transforms are read", - argstr="--transform_order %s") + argstr="--transform_order %s", + ) notbulk = traits.Bool( - desc= - "The transform following the BSpline transform is not set as a bulk transform for the BSpline transform", - argstr="--notbulk ") + desc="The transform following the BSpline transform is not set as a bulk transform for the BSpline transform", + argstr="--notbulk ", + ) spaceChange = traits.Bool( - desc= - "Space Orientation between transform and image is different (RAS/LPS) (warning: if the transform is a Transform Node in Slicer3, do not select)", - argstr="--spaceChange ") + desc="Space Orientation between transform and image is different (RAS/LPS) (warning: if the transform is a Transform Node in Slicer3, do not select)", + argstr="--spaceChange ", + ) rotation_point = traits.List( desc="Center of rotation (only for rigid and affine transforms)", - argstr="--rotation_point %s") + argstr="--rotation_point %s", + ) centered_transform = traits.Bool( - desc= - "Set the center of the transformation to the center of the input image (only for rigid and affine transforms)", - argstr="--centered_transform ") + desc="Set the center of the transformation to the center of the input image (only for rigid and affine transforms)", + argstr="--centered_transform ", + ) image_center = traits.Enum( "input", "output", - desc= - "Image to use to center the transform (used only if \'Centered Transform\' is selected)", - argstr="--image_center %s") + desc="Image to use to center the transform (used only if 'Centered Transform' is selected)", + argstr="--image_center %s", + ) Inverse_ITK_Transformation = traits.Bool( - desc= - "Inverse the transformation before applying it from output image to input 
image (only for rigid and affine transforms)", - argstr="--Inverse_ITK_Transformation ") + desc="Inverse the transformation before applying it from output image to input image (only for rigid and affine transforms)", + argstr="--Inverse_ITK_Transformation ", + ) spacing = InputMultiPath( traits.Float, desc="Spacing along each dimension (0 means use input spacing)", sep=",", - argstr="--spacing %s") + argstr="--spacing %s", + ) size = InputMultiPath( traits.Float, desc="Size along each dimension (0 means use input size)", sep=",", - argstr="--size %s") - origin = traits.List( - desc="Origin of the output Image", argstr="--origin %s") + argstr="--size %s", + ) + origin = traits.List(desc="Origin of the output Image", argstr="--origin %s") direction_matrix = InputMultiPath( traits.Float, - desc= - "9 parameters of the direction matrix by rows (ijk to LPS if LPS transform, ijk to RAS if RAS transform)", + desc="9 parameters of the direction matrix by rows (ijk to LPS if LPS transform, ijk to RAS if RAS transform)", sep=",", - argstr="--direction_matrix %s") + argstr="--direction_matrix %s", + ) number_of_thread = traits.Int( desc="Number of thread used to compute the output image", - argstr="--number_of_thread %d") + argstr="--number_of_thread %d", + ) default_pixel_value = traits.Float( - desc= - "Default pixel value for samples falling outside of the input region", - argstr="--default_pixel_value %f") + desc="Default pixel value for samples falling outside of the input region", + argstr="--default_pixel_value %f", + ) window_function = traits.Enum( "h", "c", "w", "l", "b", - desc= - "Window Function , h = Hamming , c = Cosine , w = Welch , l = Lanczos , b = Blackman", - argstr="--window_function %s") + desc="Window Function , h = Hamming , c = Cosine , w = Welch , l = Lanczos , b = Blackman", + argstr="--window_function %s", + ) spline_order = traits.Int( desc="Spline Order (Spline order may be from 0 to 5)", - argstr="--spline_order %d") + argstr="--spline_order %d", + ) transform_matrix = InputMultiPath( traits.Float, - desc= - "12 parameters of the transform matrix by rows ( --last 3 being translation-- )", + desc="12 parameters of the transform matrix by rows ( --last 3 being translation-- )", sep=",", - argstr="--transform_matrix %s") + argstr="--transform_matrix %s", + ) transform = traits.Enum( "rt", "a", desc="Transform algorithm, rt = Rigid Transform, a = Affine Transform", - argstr="--transform %s") + argstr="--transform %s", + ) class ResampleDTIVolumeOutputSpec(TraitedSpec): @@ -161,45 +179,50 @@ class ResampleDTIVolume(SEMLikeCommandLine): input_spec = ResampleDTIVolumeInputSpec output_spec = ResampleDTIVolumeOutputSpec _cmd = "ResampleDTIVolume " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class DWIRicianLMMSEFilterInputSpec(CommandLineInputSpec): iter = traits.Int( - desc="Number of iterations for the noise removal filter.", - argstr="--iter %d") + desc="Number of iterations for the noise removal filter.", argstr="--iter %d" + ) re = InputMultiPath( - traits.Int, desc="Estimation radius.", sep=",", argstr="--re %s") - rf = InputMultiPath( - traits.Int, desc="Filtering radius.", sep=",", argstr="--rf %s") + traits.Int, desc="Estimation radius.", sep=",", argstr="--re %s" + ) + rf = InputMultiPath(traits.Int, desc="Filtering radius.", sep=",", argstr="--rf %s") mnvf = traits.Int( desc="Minimum number of voxels in kernel used for filtering.", - argstr="--mnvf %d") + argstr="--mnvf %d", + ) mnve = 
traits.Int( desc="Minimum number of voxels in kernel used for estimation.", - argstr="--mnve %d") + argstr="--mnve %d", + ) minnstd = traits.Int( - desc="Minimum allowed noise standard deviation.", - argstr="--minnstd %d") + desc="Minimum allowed noise standard deviation.", argstr="--minnstd %d" + ) maxnstd = traits.Int( - desc="Maximum allowed noise standard deviation.", - argstr="--maxnstd %d") + desc="Maximum allowed noise standard deviation.", argstr="--maxnstd %d" + ) hrf = traits.Float( - desc="How many histogram bins per unit interval.", argstr="--hrf %f") + desc="How many histogram bins per unit interval.", argstr="--hrf %f" + ) uav = traits.Bool( - desc="Use absolute value in case of negative square.", argstr="--uav ") - inputVolume = File( - position=-2, desc="Input DWI volume.", exists=True, argstr="%s") + desc="Use absolute value in case of negative square.", argstr="--uav " + ) + inputVolume = File(position=-2, desc="Input DWI volume.", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output DWI volume.", - argstr="%s") + argstr="%s", + ) compressOutput = traits.Bool( desc="Compress the data of the compressed file using gzip", - argstr="--compressOutput ") + argstr="--compressOutput ", + ) class DWIRicianLMMSEFilterOutputSpec(TraitedSpec): @@ -229,73 +252,79 @@ class DWIRicianLMMSEFilter(SEMLikeCommandLine): input_spec = DWIRicianLMMSEFilterInputSpec output_spec = DWIRicianLMMSEFilterOutputSpec _cmd = "DWIRicianLMMSEFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class TractographyLabelMapSeedingInputSpec(CommandLineInputSpec): - InputVolume = File( - position=-2, desc="Input DTI volume", exists=True, argstr="%s") + InputVolume = File(position=-2, desc="Input DTI volume", exists=True, argstr="%s") inputroi = File( - desc="Label map with seeding ROIs", - exists=True, - argstr="--inputroi %s") + desc="Label map with seeding ROIs", exists=True, argstr="--inputroi %s" + ) OutputFibers = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Tractography result", - argstr="%s") + argstr="%s", + ) useindexspace = traits.Bool( - desc="Seed at IJK voxel grid", argstr="--useindexspace ") + desc="Seed at IJK voxel grid", argstr="--useindexspace " + ) seedspacing = traits.Float( - desc= - "Spacing (in mm) between seed points, only matters if use Use Index Space is off", - argstr="--seedspacing %f") + desc="Spacing (in mm) between seed points, only matters if use Use Index Space is off", + argstr="--seedspacing %f", + ) randomgrid = traits.Bool( - desc="Enable random placing of seeds", argstr="--randomgrid ") + desc="Enable random placing of seeds", argstr="--randomgrid " + ) clthreshold = traits.Float( desc="Minimum Linear Measure for the seeding to start.", - argstr="--clthreshold %f") + argstr="--clthreshold %f", + ) minimumlength = traits.Float( - desc="Minimum length of the fibers (in mm)", - argstr="--minimumlength %f") + desc="Minimum length of the fibers (in mm)", argstr="--minimumlength %f" + ) maximumlength = traits.Float( - desc="Maximum length of fibers (in mm)", argstr="--maximumlength %f") + desc="Maximum length of fibers (in mm)", argstr="--maximumlength %f" + ) stoppingmode = traits.Enum( "LinearMeasure", "FractionalAnisotropy", desc="Tensor measurement used to stop the tractography", - argstr="--stoppingmode %s") + argstr="--stoppingmode %s", + ) stoppingvalue = traits.Float( - desc= - "Tractography will stop 
when the stopping measurement drops below this value", - argstr="--stoppingvalue %f") + desc="Tractography will stop when the stopping measurement drops below this value", + argstr="--stoppingvalue %f", + ) stoppingcurvature = traits.Float( - desc= - "Tractography will stop if radius of curvature becomes smaller than this number units are degrees per mm", - argstr="--stoppingcurvature %f") + desc="Tractography will stop if radius of curvature becomes smaller than this number units are degrees per mm", + argstr="--stoppingcurvature %f", + ) integrationsteplength = traits.Float( desc="Distance between points on the same fiber in mm", - argstr="--integrationsteplength %f") + argstr="--integrationsteplength %f", + ) label = traits.Int( - desc="Label value that defines seeding region.", argstr="--label %d") + desc="Label value that defines seeding region.", argstr="--label %d" + ) writetofile = traits.Bool( - desc="Write fibers to disk or create in the scene?", - argstr="--writetofile ") + desc="Write fibers to disk or create in the scene?", argstr="--writetofile " + ) outputdirectory = traits.Either( traits.Bool, Directory(), hash_files=False, desc="Directory in which to save fiber(s)", - argstr="--outputdirectory %s") + argstr="--outputdirectory %s", + ) name = traits.Str(desc="Name to use for fiber files", argstr="--name %s") class TractographyLabelMapSeedingOutputSpec(TraitedSpec): OutputFibers = File(position=-1, desc="Tractography result", exists=True) - outputdirectory = Directory( - desc="Directory in which to save fiber(s)", exists=True) + outputdirectory = Directory(desc="Directory in which to save fiber(s)", exists=True) class TractographyLabelMapSeeding(SEMLikeCommandLine): @@ -321,32 +350,33 @@ class TractographyLabelMapSeeding(SEMLikeCommandLine): output_spec = TractographyLabelMapSeedingOutputSpec _cmd = "TractographyLabelMapSeeding " _outputs_filenames = { - 'OutputFibers': 'OutputFibers.vtk', - 'outputdirectory': 'outputdirectory' + "OutputFibers": "OutputFibers.vtk", + "outputdirectory": "outputdirectory", } class DWIJointRicianLMMSEFilterInputSpec(CommandLineInputSpec): re = InputMultiPath( - traits.Int, desc="Estimation radius.", sep=",", argstr="--re %s") - rf = InputMultiPath( - traits.Int, desc="Filtering radius.", sep=",", argstr="--rf %s") + traits.Int, desc="Estimation radius.", sep=",", argstr="--re %s" + ) + rf = InputMultiPath(traits.Int, desc="Filtering radius.", sep=",", argstr="--rf %s") ng = traits.Int( - desc= - "The number of the closest gradients that are used to jointly filter a given gradient direction (0 to use all).", - argstr="--ng %d") - inputVolume = File( - position=-2, desc="Input DWI volume.", exists=True, argstr="%s") + desc="The number of the closest gradients that are used to jointly filter a given gradient direction (0 to use all).", + argstr="--ng %d", + ) + inputVolume = File(position=-2, desc="Input DWI volume.", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output DWI volume.", - argstr="%s") + argstr="%s", + ) compressOutput = traits.Bool( desc="Compress the data of the compressed file using gzip", - argstr="--compressOutput ") + argstr="--compressOutput ", + ) class DWIJointRicianLMMSEFilterOutputSpec(TraitedSpec): @@ -376,37 +406,38 @@ class DWIJointRicianLMMSEFilter(SEMLikeCommandLine): input_spec = DWIJointRicianLMMSEFilterInputSpec output_spec = DWIJointRicianLMMSEFilterOutputSpec _cmd = "DWIJointRicianLMMSEFilter " - _outputs_filenames = {'outputVolume': 
'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class DiffusionWeightedVolumeMaskingInputSpec(CommandLineInputSpec): - inputVolume = File( - position=-4, desc="Input DWI volume", exists=True, argstr="%s") + inputVolume = File(position=-4, desc="Input DWI volume", exists=True, argstr="%s") outputBaseline = traits.Either( traits.Bool, File(), position=-2, hash_files=False, desc="Estimated baseline volume", - argstr="%s") + argstr="%s", + ) thresholdMask = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Otsu Threshold Mask", - argstr="%s") + argstr="%s", + ) otsuomegathreshold = traits.Float( - desc= - "Control the sharpness of the threshold in the Otsu computation. 0: lower threshold, 1: higher threhold", - argstr="--otsuomegathreshold %f") + desc="Control the sharpness of the threshold in the Otsu computation. 0: lower threshold, 1: higher threshold", + argstr="--otsuomegathreshold %f", + ) removeislands = traits.Bool( - desc="Remove Islands in Threshold Mask?", argstr="--removeislands ") + desc="Remove Islands in Threshold Mask?", argstr="--removeislands " + ) class DiffusionWeightedVolumeMaskingOutputSpec(TraitedSpec): - outputBaseline = File( - position=-2, desc="Estimated baseline volume", exists=True) + outputBaseline = File(position=-2, desc="Estimated baseline volume", exists=True) thresholdMask = File(position=-1, desc="Otsu Threshold Mask", exists=True) @@ -431,25 +462,25 @@ class DiffusionWeightedVolumeMasking(SEMLikeCommandLine): output_spec = DiffusionWeightedVolumeMaskingOutputSpec _cmd = "DiffusionWeightedVolumeMasking " _outputs_filenames = { - 'outputBaseline': 'outputBaseline.nii', - 'thresholdMask': 'thresholdMask.nii' + "outputBaseline": "outputBaseline.nii", + "thresholdMask": "thresholdMask.nii", } class DTIimportInputSpec(CommandLineInputSpec): - inputFile = File( - position=-2, desc="Input DTI file", exists=True, argstr="%s") + inputFile = File(position=-2, desc="Input DTI file", exists=True, argstr="%s") outputTensor = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output DTI volume", - argstr="%s") + argstr="%s", + ) testingmode = traits.Bool( - desc= - "Enable testing mode. Sample helix file (helix-DTI.nhdr) will be loaded into Slicer and converted in Nifti.", - argstr="--testingmode ") + desc="Enable testing mode. 
Sample helix file (helix-DTI.nhdr) will be loaded into Slicer and converted in Nifti.", + argstr="--testingmode ", + ) class DTIimportOutputSpec(TraitedSpec): @@ -476,45 +507,45 @@ class DTIimport(SEMLikeCommandLine): input_spec = DTIimportInputSpec output_spec = DTIimportOutputSpec _cmd = "DTIimport " - _outputs_filenames = {'outputTensor': 'outputTensor.nii'} + _outputs_filenames = {"outputTensor": "outputTensor.nii"} class DWIToDTIEstimationInputSpec(CommandLineInputSpec): - inputVolume = File( - position=-3, desc="Input DWI volume", exists=True, argstr="%s") + inputVolume = File(position=-3, desc="Input DWI volume", exists=True, argstr="%s") mask = File( - desc="Mask where the tensors will be computed", - exists=True, - argstr="--mask %s") + desc="Mask where the tensors will be computed", exists=True, argstr="--mask %s" + ) outputTensor = traits.Either( traits.Bool, File(), position=-2, hash_files=False, desc="Estimated DTI volume", - argstr="%s") + argstr="%s", + ) outputBaseline = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Estimated baseline volume", - argstr="%s") + argstr="%s", + ) enumeration = traits.Enum( "LS", "WLS", desc="LS: Least Squares, WLS: Weighted Least Squares", - argstr="--enumeration %s") + argstr="--enumeration %s", + ) shiftNeg = traits.Bool( - desc= - "Shift eigenvalues so all are positive (accounts for bad tensors related to noise or acquisition error)", - argstr="--shiftNeg ") + desc="Shift eigenvalues so all are positive (accounts for bad tensors related to noise or acquisition error)", + argstr="--shiftNeg ", + ) class DWIToDTIEstimationOutputSpec(TraitedSpec): outputTensor = File(position=-2, desc="Estimated DTI volume", exists=True) - outputBaseline = File( - position=-1, desc="Estimated baseline volume", exists=True) + outputBaseline = File(position=-1, desc="Estimated baseline volume", exists=True) class DWIToDTIEstimation(SEMLikeCommandLine): @@ -542,21 +573,21 @@ class DWIToDTIEstimation(SEMLikeCommandLine): output_spec = DWIToDTIEstimationOutputSpec _cmd = "DWIToDTIEstimation " _outputs_filenames = { - 'outputTensor': 'outputTensor.nii', - 'outputBaseline': 'outputBaseline.nii' + "outputTensor": "outputTensor.nii", + "outputBaseline": "outputBaseline.nii", } class DiffusionTensorScalarMeasurementsInputSpec(CommandLineInputSpec): - inputVolume = File( - position=-3, desc="Input DTI volume", exists=True, argstr="%s") + inputVolume = File(position=-3, desc="Input DTI volume", exists=True, argstr="%s") outputScalar = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Scalar volume derived from tensor", - argstr="%s") + argstr="%s", + ) enumeration = traits.Enum( "Trace", "Determinant", @@ -584,12 +615,14 @@ class DiffusionTensorScalarMeasurementsInputSpec(CommandLineInputSpec): "ParallelDiffusivity", "PerpendicularDffusivity", desc="An enumeration of strings", - argstr="--enumeration %s") + argstr="--enumeration %s", + ) class DiffusionTensorScalarMeasurementsOutputSpec(TraitedSpec): outputScalar = File( - position=-1, desc="Scalar volume derived from tensor", exists=True) + position=-1, desc="Scalar volume derived from tensor", exists=True + ) class DiffusionTensorScalarMeasurements(SEMLikeCommandLine): @@ -612,19 +645,19 @@ class DiffusionTensorScalarMeasurements(SEMLikeCommandLine): input_spec = DiffusionTensorScalarMeasurementsInputSpec output_spec = DiffusionTensorScalarMeasurementsOutputSpec _cmd = "DiffusionTensorScalarMeasurements " - _outputs_filenames = {'outputScalar': 'outputScalar.nii'} + 
_outputs_filenames = {"outputScalar": "outputScalar.nii"} class DTIexportInputSpec(CommandLineInputSpec): - inputTensor = File( - position=-2, desc="Input DTI volume", exists=True, argstr="%s") + inputTensor = File(position=-2, desc="Input DTI volume", exists=True, argstr="%s") outputFile = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output DTI file", - argstr="%s") + argstr="%s", + ) class DTIexportOutputSpec(TraitedSpec): @@ -651,4 +684,4 @@ class DTIexport(SEMLikeCommandLine): input_spec = DTIexportInputSpec output_spec = DTIexportOutputSpec _cmd = "DTIexport " - _outputs_filenames = {'outputFile': 'outputFile'} + _outputs_filenames = {"outputFile": "outputFile"} diff --git a/nipype/interfaces/slicer/diffusion/tests/test_auto_DTIexport.py b/nipype/interfaces/slicer/diffusion/tests/test_auto_DTIexport.py index 5b72027e1a..7c7f02cacc 100644 --- a/nipype/interfaces/slicer/diffusion/tests/test_auto_DTIexport.py +++ b/nipype/interfaces/slicer/diffusion/tests/test_auto_DTIexport.py @@ -4,32 +4,20 @@ def test_DTIexport_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputTensor=dict( - argstr='%s', - extensions=None, - position=-2, - ), - outputFile=dict( - argstr='%s', - hash_files=False, - position=-1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputTensor=dict(argstr="%s", extensions=None, position=-2,), + outputFile=dict(argstr="%s", hash_files=False, position=-1,), ) inputs = DTIexport.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DTIexport_outputs(): - output_map = dict(outputFile=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputFile=dict(extensions=None, position=-1,),) outputs = DTIexport.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/diffusion/tests/test_auto_DTIimport.py b/nipype/interfaces/slicer/diffusion/tests/test_auto_DTIimport.py index 9b9c5f1929..65ed035819 100644 --- a/nipype/interfaces/slicer/diffusion/tests/test_auto_DTIimport.py +++ b/nipype/interfaces/slicer/diffusion/tests/test_auto_DTIimport.py @@ -4,33 +4,21 @@ def test_DTIimport_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputFile=dict( - argstr='%s', - extensions=None, - position=-2, - ), - outputTensor=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - testingmode=dict(argstr='--testingmode ', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputFile=dict(argstr="%s", extensions=None, position=-2,), + outputTensor=dict(argstr="%s", hash_files=False, position=-1,), + testingmode=dict(argstr="--testingmode ",), ) inputs = DTIimport.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DTIimport_outputs(): - output_map = dict(outputTensor=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputTensor=dict(extensions=None, position=-1,),) outputs = DTIimport.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIJointRicianLMMSEFilter.py b/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIJointRicianLMMSEFilter.py index 186014c407..3c0d7c8861 100644 --- 
a/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIJointRicianLMMSEFilter.py +++ b/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIJointRicianLMMSEFilter.py @@ -4,42 +4,24 @@ def test_DWIJointRicianLMMSEFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - compressOutput=dict(argstr='--compressOutput ', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - ng=dict(argstr='--ng %d', ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - re=dict( - argstr='--re %s', - sep=',', - ), - rf=dict( - argstr='--rf %s', - sep=',', - ), + args=dict(argstr="%s",), + compressOutput=dict(argstr="--compressOutput ",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="%s", extensions=None, position=-2,), + ng=dict(argstr="--ng %d",), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), + re=dict(argstr="--re %s", sep=",",), + rf=dict(argstr="--rf %s", sep=",",), ) inputs = DWIJointRicianLMMSEFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DWIJointRicianLMMSEFilter_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = DWIJointRicianLMMSEFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIRicianLMMSEFilter.py b/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIRicianLMMSEFilter.py index 1c488f912f..8dda7d3105 100644 --- a/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIRicianLMMSEFilter.py +++ b/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIRicianLMMSEFilter.py @@ -4,48 +4,30 @@ def test_DWIRicianLMMSEFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - compressOutput=dict(argstr='--compressOutput ', ), - environ=dict( - nohash=True, - usedefault=True, - ), - hrf=dict(argstr='--hrf %f', ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - iter=dict(argstr='--iter %d', ), - maxnstd=dict(argstr='--maxnstd %d', ), - minnstd=dict(argstr='--minnstd %d', ), - mnve=dict(argstr='--mnve %d', ), - mnvf=dict(argstr='--mnvf %d', ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - re=dict( - argstr='--re %s', - sep=',', - ), - rf=dict( - argstr='--rf %s', - sep=',', - ), - uav=dict(argstr='--uav ', ), + args=dict(argstr="%s",), + compressOutput=dict(argstr="--compressOutput ",), + environ=dict(nohash=True, usedefault=True,), + hrf=dict(argstr="--hrf %f",), + inputVolume=dict(argstr="%s", extensions=None, position=-2,), + iter=dict(argstr="--iter %d",), + maxnstd=dict(argstr="--maxnstd %d",), + minnstd=dict(argstr="--minnstd %d",), + mnve=dict(argstr="--mnve %d",), + mnvf=dict(argstr="--mnvf %d",), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), + re=dict(argstr="--re %s", sep=",",), + rf=dict(argstr="--rf %s", sep=",",), + uav=dict(argstr="--uav ",), ) inputs = DWIRicianLMMSEFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DWIRicianLMMSEFilter_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) 
outputs = DWIRicianLMMSEFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIToDTIEstimation.py b/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIToDTIEstimation.py index 87a1300476..ecb618c2ff 100644 --- a/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIToDTIEstimation.py +++ b/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIToDTIEstimation.py @@ -4,48 +4,26 @@ def test_DWIToDTIEstimation_inputs(): input_map = dict( - args=dict(argstr='%s', ), - enumeration=dict(argstr='--enumeration %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-3, - ), - mask=dict( - argstr='--mask %s', - extensions=None, - ), - outputBaseline=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - outputTensor=dict( - argstr='%s', - hash_files=False, - position=-2, - ), - shiftNeg=dict(argstr='--shiftNeg ', ), + args=dict(argstr="%s",), + enumeration=dict(argstr="--enumeration %s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="%s", extensions=None, position=-3,), + mask=dict(argstr="--mask %s", extensions=None,), + outputBaseline=dict(argstr="%s", hash_files=False, position=-1,), + outputTensor=dict(argstr="%s", hash_files=False, position=-2,), + shiftNeg=dict(argstr="--shiftNeg ",), ) inputs = DWIToDTIEstimation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DWIToDTIEstimation_outputs(): output_map = dict( - outputBaseline=dict( - extensions=None, - position=-1, - ), - outputTensor=dict( - extensions=None, - position=-2, - ), + outputBaseline=dict(extensions=None, position=-1,), + outputTensor=dict(extensions=None, position=-2,), ) outputs = DWIToDTIEstimation.output_spec() diff --git a/nipype/interfaces/slicer/diffusion/tests/test_auto_DiffusionTensorScalarMeasurements.py b/nipype/interfaces/slicer/diffusion/tests/test_auto_DiffusionTensorScalarMeasurements.py index 44b5d9228b..70df302ae6 100644 --- a/nipype/interfaces/slicer/diffusion/tests/test_auto_DiffusionTensorScalarMeasurements.py +++ b/nipype/interfaces/slicer/diffusion/tests/test_auto_DiffusionTensorScalarMeasurements.py @@ -4,33 +4,21 @@ def test_DiffusionTensorScalarMeasurements_inputs(): input_map = dict( - args=dict(argstr='%s', ), - enumeration=dict(argstr='--enumeration %s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-3, - ), - outputScalar=dict( - argstr='%s', - hash_files=False, - position=-1, - ), + args=dict(argstr="%s",), + enumeration=dict(argstr="--enumeration %s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="%s", extensions=None, position=-3,), + outputScalar=dict(argstr="%s", hash_files=False, position=-1,), ) inputs = DiffusionTensorScalarMeasurements.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DiffusionTensorScalarMeasurements_outputs(): - output_map = dict(outputScalar=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputScalar=dict(extensions=None, position=-1,),) outputs = DiffusionTensorScalarMeasurements.output_spec() for key, metadata in list(output_map.items()): diff --git 
a/nipype/interfaces/slicer/diffusion/tests/test_auto_DiffusionWeightedVolumeMasking.py b/nipype/interfaces/slicer/diffusion/tests/test_auto_DiffusionWeightedVolumeMasking.py index fdd979678e..659f86a642 100644 --- a/nipype/interfaces/slicer/diffusion/tests/test_auto_DiffusionWeightedVolumeMasking.py +++ b/nipype/interfaces/slicer/diffusion/tests/test_auto_DiffusionWeightedVolumeMasking.py @@ -4,44 +4,25 @@ def test_DiffusionWeightedVolumeMasking_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-4, - ), - otsuomegathreshold=dict(argstr='--otsuomegathreshold %f', ), - outputBaseline=dict( - argstr='%s', - hash_files=False, - position=-2, - ), - removeislands=dict(argstr='--removeislands ', ), - thresholdMask=dict( - argstr='%s', - hash_files=False, - position=-1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="%s", extensions=None, position=-4,), + otsuomegathreshold=dict(argstr="--otsuomegathreshold %f",), + outputBaseline=dict(argstr="%s", hash_files=False, position=-2,), + removeislands=dict(argstr="--removeislands ",), + thresholdMask=dict(argstr="%s", hash_files=False, position=-1,), ) inputs = DiffusionWeightedVolumeMasking.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DiffusionWeightedVolumeMasking_outputs(): output_map = dict( - outputBaseline=dict( - extensions=None, - position=-2, - ), - thresholdMask=dict( - extensions=None, - position=-1, - ), + outputBaseline=dict(extensions=None, position=-2,), + thresholdMask=dict(extensions=None, position=-1,), ) outputs = DiffusionWeightedVolumeMasking.output_spec() diff --git a/nipype/interfaces/slicer/diffusion/tests/test_auto_ResampleDTIVolume.py b/nipype/interfaces/slicer/diffusion/tests/test_auto_ResampleDTIVolume.py index 1933c1d674..8db387f985 100644 --- a/nipype/interfaces/slicer/diffusion/tests/test_auto_ResampleDTIVolume.py +++ b/nipype/interfaces/slicer/diffusion/tests/test_auto_ResampleDTIVolume.py @@ -4,78 +4,44 @@ def test_ResampleDTIVolume_inputs(): input_map = dict( - Inverse_ITK_Transformation=dict( - argstr='--Inverse_ITK_Transformation ', ), - Reference=dict( - argstr='--Reference %s', - extensions=None, - ), - args=dict(argstr='%s', ), - centered_transform=dict(argstr='--centered_transform ', ), - correction=dict(argstr='--correction %s', ), - defField=dict( - argstr='--defField %s', - extensions=None, - ), - default_pixel_value=dict(argstr='--default_pixel_value %f', ), - direction_matrix=dict( - argstr='--direction_matrix %s', - sep=',', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - hfieldtype=dict(argstr='--hfieldtype %s', ), - image_center=dict(argstr='--image_center %s', ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - interpolation=dict(argstr='--interpolation %s', ), - notbulk=dict(argstr='--notbulk ', ), - number_of_thread=dict(argstr='--number_of_thread %d', ), - origin=dict(argstr='--origin %s', ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - rotation_point=dict(argstr='--rotation_point %s', ), - size=dict( - argstr='--size %s', - sep=',', - ), - spaceChange=dict(argstr='--spaceChange ', ), - spacing=dict( - argstr='--spacing %s', - sep=',', - ), - spline_order=dict(argstr='--spline_order %d', ), - 
transform=dict(argstr='--transform %s', ), - transform_matrix=dict( - argstr='--transform_matrix %s', - sep=',', - ), - transform_order=dict(argstr='--transform_order %s', ), - transform_tensor_method=dict(argstr='--transform_tensor_method %s', ), - transformationFile=dict( - argstr='--transformationFile %s', - extensions=None, - ), - window_function=dict(argstr='--window_function %s', ), + Inverse_ITK_Transformation=dict(argstr="--Inverse_ITK_Transformation ",), + Reference=dict(argstr="--Reference %s", extensions=None,), + args=dict(argstr="%s",), + centered_transform=dict(argstr="--centered_transform ",), + correction=dict(argstr="--correction %s",), + defField=dict(argstr="--defField %s", extensions=None,), + default_pixel_value=dict(argstr="--default_pixel_value %f",), + direction_matrix=dict(argstr="--direction_matrix %s", sep=",",), + environ=dict(nohash=True, usedefault=True,), + hfieldtype=dict(argstr="--hfieldtype %s",), + image_center=dict(argstr="--image_center %s",), + inputVolume=dict(argstr="%s", extensions=None, position=-2,), + interpolation=dict(argstr="--interpolation %s",), + notbulk=dict(argstr="--notbulk ",), + number_of_thread=dict(argstr="--number_of_thread %d",), + origin=dict(argstr="--origin %s",), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), + rotation_point=dict(argstr="--rotation_point %s",), + size=dict(argstr="--size %s", sep=",",), + spaceChange=dict(argstr="--spaceChange ",), + spacing=dict(argstr="--spacing %s", sep=",",), + spline_order=dict(argstr="--spline_order %d",), + transform=dict(argstr="--transform %s",), + transform_matrix=dict(argstr="--transform_matrix %s", sep=",",), + transform_order=dict(argstr="--transform_order %s",), + transform_tensor_method=dict(argstr="--transform_tensor_method %s",), + transformationFile=dict(argstr="--transformationFile %s", extensions=None,), + window_function=dict(argstr="--window_function %s",), ) inputs = ResampleDTIVolume.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ResampleDTIVolume_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = ResampleDTIVolume.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/diffusion/tests/test_auto_TractographyLabelMapSeeding.py b/nipype/interfaces/slicer/diffusion/tests/test_auto_TractographyLabelMapSeeding.py index e14bfeece8..6f36ac2a63 100644 --- a/nipype/interfaces/slicer/diffusion/tests/test_auto_TractographyLabelMapSeeding.py +++ b/nipype/interfaces/slicer/diffusion/tests/test_auto_TractographyLabelMapSeeding.py @@ -4,55 +4,36 @@ def test_TractographyLabelMapSeeding_inputs(): input_map = dict( - InputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - OutputFibers=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - args=dict(argstr='%s', ), - clthreshold=dict(argstr='--clthreshold %f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputroi=dict( - argstr='--inputroi %s', - extensions=None, - ), - integrationsteplength=dict(argstr='--integrationsteplength %f', ), - label=dict(argstr='--label %d', ), - maximumlength=dict(argstr='--maximumlength %f', ), - minimumlength=dict(argstr='--minimumlength %f', ), - name=dict(argstr='--name %s', ), - outputdirectory=dict( - argstr='--outputdirectory %s', - hash_files=False, - ), - 
randomgrid=dict(argstr='--randomgrid ', ), - seedspacing=dict(argstr='--seedspacing %f', ), - stoppingcurvature=dict(argstr='--stoppingcurvature %f', ), - stoppingmode=dict(argstr='--stoppingmode %s', ), - stoppingvalue=dict(argstr='--stoppingvalue %f', ), - useindexspace=dict(argstr='--useindexspace ', ), - writetofile=dict(argstr='--writetofile ', ), + InputVolume=dict(argstr="%s", extensions=None, position=-2,), + OutputFibers=dict(argstr="%s", hash_files=False, position=-1,), + args=dict(argstr="%s",), + clthreshold=dict(argstr="--clthreshold %f",), + environ=dict(nohash=True, usedefault=True,), + inputroi=dict(argstr="--inputroi %s", extensions=None,), + integrationsteplength=dict(argstr="--integrationsteplength %f",), + label=dict(argstr="--label %d",), + maximumlength=dict(argstr="--maximumlength %f",), + minimumlength=dict(argstr="--minimumlength %f",), + name=dict(argstr="--name %s",), + outputdirectory=dict(argstr="--outputdirectory %s", hash_files=False,), + randomgrid=dict(argstr="--randomgrid ",), + seedspacing=dict(argstr="--seedspacing %f",), + stoppingcurvature=dict(argstr="--stoppingcurvature %f",), + stoppingmode=dict(argstr="--stoppingmode %s",), + stoppingvalue=dict(argstr="--stoppingvalue %f",), + useindexspace=dict(argstr="--useindexspace ",), + writetofile=dict(argstr="--writetofile ",), ) inputs = TractographyLabelMapSeeding.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_TractographyLabelMapSeeding_outputs(): output_map = dict( - OutputFibers=dict( - extensions=None, - position=-1, - ), - outputdirectory=dict(), + OutputFibers=dict(extensions=None, position=-1,), outputdirectory=dict(), ) outputs = TractographyLabelMapSeeding.output_spec() diff --git a/nipype/interfaces/slicer/filtering/__init__.py b/nipype/interfaces/slicer/filtering/__init__.py index 1270e20d26..13b79f8705 100644 --- a/nipype/interfaces/slicer/filtering/__init__.py +++ b/nipype/interfaces/slicer/filtering/__init__.py @@ -1,12 +1,18 @@ # -*- coding: utf-8 -*- -from .morphology import (GrayscaleGrindPeakImageFilter, - GrayscaleFillHoleImageFilter) -from .denoising import (GradientAnisotropicDiffusion, - CurvatureAnisotropicDiffusion, GaussianBlurImageFilter, - MedianImageFilter) -from .arithmetic import (MultiplyScalarVolumes, MaskScalarVolume, - SubtractScalarVolumes, AddScalarVolumes, - CastScalarVolume) +from .morphology import GrayscaleGrindPeakImageFilter, GrayscaleFillHoleImageFilter +from .denoising import ( + GradientAnisotropicDiffusion, + CurvatureAnisotropicDiffusion, + GaussianBlurImageFilter, + MedianImageFilter, +) +from .arithmetic import ( + MultiplyScalarVolumes, + MaskScalarVolume, + SubtractScalarVolumes, + AddScalarVolumes, + CastScalarVolume, +) from .extractskeleton import ExtractSkeleton from .histogrammatching import HistogramMatching from .thresholdscalarvolume import ThresholdScalarVolume diff --git a/nipype/interfaces/slicer/filtering/arithmetic.py b/nipype/interfaces/slicer/filtering/arithmetic.py index 22785e32e1..64d60feef3 100644 --- a/nipype/interfaces/slicer/filtering/arithmetic.py +++ b/nipype/interfaces/slicer/filtering/arithmetic.py @@ -3,30 +3,40 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath 
+from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class MultiplyScalarVolumesInputSpec(CommandLineInputSpec): - inputVolume1 = File( - position=-3, desc="Input volume 1", exists=True, argstr="%s") - inputVolume2 = File( - position=-2, desc="Input volume 2", exists=True, argstr="%s") + inputVolume1 = File(position=-3, desc="Input volume 1", exists=True, argstr="%s") + inputVolume2 = File(position=-2, desc="Input volume 2", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Volume1 * Volume2", - argstr="%s") + argstr="%s", + ) order = traits.Enum( "0", "1", "2", "3", - desc= - "Interpolation order if two images are in different coordinate frames or have different sampling.", - argstr="--order %s") + desc="Interpolation order if two images are in different coordinate frames or have different sampling.", + argstr="--order %s", + ) class MultiplyScalarVolumesOutputSpec(TraitedSpec): @@ -53,42 +63,39 @@ class MultiplyScalarVolumes(SEMLikeCommandLine): input_spec = MultiplyScalarVolumesInputSpec output_spec = MultiplyScalarVolumesOutputSpec _cmd = "MultiplyScalarVolumes " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class MaskScalarVolumeInputSpec(CommandLineInputSpec): InputVolume = File( - position=-3, - desc="Input volume to be masked", - exists=True, - argstr="%s") + position=-3, desc="Input volume to be masked", exists=True, argstr="%s" + ) MaskVolume = File( - position=-2, - desc="Label volume containing the mask", - exists=True, - argstr="%s") + position=-2, desc="Label volume containing the mask", exists=True, argstr="%s" + ) OutputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, - desc= - "Output volume: Input Volume masked by label value from Mask Volume", - argstr="%s") + desc="Output volume: Input Volume masked by label value from Mask Volume", + argstr="%s", + ) label = traits.Int( - desc="Label value in the Mask Volume to use as the mask", - argstr="--label %d") + desc="Label value in the Mask Volume to use as the mask", argstr="--label %d" + ) replace = traits.Int( desc="Value to use for the output volume outside of the mask", - argstr="--replace %d") + argstr="--replace %d", + ) class MaskScalarVolumeOutputSpec(TraitedSpec): OutputVolume = File( position=-1, - desc= - "Output volume: Input Volume masked by label value from Mask Volume", - exists=True) + desc="Output volume: Input Volume masked by label value from Mask Volume", + exists=True, + ) class MaskScalarVolume(SEMLikeCommandLine): @@ -111,29 +118,28 @@ class MaskScalarVolume(SEMLikeCommandLine): input_spec = MaskScalarVolumeInputSpec output_spec = MaskScalarVolumeOutputSpec _cmd = "MaskScalarVolume " - _outputs_filenames = {'OutputVolume': 'OutputVolume.nii'} + _outputs_filenames = {"OutputVolume": "OutputVolume.nii"} class SubtractScalarVolumesInputSpec(CommandLineInputSpec): - inputVolume1 = File( - position=-3, desc="Input volume 1", exists=True, argstr="%s") - inputVolume2 = File( - position=-2, desc="Input volume 2", exists=True, argstr="%s") + inputVolume1 = File(position=-3, desc="Input volume 1", exists=True, argstr="%s") + inputVolume2 = File(position=-2, desc="Input volume 2", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Volume1 
- Volume2", - argstr="%s") + argstr="%s", + ) order = traits.Enum( "0", "1", "2", "3", - desc= - "Interpolation order if two images are in different coordinate frames or have different sampling.", - argstr="--order %s") + desc="Interpolation order if two images are in different coordinate frames or have different sampling.", + argstr="--order %s", + ) class SubtractScalarVolumesOutputSpec(TraitedSpec): @@ -160,29 +166,28 @@ class SubtractScalarVolumes(SEMLikeCommandLine): input_spec = SubtractScalarVolumesInputSpec output_spec = SubtractScalarVolumesOutputSpec _cmd = "SubtractScalarVolumes " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class AddScalarVolumesInputSpec(CommandLineInputSpec): - inputVolume1 = File( - position=-3, desc="Input volume 1", exists=True, argstr="%s") - inputVolume2 = File( - position=-2, desc="Input volume 2", exists=True, argstr="%s") + inputVolume1 = File(position=-3, desc="Input volume 1", exists=True, argstr="%s") + inputVolume2 = File(position=-2, desc="Input volume 2", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Volume1 + Volume2", - argstr="%s") + argstr="%s", + ) order = traits.Enum( "0", "1", "2", "3", - desc= - "Interpolation order if two images are in different coordinate frames or have different sampling.", - argstr="--order %s") + desc="Interpolation order if two images are in different coordinate frames or have different sampling.", + argstr="--order %s", + ) class AddScalarVolumesOutputSpec(TraitedSpec): @@ -209,22 +214,21 @@ class AddScalarVolumes(SEMLikeCommandLine): input_spec = AddScalarVolumesInputSpec output_spec = AddScalarVolumesOutputSpec _cmd = "AddScalarVolumes " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class CastScalarVolumeInputSpec(CommandLineInputSpec): InputVolume = File( - position=-2, - desc="Input volume, the volume to cast.", - exists=True, - argstr="%s") + position=-2, desc="Input volume, the volume to cast.", exists=True, argstr="%s" + ) OutputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output volume, cast to the new type.", - argstr="%s") + argstr="%s", + ) type = traits.Enum( "Char", "UnsignedChar", @@ -235,12 +239,14 @@ class CastScalarVolumeInputSpec(CommandLineInputSpec): "Float", "Double", desc="Type for the new output volume.", - argstr="--type %s") + argstr="--type %s", + ) class CastScalarVolumeOutputSpec(TraitedSpec): OutputVolume = File( - position=-1, desc="Output volume, cast to the new type.", exists=True) + position=-1, desc="Output volume, cast to the new type.", exists=True + ) class CastScalarVolume(SEMLikeCommandLine): @@ -265,4 +271,4 @@ class CastScalarVolume(SEMLikeCommandLine): input_spec = CastScalarVolumeInputSpec output_spec = CastScalarVolumeOutputSpec _cmd = "CastScalarVolume " - _outputs_filenames = {'OutputVolume': 'OutputVolume.nii'} + _outputs_filenames = {"OutputVolume": "OutputVolume.nii"} diff --git a/nipype/interfaces/slicer/filtering/checkerboardfilter.py b/nipype/interfaces/slicer/filtering/checkerboardfilter.py index e4ad85dc5e..1789359e7d 100644 --- a/nipype/interfaces/slicer/filtering/checkerboardfilter.py +++ b/nipype/interfaces/slicer/filtering/checkerboardfilter.py @@ -3,28 +3,42 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from 
nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class CheckerBoardFilterInputSpec(CommandLineInputSpec): checkerPattern = InputMultiPath( traits.Int, - desc= - "The pattern of input 1 and input 2 in the output image. The user can specify the number of checkers in each dimension. A checkerPattern of 2,2,1 means that images will alternate in every other checker in the first two dimensions. The same pattern will be used in the 3rd dimension.", + desc="The pattern of input 1 and input 2 in the output image. The user can specify the number of checkers in each dimension. A checkerPattern of 2,2,1 means that images will alternate in every other checker in the first two dimensions. The same pattern will be used in the 3rd dimension.", sep=",", - argstr="--checkerPattern %s") + argstr="--checkerPattern %s", + ) inputVolume1 = File( - position=-3, desc="First Input volume", exists=True, argstr="%s") + position=-3, desc="First Input volume", exists=True, argstr="%s" + ) inputVolume2 = File( - position=-2, desc="Second Input volume", exists=True, argstr="%s") + position=-2, desc="Second Input volume", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", - argstr="%s") + argstr="%s", + ) class CheckerBoardFilterOutputSpec(TraitedSpec): @@ -51,4 +65,4 @@ class CheckerBoardFilter(SEMLikeCommandLine): input_spec = CheckerBoardFilterInputSpec output_spec = CheckerBoardFilterOutputSpec _cmd = "CheckerBoardFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/filtering/denoising.py b/nipype/interfaces/slicer/filtering/denoising.py index 0dbaaebf74..6c26b74618 100644 --- a/nipype/interfaces/slicer/filtering/denoising.py +++ b/nipype/interfaces/slicer/filtering/denoising.py @@ -3,35 +3,45 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class GradientAnisotropicDiffusionInputSpec(CommandLineInputSpec): conductance = traits.Float( - desc= - "Conductance controls the sensitivity of the conductance term. As a general rule, the lower the value, the more strongly the filter preserves edges. A high value will cause diffusion (smoothing) across edges. Note that the number of iterations controls how much smoothing is done within regions bounded by edges.", - argstr="--conductance %f") + desc="Conductance controls the sensitivity of the conductance term. As a general rule, the lower the value, the more strongly the filter preserves edges. A high value will cause diffusion (smoothing) across edges. 
Note that the number of iterations controls how much smoothing is done within regions bounded by edges.", + argstr="--conductance %f", + ) iterations = traits.Int( - desc= - "The more iterations, the more smoothing. Each iteration takes the same amount of time. If it takes 10 seconds for one iteration, then it will take 100 seconds for 10 iterations. Note that the conductance controls how much each iteration smooths across edges.", - argstr="--iterations %d") + desc="The more iterations, the more smoothing. Each iteration takes the same amount of time. If it takes 10 seconds for one iteration, then it will take 100 seconds for 10 iterations. Note that the conductance controls how much each iteration smooths across edges.", + argstr="--iterations %d", + ) timeStep = traits.Float( - desc= - "The time step depends on the dimensionality of the image. In Slicer the images are 3D and the default (.0625) time step will provide a stable solution.", - argstr="--timeStep %f") + desc="The time step depends on the dimensionality of the image. In Slicer the images are 3D and the default (.0625) time step will provide a stable solution.", + argstr="--timeStep %f", + ) inputVolume = File( - position=-2, - desc="Input volume to be filtered", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", - argstr="%s") + argstr="%s", + ) class GradientAnisotropicDiffusionOutputSpec(TraitedSpec): @@ -60,34 +70,33 @@ class GradientAnisotropicDiffusion(SEMLikeCommandLine): input_spec = GradientAnisotropicDiffusionInputSpec output_spec = GradientAnisotropicDiffusionOutputSpec _cmd = "GradientAnisotropicDiffusion " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class CurvatureAnisotropicDiffusionInputSpec(CommandLineInputSpec): conductance = traits.Float( - desc= - "Conductance controls the sensitivity of the conductance term. As a general rule, the lower the value, the more strongly the filter preserves edges. A high value will cause diffusion (smoothing) across edges. Note that the number of iterations controls how much smoothing is done within regions bounded by edges.", - argstr="--conductance %f") + desc="Conductance controls the sensitivity of the conductance term. As a general rule, the lower the value, the more strongly the filter preserves edges. A high value will cause diffusion (smoothing) across edges. Note that the number of iterations controls how much smoothing is done within regions bounded by edges.", + argstr="--conductance %f", + ) iterations = traits.Int( - desc= - "The more iterations, the more smoothing. Each iteration takes the same amount of time. If it takes 10 seconds for one iteration, then it will take 100 seconds for 10 iterations. Note that the conductance controls how much each iteration smooths across edges.", - argstr="--iterations %d") + desc="The more iterations, the more smoothing. Each iteration takes the same amount of time. If it takes 10 seconds for one iteration, then it will take 100 seconds for 10 iterations. Note that the conductance controls how much each iteration smooths across edges.", + argstr="--iterations %d", + ) timeStep = traits.Float( - desc= - "The time step depends on the dimensionality of the image. 
In Slicer the images are 3D and the default (.0625) time step will provide a stable solution.", - argstr="--timeStep %f") + desc="The time step depends on the dimensionality of the image. In Slicer the images are 3D and the default (.0625) time step will provide a stable solution.", + argstr="--timeStep %f", + ) inputVolume = File( - position=-2, - desc="Input volume to be filtered", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", - argstr="%s") + argstr="%s", + ) class CurvatureAnisotropicDiffusionOutputSpec(TraitedSpec): @@ -118,22 +127,23 @@ class CurvatureAnisotropicDiffusion(SEMLikeCommandLine): input_spec = CurvatureAnisotropicDiffusionInputSpec output_spec = CurvatureAnisotropicDiffusionOutputSpec _cmd = "CurvatureAnisotropicDiffusion " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class GaussianBlurImageFilterInputSpec(CommandLineInputSpec): sigma = traits.Float( desc="Sigma value in physical units (e.g., mm) of the Gaussian kernel", - argstr="--sigma %f") - inputVolume = File( - position=-2, desc="Input volume", exists=True, argstr="%s") + argstr="--sigma %f", + ) + inputVolume = File(position=-2, desc="Input volume", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Blurred Volume", - argstr="%s") + argstr="%s", + ) class GaussianBlurImageFilterOutputSpec(TraitedSpec): @@ -160,7 +170,7 @@ class GaussianBlurImageFilter(SEMLikeCommandLine): input_spec = GaussianBlurImageFilterInputSpec output_spec = GaussianBlurImageFilterOutputSpec _cmd = "GaussianBlurImageFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class MedianImageFilterInputSpec(CommandLineInputSpec): @@ -168,19 +178,19 @@ class MedianImageFilterInputSpec(CommandLineInputSpec): traits.Int, desc="The size of the neighborhood in each dimension", sep=",", - argstr="--neighborhood %s") + argstr="--neighborhood %s", + ) inputVolume = File( - position=-2, - desc="Input volume to be filtered", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", - argstr="%s") + argstr="%s", + ) class MedianImageFilterOutputSpec(TraitedSpec): @@ -207,4 +217,4 @@ class MedianImageFilter(SEMLikeCommandLine): input_spec = MedianImageFilterInputSpec output_spec = MedianImageFilterOutputSpec _cmd = "MedianImageFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/filtering/extractskeleton.py b/nipype/interfaces/slicer/filtering/extractskeleton.py index d7770c8f2e..7900be472c 100644 --- a/nipype/interfaces/slicer/filtering/extractskeleton.py +++ b/nipype/interfaces/slicer/filtering/extractskeleton.py @@ -3,37 +3,51 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + 
SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class ExtractSkeletonInputSpec(CommandLineInputSpec): - InputImageFileName = File( - position=-2, desc="Input image", exists=True, argstr="%s") + InputImageFileName = File(position=-2, desc="Input image", exists=True, argstr="%s") OutputImageFileName = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Skeleton of the input image", - argstr="%s") + argstr="%s", + ) type = traits.Enum( - "1D", "2D", desc="Type of skeleton to create", argstr="--type %s") + "1D", "2D", desc="Type of skeleton to create", argstr="--type %s" + ) dontPrune = traits.Bool( desc="Return the full skeleton, not just the maximal skeleton", - argstr="--dontPrune ") + argstr="--dontPrune ", + ) numPoints = traits.Int( - desc="Number of points used to represent the skeleton", - argstr="--numPoints %d") + desc="Number of points used to represent the skeleton", argstr="--numPoints %d" + ) pointsFile = traits.Str( - desc= - "Name of the file to store the coordinates of the central (1D) skeleton points", - argstr="--pointsFile %s") + desc="Name of the file to store the coordinates of the central (1D) skeleton points", + argstr="--pointsFile %s", + ) class ExtractSkeletonOutputSpec(TraitedSpec): OutputImageFileName = File( - position=-1, desc="Skeleton of the input image", exists=True) + position=-1, desc="Skeleton of the input image", exists=True + ) class ExtractSkeleton(SEMLikeCommandLine): @@ -56,4 +70,4 @@ class ExtractSkeleton(SEMLikeCommandLine): input_spec = ExtractSkeletonInputSpec output_spec = ExtractSkeletonOutputSpec _cmd = "ExtractSkeleton " - _outputs_filenames = {'OutputImageFileName': 'OutputImageFileName.nii'} + _outputs_filenames = {"OutputImageFileName": "OutputImageFileName.nii"} diff --git a/nipype/interfaces/slicer/filtering/histogrammatching.py b/nipype/interfaces/slicer/filtering/histogrammatching.py index 1b3b26b061..9b6cb17813 100644 --- a/nipype/interfaces/slicer/filtering/histogrammatching.py +++ b/nipype/interfaces/slicer/filtering/histogrammatching.py @@ -3,47 +3,58 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class HistogramMatchingInputSpec(CommandLineInputSpec): numberOfHistogramLevels = traits.Int( desc="The number of histogram levels to use", - argstr="--numberOfHistogramLevels %d") + argstr="--numberOfHistogramLevels %d", + ) numberOfMatchPoints = traits.Int( - desc="The number of match points to use", - argstr="--numberOfMatchPoints %d") + desc="The number of match points to use", argstr="--numberOfMatchPoints %d" + ) threshold = traits.Bool( - desc= - "If on, only pixels above the mean in each volume are thresholded.", - argstr="--threshold ") + desc="If on, only pixels above the mean in each volume are thresholded.", + argstr="--threshold ", + ) inputVolume = File( - position=-3, - desc="Input volume to be filtered", - exists=True, - argstr="%s") + position=-3, desc="Input volume to be filtered", exists=True, argstr="%s" + ) referenceVolume = File( position=-2, desc="Input volume whose 
histogram will be matched", exists=True, - argstr="%s") + argstr="%s", + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, - desc= - "Output volume. This is the input volume with intensities matched to the reference volume.", - argstr="%s") + desc="Output volume. This is the input volume with intensities matched to the reference volume.", + argstr="%s", + ) class HistogramMatchingOutputSpec(TraitedSpec): outputVolume = File( position=-1, - desc= - "Output volume. This is the input volume with intensities matched to the reference volume.", - exists=True) + desc="Output volume. This is the input volume with intensities matched to the reference volume.", + exists=True, + ) class HistogramMatching(SEMLikeCommandLine): @@ -72,4 +83,4 @@ class HistogramMatching(SEMLikeCommandLine): input_spec = HistogramMatchingInputSpec output_spec = HistogramMatchingOutputSpec _cmd = "HistogramMatching " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/filtering/imagelabelcombine.py b/nipype/interfaces/slicer/filtering/imagelabelcombine.py index 067a575045..666385845d 100644 --- a/nipype/interfaces/slicer/filtering/imagelabelcombine.py +++ b/nipype/interfaces/slicer/filtering/imagelabelcombine.py @@ -3,30 +3,44 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class ImageLabelCombineInputSpec(CommandLineInputSpec): InputLabelMap_A = File( - position=-3, desc="Label map image", exists=True, argstr="%s") + position=-3, desc="Label map image", exists=True, argstr="%s" + ) InputLabelMap_B = File( - position=-2, desc="Label map image", exists=True, argstr="%s") + position=-2, desc="Label map image", exists=True, argstr="%s" + ) OutputLabelMap = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Resulting Label map image", - argstr="%s") + argstr="%s", + ) first_overwrites = traits.Bool( desc="Use first or second label when both are present", - argstr="--first_overwrites ") + argstr="--first_overwrites ", + ) class ImageLabelCombineOutputSpec(TraitedSpec): - OutputLabelMap = File( - position=-1, desc="Resulting Label map image", exists=True) + OutputLabelMap = File(position=-1, desc="Resulting Label map image", exists=True) class ImageLabelCombine(SEMLikeCommandLine): @@ -47,4 +61,4 @@ class ImageLabelCombine(SEMLikeCommandLine): input_spec = ImageLabelCombineInputSpec output_spec = ImageLabelCombineOutputSpec _cmd = "ImageLabelCombine " - _outputs_filenames = {'OutputLabelMap': 'OutputLabelMap.nii'} + _outputs_filenames = {"OutputLabelMap": "OutputLabelMap.nii"} diff --git a/nipype/interfaces/slicer/filtering/morphology.py b/nipype/interfaces/slicer/filtering/morphology.py index 913c63d5ab..b33d3e9c6d 100644 --- a/nipype/interfaces/slicer/filtering/morphology.py +++ b/nipype/interfaces/slicer/filtering/morphology.py @@ -3,23 +3,33 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, 
CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class GrayscaleGrindPeakImageFilterInputSpec(CommandLineInputSpec): inputVolume = File( - position=-2, - desc="Input volume to be filtered", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", - argstr="%s") + argstr="%s", + ) class GrayscaleGrindPeakImageFilterOutputSpec(TraitedSpec): @@ -56,22 +66,21 @@ class GrayscaleGrindPeakImageFilter(SEMLikeCommandLine): input_spec = GrayscaleGrindPeakImageFilterInputSpec output_spec = GrayscaleGrindPeakImageFilterOutputSpec _cmd = "GrayscaleGrindPeakImageFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class GrayscaleFillHoleImageFilterInputSpec(CommandLineInputSpec): inputVolume = File( - position=-2, - desc="Input volume to be filtered", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", - argstr="%s") + argstr="%s", + ) class GrayscaleFillHoleImageFilterOutputSpec(TraitedSpec): @@ -106,4 +115,4 @@ class GrayscaleFillHoleImageFilter(SEMLikeCommandLine): input_spec = GrayscaleFillHoleImageFilterInputSpec output_spec = GrayscaleFillHoleImageFilterOutputSpec _cmd = "GrayscaleFillHoleImageFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py b/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py index 28f694f77e..b1243160b5 100644 --- a/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py +++ b/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py @@ -3,7 +3,18 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os @@ -11,60 +22,62 @@ class N4ITKBiasFieldCorrectionInputSpec(CommandLineInputSpec): inputimage = File( desc="Input image where you observe signal inhomogeneity", exists=True, - argstr="--inputimage %s") + argstr="--inputimage %s", + ) maskimage = File( - desc= - "Binary mask that defines the structure of your interest. NOTE: This parameter is OPTIONAL. If the mask is not specified, the module will use internally Otsu thresholding to define this mask. Better processing results can often be obtained when a meaningful mask is defined.", + desc="Binary mask that defines the structure of your interest. NOTE: This parameter is OPTIONAL. If the mask is not specified, the module will internally use Otsu thresholding to define this mask. 
Better processing results can often be obtained when a meaningful mask is defined.", exists=True, - argstr="--maskimage %s") + argstr="--maskimage %s", + ) outputimage = traits.Either( traits.Bool, File(), hash_files=False, desc="Result of processing", - argstr="--outputimage %s") + argstr="--outputimage %s", + ) outputbiasfield = traits.Either( traits.Bool, File(), hash_files=False, desc="Recovered bias field (OPTIONAL)", - argstr="--outputbiasfield %s") + argstr="--outputbiasfield %s", + ) iterations = InputMultiPath( traits.Int, - desc= - "Maximum number of iterations at each level of resolution. Larger values will increase execution time, but may lead to better results.", + desc="Maximum number of iterations at each level of resolution. Larger values will increase execution time, but may lead to better results.", sep=",", - argstr="--iterations %s") + argstr="--iterations %s", + ) convergencethreshold = traits.Float( - desc= - "Stopping criterion for the iterative bias estimation. Larger values will lead to smaller execution time.", - argstr="--convergencethreshold %f") + desc="Stopping criterion for the iterative bias estimation. Larger values will lead to smaller execution time.", + argstr="--convergencethreshold %f", + ) meshresolution = InputMultiPath( traits.Float, - desc= - "Resolution of the initial bspline grid defined as a sequence of three numbers. The actual resolution will be defined by adding the bspline order (default is 3) to the resolution in each dimension specified here. For example, 1,1,1 will result in a 4x4x4 grid of control points. This parameter may need to be adjusted based on your input image. In the multi-resolution N4 framework, the resolution of the bspline grid at subsequent iterations will be doubled. The number of resolutions is implicitly defined by Number of iterations parameter (the size of this list is the number of resolutions)", + desc="Resolution of the initial bspline grid defined as a sequence of three numbers. The actual resolution will be defined by adding the bspline order (default is 3) to the resolution in each dimension specified here. For example, 1,1,1 will result in a 4x4x4 grid of control points. This parameter may need to be adjusted based on your input image. In the multi-resolution N4 framework, the resolution of the bspline grid at subsequent iterations will be doubled. The number of resolutions is implicitly defined by Number of iterations parameter (the size of this list is the number of resolutions)", sep=",", - argstr="--meshresolution %s") + argstr="--meshresolution %s", + ) splinedistance = traits.Float( - desc= - "An alternative means to define the spline grid, by setting the distance between the control points. This parameter is used only if the grid resolution is not specified.", - argstr="--splinedistance %f") + desc="An alternative means to define the spline grid, by setting the distance between the control points. This parameter is used only if the grid resolution is not specified.", + argstr="--splinedistance %f", + ) shrinkfactor = traits.Int( - desc= - "Defines how much the image should be upsampled before estimating the inhomogeneity field. Increase if you want to reduce the execution time. 1 corresponds to the original resolution. Larger values will significantly reduce the computation time.", - argstr="--shrinkfactor %d") + desc="Defines how much the image should be upsampled before estimating the inhomogeneity field. Increase if you want to reduce the execution time. 1 corresponds to the original resolution. 
Larger values will significantly reduce the computation time.", + argstr="--shrinkfactor %d", + ) bsplineorder = traits.Int( - desc= - "Order of B-spline used in the approximation. Larger values will lead to longer execution times, may result in overfitting and poor result.", - argstr="--bsplineorder %d") - weightimage = File( - desc="Weight Image", exists=True, argstr="--weightimage %s") + desc="Order of B-spline used in the approximation. Larger values will lead to longer execution times, may result in overfitting and poor result.", + argstr="--bsplineorder %d", + ) + weightimage = File(desc="Weight Image", exists=True, argstr="--weightimage %s") histogramsharpening = InputMultiPath( traits.Float, - desc= - "A vector of up to three values. Non-zero values correspond to Bias Field Full Width at Half Maximum, Wiener filter noise, and Number of histogram bins.", + desc="A vector of up to three values. Non-zero values correspond to Bias Field Full Width at Half Maximum, Wiener filter noise, and Number of histogram bins.", sep=",", - argstr="--histogramsharpening %s") + argstr="--histogramsharpening %s", + ) class N4ITKBiasFieldCorrectionOutputSpec(TraitedSpec): @@ -93,6 +106,6 @@ class N4ITKBiasFieldCorrection(SEMLikeCommandLine): output_spec = N4ITKBiasFieldCorrectionOutputSpec _cmd = "N4ITKBiasFieldCorrection " _outputs_filenames = { - 'outputimage': 'outputimage.nii', - 'outputbiasfield': 'outputbiasfield.nii' + "outputimage": "outputimage.nii", + "outputbiasfield": "outputbiasfield.nii", } diff --git a/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py b/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py index 6205b76b54..c41827cb39 100644 --- a/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py +++ b/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py @@ -3,123 +3,139 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class ResampleScalarVectorDWIVolumeInputSpec(CommandLineInputSpec): inputVolume = File( - position=-2, - desc="Input Volume to be resampled", - exists=True, - argstr="%s") + position=-2, desc="Input Volume to be resampled", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Resampled Volume", - argstr="%s") + argstr="%s", + ) Reference = File( desc="Reference Volume (spacing,size,orientation,origin)", exists=True, - argstr="--Reference %s") + argstr="--Reference %s", + ) transformationFile = File(exists=True, argstr="--transformationFile %s") defField = File( - desc= - "File containing the deformation field (3D vector image containing vectors with 3 components)", + desc="File containing the deformation field (3D vector image containing vectors with 3 components)", exists=True, - argstr="--defField %s") + argstr="--defField %s", + ) hfieldtype = traits.Enum( "displacement", "h-Field", desc="Set if the deformation field is an h-Field", - argstr="--hfieldtype %s") + argstr="--hfieldtype %s", + ) interpolation = traits.Enum( "linear", "nn", "ws", "bs", - desc= - "Sampling algorithm (linear or nn 
(nearest neighborhoor), ws (WindowedSinc), bs (BSpline) )", - argstr="--interpolation %s") + desc="Sampling algorithm (linear or nn (nearest neighbor), ws (WindowedSinc), bs (BSpline) )", + argstr="--interpolation %s", + ) transform_order = traits.Enum( "input-to-output", "output-to-input", desc="Select in what order the transforms are read", - argstr="--transform_order %s") + argstr="--transform_order %s", + ) notbulk = traits.Bool( - desc= - "The transform following the BSpline transform is not set as a bulk transform for the BSpline transform", - argstr="--notbulk ") + desc="The transform following the BSpline transform is not set as a bulk transform for the BSpline transform", + argstr="--notbulk ", + ) spaceChange = traits.Bool( - desc= - "Space Orientation between transform and image is different (RAS/LPS) (warning: if the transform is a Transform Node in Slicer3, do not select)", - argstr="--spaceChange ") + desc="Space Orientation between transform and image is different (RAS/LPS) (warning: if the transform is a Transform Node in Slicer3, do not select)", + argstr="--spaceChange ", + ) rotation_point = traits.List( - desc= - "Rotation Point in case of rotation around a point (otherwise useless)", - argstr="--rotation_point %s") + desc="Rotation Point in case of rotation around a point (otherwise useless)", + argstr="--rotation_point %s", + ) centered_transform = traits.Bool( - desc= - "Set the center of the transformation to the center of the input image", - argstr="--centered_transform ") + desc="Set the center of the transformation to the center of the input image", + argstr="--centered_transform ", + ) image_center = traits.Enum( "input", "output", - desc= - "Image to use to center the transform (used only if \'Centered Transform\' is selected)", - argstr="--image_center %s") + desc="Image to use to center the transform (used only if 'Centered Transform' is selected)", + argstr="--image_center %s", + ) Inverse_ITK_Transformation = traits.Bool( - desc= - "Inverse the transformation before applying it from output image to input image", - argstr="--Inverse_ITK_Transformation ") + desc="Inverse the transformation before applying it from output image to input image", + argstr="--Inverse_ITK_Transformation ", + ) spacing = InputMultiPath( traits.Float, desc="Spacing along each dimension (0 means use input spacing)", sep=",", - argstr="--spacing %s") + argstr="--spacing %s", + ) size = InputMultiPath( traits.Float, desc="Size along each dimension (0 means use input size)", sep=",", - argstr="--size %s") - origin = traits.List( - desc="Origin of the output Image", argstr="--origin %s") + argstr="--size %s", + ) + origin = traits.List(desc="Origin of the output Image", argstr="--origin %s") direction_matrix = InputMultiPath( traits.Float, - desc= - "9 parameters of the direction matrix by rows (ijk to LPS if LPS transform, ijk to RAS if RAS transform)", + desc="9 parameters of the direction matrix by rows (ijk to LPS if LPS transform, ijk to RAS if RAS transform)", sep=",", - argstr="--direction_matrix %s") + argstr="--direction_matrix %s", + ) number_of_thread = traits.Int( desc="Number of threads used to compute the output image", - argstr="--number_of_thread %d") + argstr="--number_of_thread %d", + ) default_pixel_value = traits.Float( - desc= - "Default pixel value for samples falling outside of the input region", - argstr="--default_pixel_value %f") + desc="Default pixel value for samples falling outside of the input region", + argstr="--default_pixel_value %f", + )
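    # Note on how this spec is consumed (illustrative sketch with hypothetical
    # values, not part of the autogenerated module): CommandLine renders each
    # trait that has been set through its argstr template, and InputMultiPath
    # traits declared with sep="," are joined into a single comma-separated
    # token. For example, setting spacing=[1.0, 1.0, 1.0] and
    # interpolation="bs" on a ResampleScalarVectorDWIVolume instance and
    # inspecting its .cmdline property should yield arguments such as
    # "--spacing 1.0,1.0,1.0" and "--interpolation bs", placed ahead of the
    # positional input and output volumes (positions -2 and -1 sort last).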
window_function = traits.Enum( "h", "c", "w", "l", "b", - desc= - "Window Function , h = Hamming , c = Cosine , w = Welch , l = Lanczos , b = Blackman", - argstr="--window_function %s") + desc="Window Function , h = Hamming , c = Cosine , w = Welch , l = Lanczos , b = Blackman", + argstr="--window_function %s", + ) spline_order = traits.Int(desc="Spline Order", argstr="--spline_order %d") transform_matrix = InputMultiPath( traits.Float, - desc= - "12 parameters of the transform matrix by rows ( --last 3 being translation-- )", + desc="12 parameters of the transform matrix by rows ( --last 3 being translation-- )", sep=",", - argstr="--transform_matrix %s") + argstr="--transform_matrix %s", + ) transform = traits.Enum( "rt", "a", desc="Transform algorithm, rt = Rigid Transform, a = Affine Transform", - argstr="--transform %s") + argstr="--transform %s", + ) class ResampleScalarVectorDWIVolumeOutputSpec(TraitedSpec): @@ -150,4 +166,4 @@ class ResampleScalarVectorDWIVolume(SEMLikeCommandLine): input_spec = ResampleScalarVectorDWIVolumeInputSpec output_spec = ResampleScalarVectorDWIVolumeOutputSpec _cmd = "ResampleScalarVectorDWIVolume " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_AddScalarVolumes.py b/nipype/interfaces/slicer/filtering/tests/test_auto_AddScalarVolumes.py index cd05a91d05..3ea978e771 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_AddScalarVolumes.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_AddScalarVolumes.py @@ -4,38 +4,22 @@ def test_AddScalarVolumes_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume1=dict( - argstr='%s', - extensions=None, - position=-3, - ), - inputVolume2=dict( - argstr='%s', - extensions=None, - position=-2, - ), - order=dict(argstr='--order %s', ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume1=dict(argstr="%s", extensions=None, position=-3,), + inputVolume2=dict(argstr="%s", extensions=None, position=-2,), + order=dict(argstr="--order %s",), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), ) inputs = AddScalarVolumes.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AddScalarVolumes_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = AddScalarVolumes.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_CastScalarVolume.py b/nipype/interfaces/slicer/filtering/tests/test_auto_CastScalarVolume.py index 6ed35e0df7..e57c2a691b 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_CastScalarVolume.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_CastScalarVolume.py @@ -4,33 +4,21 @@ def test_CastScalarVolume_inputs(): input_map = dict( - InputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - OutputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - type=dict(argstr='--type %s', ), + InputVolume=dict(argstr="%s", extensions=None, 
position=-2,), + OutputVolume=dict(argstr="%s", hash_files=False, position=-1,), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + type=dict(argstr="--type %s",), ) inputs = CastScalarVolume.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CastScalarVolume_outputs(): - output_map = dict(OutputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(OutputVolume=dict(extensions=None, position=-1,),) outputs = CastScalarVolume.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_CheckerBoardFilter.py b/nipype/interfaces/slicer/filtering/tests/test_auto_CheckerBoardFilter.py index 9ad8894a35..564e2e14f8 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_CheckerBoardFilter.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_CheckerBoardFilter.py @@ -4,41 +4,22 @@ def test_CheckerBoardFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - checkerPattern=dict( - argstr='--checkerPattern %s', - sep=',', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume1=dict( - argstr='%s', - extensions=None, - position=-3, - ), - inputVolume2=dict( - argstr='%s', - extensions=None, - position=-2, - ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), + args=dict(argstr="%s",), + checkerPattern=dict(argstr="--checkerPattern %s", sep=",",), + environ=dict(nohash=True, usedefault=True,), + inputVolume1=dict(argstr="%s", extensions=None, position=-3,), + inputVolume2=dict(argstr="%s", extensions=None, position=-2,), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), ) inputs = CheckerBoardFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CheckerBoardFilter_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = CheckerBoardFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_CurvatureAnisotropicDiffusion.py b/nipype/interfaces/slicer/filtering/tests/test_auto_CurvatureAnisotropicDiffusion.py index 0dd2a4f946..189bd459a0 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_CurvatureAnisotropicDiffusion.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_CurvatureAnisotropicDiffusion.py @@ -4,35 +4,23 @@ def test_CurvatureAnisotropicDiffusion_inputs(): input_map = dict( - args=dict(argstr='%s', ), - conductance=dict(argstr='--conductance %f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - iterations=dict(argstr='--iterations %d', ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - timeStep=dict(argstr='--timeStep %f', ), + args=dict(argstr="%s",), + conductance=dict(argstr="--conductance %f",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="%s", extensions=None, position=-2,), + iterations=dict(argstr="--iterations %d",), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), + timeStep=dict(argstr="--timeStep %f",), ) inputs = CurvatureAnisotropicDiffusion.input_spec() for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CurvatureAnisotropicDiffusion_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = CurvatureAnisotropicDiffusion.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_ExtractSkeleton.py b/nipype/interfaces/slicer/filtering/tests/test_auto_ExtractSkeleton.py index f3b027c12e..93861cefa5 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_ExtractSkeleton.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_ExtractSkeleton.py @@ -4,36 +4,24 @@ def test_ExtractSkeleton_inputs(): input_map = dict( - InputImageFileName=dict( - argstr='%s', - extensions=None, - position=-2, - ), - OutputImageFileName=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - args=dict(argstr='%s', ), - dontPrune=dict(argstr='--dontPrune ', ), - environ=dict( - nohash=True, - usedefault=True, - ), - numPoints=dict(argstr='--numPoints %d', ), - pointsFile=dict(argstr='--pointsFile %s', ), - type=dict(argstr='--type %s', ), + InputImageFileName=dict(argstr="%s", extensions=None, position=-2,), + OutputImageFileName=dict(argstr="%s", hash_files=False, position=-1,), + args=dict(argstr="%s",), + dontPrune=dict(argstr="--dontPrune ",), + environ=dict(nohash=True, usedefault=True,), + numPoints=dict(argstr="--numPoints %d",), + pointsFile=dict(argstr="--pointsFile %s",), + type=dict(argstr="--type %s",), ) inputs = ExtractSkeleton.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ExtractSkeleton_outputs(): - output_map = dict(OutputImageFileName=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(OutputImageFileName=dict(extensions=None, position=-1,),) outputs = ExtractSkeleton.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_GaussianBlurImageFilter.py b/nipype/interfaces/slicer/filtering/tests/test_auto_GaussianBlurImageFilter.py index 5e01ce71cc..a0655cfb6e 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_GaussianBlurImageFilter.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_GaussianBlurImageFilter.py @@ -4,33 +4,21 @@ def test_GaussianBlurImageFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - sigma=dict(argstr='--sigma %f', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="%s", extensions=None, position=-2,), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), + sigma=dict(argstr="--sigma %f",), ) inputs = GaussianBlurImageFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GaussianBlurImageFilter_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = GaussianBlurImageFilter.output_spec() for key, metadata in 
list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_GradientAnisotropicDiffusion.py b/nipype/interfaces/slicer/filtering/tests/test_auto_GradientAnisotropicDiffusion.py index 3d803b9222..2b6e77d1d8 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_GradientAnisotropicDiffusion.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_GradientAnisotropicDiffusion.py @@ -4,35 +4,23 @@ def test_GradientAnisotropicDiffusion_inputs(): input_map = dict( - args=dict(argstr='%s', ), - conductance=dict(argstr='--conductance %f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - iterations=dict(argstr='--iterations %d', ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - timeStep=dict(argstr='--timeStep %f', ), + args=dict(argstr="%s",), + conductance=dict(argstr="--conductance %f",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="%s", extensions=None, position=-2,), + iterations=dict(argstr="--iterations %d",), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), + timeStep=dict(argstr="--timeStep %f",), ) inputs = GradientAnisotropicDiffusion.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GradientAnisotropicDiffusion_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = GradientAnisotropicDiffusion.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_GrayscaleFillHoleImageFilter.py b/nipype/interfaces/slicer/filtering/tests/test_auto_GrayscaleFillHoleImageFilter.py index 2d9bf34805..a12177d820 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_GrayscaleFillHoleImageFilter.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_GrayscaleFillHoleImageFilter.py @@ -4,32 +4,20 @@ def test_GrayscaleFillHoleImageFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="%s", extensions=None, position=-2,), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), ) inputs = GrayscaleFillHoleImageFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GrayscaleFillHoleImageFilter_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = GrayscaleFillHoleImageFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_GrayscaleGrindPeakImageFilter.py b/nipype/interfaces/slicer/filtering/tests/test_auto_GrayscaleGrindPeakImageFilter.py index ad1a04aff7..ab1c23f716 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_GrayscaleGrindPeakImageFilter.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_GrayscaleGrindPeakImageFilter.py @@ -4,32 +4,20 
@@ def test_GrayscaleGrindPeakImageFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="%s", extensions=None, position=-2,), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), ) inputs = GrayscaleGrindPeakImageFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GrayscaleGrindPeakImageFilter_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = GrayscaleGrindPeakImageFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_HistogramMatching.py b/nipype/interfaces/slicer/filtering/tests/test_auto_HistogramMatching.py index 3ace435981..003ec4c8d8 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_HistogramMatching.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_HistogramMatching.py @@ -4,40 +4,24 @@ def test_HistogramMatching_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-3, - ), - numberOfHistogramLevels=dict(argstr='--numberOfHistogramLevels %d', ), - numberOfMatchPoints=dict(argstr='--numberOfMatchPoints %d', ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - referenceVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - threshold=dict(argstr='--threshold ', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="%s", extensions=None, position=-3,), + numberOfHistogramLevels=dict(argstr="--numberOfHistogramLevels %d",), + numberOfMatchPoints=dict(argstr="--numberOfMatchPoints %d",), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), + referenceVolume=dict(argstr="%s", extensions=None, position=-2,), + threshold=dict(argstr="--threshold ",), ) inputs = HistogramMatching.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_HistogramMatching_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = HistogramMatching.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_ImageLabelCombine.py b/nipype/interfaces/slicer/filtering/tests/test_auto_ImageLabelCombine.py index ec9a2bd0b2..8fb1596420 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_ImageLabelCombine.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_ImageLabelCombine.py @@ -4,38 +4,22 @@ def test_ImageLabelCombine_inputs(): input_map = dict( - InputLabelMap_A=dict( - argstr='%s', - extensions=None, - position=-3, - ), - InputLabelMap_B=dict( - argstr='%s', - extensions=None, - position=-2, - ), - OutputLabelMap=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - args=dict(argstr='%s', ), - environ=dict( - 
nohash=True, - usedefault=True, - ), - first_overwrites=dict(argstr='--first_overwrites ', ), + InputLabelMap_A=dict(argstr="%s", extensions=None, position=-3,), + InputLabelMap_B=dict(argstr="%s", extensions=None, position=-2,), + OutputLabelMap=dict(argstr="%s", hash_files=False, position=-1,), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + first_overwrites=dict(argstr="--first_overwrites ",), ) inputs = ImageLabelCombine.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ImageLabelCombine_outputs(): - output_map = dict(OutputLabelMap=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(OutputLabelMap=dict(extensions=None, position=-1,),) outputs = ImageLabelCombine.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_MaskScalarVolume.py b/nipype/interfaces/slicer/filtering/tests/test_auto_MaskScalarVolume.py index 17c20d18dc..fd8bf6aaf7 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_MaskScalarVolume.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_MaskScalarVolume.py @@ -4,39 +4,23 @@ def test_MaskScalarVolume_inputs(): input_map = dict( - InputVolume=dict( - argstr='%s', - extensions=None, - position=-3, - ), - MaskVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - OutputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - label=dict(argstr='--label %d', ), - replace=dict(argstr='--replace %d', ), + InputVolume=dict(argstr="%s", extensions=None, position=-3,), + MaskVolume=dict(argstr="%s", extensions=None, position=-2,), + OutputVolume=dict(argstr="%s", hash_files=False, position=-1,), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + label=dict(argstr="--label %d",), + replace=dict(argstr="--replace %d",), ) inputs = MaskScalarVolume.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MaskScalarVolume_outputs(): - output_map = dict(OutputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(OutputVolume=dict(extensions=None, position=-1,),) outputs = MaskScalarVolume.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_MedianImageFilter.py b/nipype/interfaces/slicer/filtering/tests/test_auto_MedianImageFilter.py index d02373d1c9..969a0dead6 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_MedianImageFilter.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_MedianImageFilter.py @@ -4,36 +4,21 @@ def test_MedianImageFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - neighborhood=dict( - argstr='--neighborhood %s', - sep=',', - ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="%s", extensions=None, position=-2,), + neighborhood=dict(argstr="--neighborhood %s", sep=",",), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), ) inputs = MedianImageFilter.input_spec() for key, 
metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MedianImageFilter_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = MedianImageFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_MultiplyScalarVolumes.py b/nipype/interfaces/slicer/filtering/tests/test_auto_MultiplyScalarVolumes.py index 95ac2f98a4..f6e521fe8e 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_MultiplyScalarVolumes.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_MultiplyScalarVolumes.py @@ -4,38 +4,22 @@ def test_MultiplyScalarVolumes_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume1=dict( - argstr='%s', - extensions=None, - position=-3, - ), - inputVolume2=dict( - argstr='%s', - extensions=None, - position=-2, - ), - order=dict(argstr='--order %s', ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume1=dict(argstr="%s", extensions=None, position=-3,), + inputVolume2=dict(argstr="%s", extensions=None, position=-2,), + order=dict(argstr="--order %s",), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), ) inputs = MultiplyScalarVolumes.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MultiplyScalarVolumes_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = MultiplyScalarVolumes.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_N4ITKBiasFieldCorrection.py b/nipype/interfaces/slicer/filtering/tests/test_auto_N4ITKBiasFieldCorrection.py index 0938eab33c..435b4d0f6a 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_N4ITKBiasFieldCorrection.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_N4ITKBiasFieldCorrection.py @@ -4,57 +4,31 @@ def test_N4ITKBiasFieldCorrection_inputs(): input_map = dict( - args=dict(argstr='%s', ), - bsplineorder=dict(argstr='--bsplineorder %d', ), - convergencethreshold=dict(argstr='--convergencethreshold %f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - histogramsharpening=dict( - argstr='--histogramsharpening %s', - sep=',', - ), - inputimage=dict( - argstr='--inputimage %s', - extensions=None, - ), - iterations=dict( - argstr='--iterations %s', - sep=',', - ), - maskimage=dict( - argstr='--maskimage %s', - extensions=None, - ), - meshresolution=dict( - argstr='--meshresolution %s', - sep=',', - ), - outputbiasfield=dict( - argstr='--outputbiasfield %s', - hash_files=False, - ), - outputimage=dict( - argstr='--outputimage %s', - hash_files=False, - ), - shrinkfactor=dict(argstr='--shrinkfactor %d', ), - splinedistance=dict(argstr='--splinedistance %f', ), - weightimage=dict( - argstr='--weightimage %s', - extensions=None, - ), + args=dict(argstr="%s",), + bsplineorder=dict(argstr="--bsplineorder %d",), + convergencethreshold=dict(argstr="--convergencethreshold %f",), + environ=dict(nohash=True, usedefault=True,), + 
histogramsharpening=dict(argstr="--histogramsharpening %s", sep=",",), + inputimage=dict(argstr="--inputimage %s", extensions=None,), + iterations=dict(argstr="--iterations %s", sep=",",), + maskimage=dict(argstr="--maskimage %s", extensions=None,), + meshresolution=dict(argstr="--meshresolution %s", sep=",",), + outputbiasfield=dict(argstr="--outputbiasfield %s", hash_files=False,), + outputimage=dict(argstr="--outputimage %s", hash_files=False,), + shrinkfactor=dict(argstr="--shrinkfactor %d",), + splinedistance=dict(argstr="--splinedistance %f",), + weightimage=dict(argstr="--weightimage %s", extensions=None,), ) inputs = N4ITKBiasFieldCorrection.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_N4ITKBiasFieldCorrection_outputs(): output_map = dict( - outputbiasfield=dict(extensions=None, ), - outputimage=dict(extensions=None, ), + outputbiasfield=dict(extensions=None,), outputimage=dict(extensions=None,), ) outputs = N4ITKBiasFieldCorrection.output_spec() diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_ResampleScalarVectorDWIVolume.py b/nipype/interfaces/slicer/filtering/tests/test_auto_ResampleScalarVectorDWIVolume.py index 2e95aaf01e..d1a28f3374 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_ResampleScalarVectorDWIVolume.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_ResampleScalarVectorDWIVolume.py @@ -4,76 +4,42 @@ def test_ResampleScalarVectorDWIVolume_inputs(): input_map = dict( - Inverse_ITK_Transformation=dict( - argstr='--Inverse_ITK_Transformation ', ), - Reference=dict( - argstr='--Reference %s', - extensions=None, - ), - args=dict(argstr='%s', ), - centered_transform=dict(argstr='--centered_transform ', ), - defField=dict( - argstr='--defField %s', - extensions=None, - ), - default_pixel_value=dict(argstr='--default_pixel_value %f', ), - direction_matrix=dict( - argstr='--direction_matrix %s', - sep=',', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - hfieldtype=dict(argstr='--hfieldtype %s', ), - image_center=dict(argstr='--image_center %s', ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - interpolation=dict(argstr='--interpolation %s', ), - notbulk=dict(argstr='--notbulk ', ), - number_of_thread=dict(argstr='--number_of_thread %d', ), - origin=dict(argstr='--origin %s', ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - rotation_point=dict(argstr='--rotation_point %s', ), - size=dict( - argstr='--size %s', - sep=',', - ), - spaceChange=dict(argstr='--spaceChange ', ), - spacing=dict( - argstr='--spacing %s', - sep=',', - ), - spline_order=dict(argstr='--spline_order %d', ), - transform=dict(argstr='--transform %s', ), - transform_matrix=dict( - argstr='--transform_matrix %s', - sep=',', - ), - transform_order=dict(argstr='--transform_order %s', ), - transformationFile=dict( - argstr='--transformationFile %s', - extensions=None, - ), - window_function=dict(argstr='--window_function %s', ), + Inverse_ITK_Transformation=dict(argstr="--Inverse_ITK_Transformation ",), + Reference=dict(argstr="--Reference %s", extensions=None,), + args=dict(argstr="%s",), + centered_transform=dict(argstr="--centered_transform ",), + defField=dict(argstr="--defField %s", extensions=None,), + default_pixel_value=dict(argstr="--default_pixel_value %f",), + direction_matrix=dict(argstr="--direction_matrix %s", sep=",",), + environ=dict(nohash=True, 
usedefault=True,), + hfieldtype=dict(argstr="--hfieldtype %s",), + image_center=dict(argstr="--image_center %s",), + inputVolume=dict(argstr="%s", extensions=None, position=-2,), + interpolation=dict(argstr="--interpolation %s",), + notbulk=dict(argstr="--notbulk ",), + number_of_thread=dict(argstr="--number_of_thread %d",), + origin=dict(argstr="--origin %s",), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), + rotation_point=dict(argstr="--rotation_point %s",), + size=dict(argstr="--size %s", sep=",",), + spaceChange=dict(argstr="--spaceChange ",), + spacing=dict(argstr="--spacing %s", sep=",",), + spline_order=dict(argstr="--spline_order %d",), + transform=dict(argstr="--transform %s",), + transform_matrix=dict(argstr="--transform_matrix %s", sep=",",), + transform_order=dict(argstr="--transform_order %s",), + transformationFile=dict(argstr="--transformationFile %s", extensions=None,), + window_function=dict(argstr="--window_function %s",), ) inputs = ResampleScalarVectorDWIVolume.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ResampleScalarVectorDWIVolume_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = ResampleScalarVectorDWIVolume.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_SubtractScalarVolumes.py b/nipype/interfaces/slicer/filtering/tests/test_auto_SubtractScalarVolumes.py index 70d8908ce0..6d8ae8ad73 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_SubtractScalarVolumes.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_SubtractScalarVolumes.py @@ -4,38 +4,22 @@ def test_SubtractScalarVolumes_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume1=dict( - argstr='%s', - extensions=None, - position=-3, - ), - inputVolume2=dict( - argstr='%s', - extensions=None, - position=-2, - ), - order=dict(argstr='--order %s', ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume1=dict(argstr="%s", extensions=None, position=-3,), + inputVolume2=dict(argstr="%s", extensions=None, position=-2,), + order=dict(argstr="--order %s",), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), ) inputs = SubtractScalarVolumes.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SubtractScalarVolumes_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = SubtractScalarVolumes.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_ThresholdScalarVolume.py b/nipype/interfaces/slicer/filtering/tests/test_auto_ThresholdScalarVolume.py index 51e7d1726d..eec13b435e 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_ThresholdScalarVolume.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_ThresholdScalarVolume.py @@ -4,37 +4,25 @@ def test_ThresholdScalarVolume_inputs(): input_map = dict( - InputVolume=dict( - argstr='%s', - 
extensions=None, - position=-2, - ), - OutputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - lower=dict(argstr='--lower %d', ), - outsidevalue=dict(argstr='--outsidevalue %d', ), - threshold=dict(argstr='--threshold %d', ), - thresholdtype=dict(argstr='--thresholdtype %s', ), - upper=dict(argstr='--upper %d', ), + InputVolume=dict(argstr="%s", extensions=None, position=-2,), + OutputVolume=dict(argstr="%s", hash_files=False, position=-1,), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + lower=dict(argstr="--lower %d",), + outsidevalue=dict(argstr="--outsidevalue %d",), + threshold=dict(argstr="--threshold %d",), + thresholdtype=dict(argstr="--thresholdtype %s",), + upper=dict(argstr="--upper %d",), ) inputs = ThresholdScalarVolume.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ThresholdScalarVolume_outputs(): - output_map = dict(OutputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(OutputVolume=dict(extensions=None, position=-1,),) outputs = ThresholdScalarVolume.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/tests/test_auto_VotingBinaryHoleFillingImageFilter.py b/nipype/interfaces/slicer/filtering/tests/test_auto_VotingBinaryHoleFillingImageFilter.py index 3ab237831e..d45159cc1b 100644 --- a/nipype/interfaces/slicer/filtering/tests/test_auto_VotingBinaryHoleFillingImageFilter.py +++ b/nipype/interfaces/slicer/filtering/tests/test_auto_VotingBinaryHoleFillingImageFilter.py @@ -4,39 +4,24 @@ def test_VotingBinaryHoleFillingImageFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - background=dict(argstr='--background %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - foreground=dict(argstr='--foreground %d', ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - majorityThreshold=dict(argstr='--majorityThreshold %d', ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - radius=dict( - argstr='--radius %s', - sep=',', - ), + args=dict(argstr="%s",), + background=dict(argstr="--background %d",), + environ=dict(nohash=True, usedefault=True,), + foreground=dict(argstr="--foreground %d",), + inputVolume=dict(argstr="%s", extensions=None, position=-2,), + majorityThreshold=dict(argstr="--majorityThreshold %d",), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), + radius=dict(argstr="--radius %s", sep=",",), ) inputs = VotingBinaryHoleFillingImageFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_VotingBinaryHoleFillingImageFilter_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = VotingBinaryHoleFillingImageFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py b/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py index 041ce10990..e72284456b 100644 --- a/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py +++ b/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py @@ -3,39 +3,49 @@ """Autogenerated file - DO NOT 
EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class ThresholdScalarVolumeInputSpec(CommandLineInputSpec): - InputVolume = File( - position=-2, desc="Input volume", exists=True, argstr="%s") + InputVolume = File(position=-2, desc="Input volume", exists=True, argstr="%s") OutputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Thresholded input volume", - argstr="%s") + argstr="%s", + ) threshold = traits.Int(desc="Threshold value", argstr="--threshold %d") lower = traits.Int(desc="Lower threshold value", argstr="--lower %d") upper = traits.Int(desc="Upper threshold value", argstr="--upper %d") outsidevalue = traits.Int( - desc= - "Set the voxels to this value if they fall outside the threshold range", - argstr="--outsidevalue %d") + desc="Set the voxels to this value if they fall outside the threshold range", + argstr="--outsidevalue %d", + ) thresholdtype = traits.Enum( "Below", "Above", "Outside", - desc= - "What kind of threshold to perform. If Outside is selected, uses Upper and Lower values. If Below is selected, uses the ThresholdValue, if Above is selected, uses the ThresholdValue.", - argstr="--thresholdtype %s") + desc="What kind of threshold to perform. If Outside is selected, uses Upper and Lower values. If Below is selected, uses the ThresholdValue, if Above is selected, uses the ThresholdValue.", + argstr="--thresholdtype %s", + ) class ThresholdScalarVolumeOutputSpec(TraitedSpec): - OutputVolume = File( - position=-1, desc="Thresholded input volume", exists=True) + OutputVolume = File(position=-1, desc="Thresholded input volume", exists=True) class ThresholdScalarVolume(SEMLikeCommandLine): @@ -58,4 +68,4 @@ class ThresholdScalarVolume(SEMLikeCommandLine): input_spec = ThresholdScalarVolumeInputSpec output_spec = ThresholdScalarVolumeOutputSpec _cmd = "ThresholdScalarVolume " - _outputs_filenames = {'OutputVolume': 'OutputVolume.nii'} + _outputs_filenames = {"OutputVolume": "OutputVolume.nii"} diff --git a/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py b/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py index 9c19799d04..2ed3736d1b 100644 --- a/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py +++ b/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py @@ -3,7 +3,18 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os @@ -12,29 +23,31 @@ class VotingBinaryHoleFillingImageFilterInputSpec(CommandLineInputSpec): traits.Int, desc="The radius of a hole to be filled", sep=",", - argstr="--radius %s") + argstr="--radius %s", + ) majorityThreshold = traits.Int( - desc= - "The number of pixels over 
50% that will decide whether an OFF pixel will become ON or not. For example, if the neighborhood of a pixel has 124 pixels (excluding itself), the 50% will be 62, and if you set a Majority threshold of 5, that means that the filter will require 67 or more neighbor pixels to be ON in order to switch the current OFF pixel to ON.", - argstr="--majorityThreshold %d") + desc="The number of pixels over 50% that will decide whether an OFF pixel will become ON or not. For example, if the neighborhood of a pixel has 124 pixels (excluding itself), the 50% will be 62, and if you set a Majority threshold of 5, that means that the filter will require 67 or more neighbor pixels to be ON in order to switch the current OFF pixel to ON.", + argstr="--majorityThreshold %d", + ) background = traits.Int( desc="The value associated with the background (not object)", - argstr="--background %d") + argstr="--background %d", + ) foreground = traits.Int( desc="The value associated with the foreground (object)", - argstr="--foreground %d") + argstr="--foreground %d", + ) inputVolume = File( - position=-2, - desc="Input volume to be filtered", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", - argstr="%s") + argstr="%s", + ) class VotingBinaryHoleFillingImageFilterOutputSpec(TraitedSpec): @@ -61,4 +74,4 @@ class VotingBinaryHoleFillingImageFilter(SEMLikeCommandLine): input_spec = VotingBinaryHoleFillingImageFilterInputSpec output_spec = VotingBinaryHoleFillingImageFilterOutputSpec _cmd = "VotingBinaryHoleFillingImageFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/generate_classes.py b/nipype/interfaces/slicer/generate_classes.py index 371c957acd..54eeb0b089 100644 --- a/nipype/interfaces/slicer/generate_classes.py +++ b/nipype/interfaces/slicer/generate_classes.py @@ -9,7 +9,10 @@ from shutil import rmtree import keyword -python_keywords = keyword.kwlist # If c++ SEM module uses one of these key words as a command line parameter, we need to modify variable + +python_keywords = ( + keyword.kwlist +) # If c++ SEM module uses one of these key words as a command line parameter, we need to modify variable def force_to_valid_python_variable_name(old_name): @@ -24,18 +27,19 @@ def force_to_valid_python_variable_name(old_name): new_name = old_name new_name = new_name.lstrip().rstrip() if old_name in python_keywords: - new_name = 'opt_' + old_name + new_name = "opt_" + old_name return new_name def add_class_to_package(class_codes, class_names, module_name, package_dir): module_python_filename = os.path.join(package_dir, "%s.py" % module_name) - f_m = open(module_python_filename, 'w') - f_i = open(os.path.join(package_dir, "__init__.py"), 'a+') - f_m.write("""# -*- coding: utf-8 -*- + f_m = open(module_python_filename, "w") + f_i = open(os.path.join(package_dir, "__init__.py"), "a+") + f_m.write( + """# -*- coding: utf-8 -*- \"\"\"Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.\"\"\"\n\n""" - ) + ) imports = """\ from ..base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath) @@ -54,21 +58,19 @@ def crawl_code_struct(code_struct, package_dir): module_name = k.lower() class_name = k 
class_code = v - add_class_to_package([class_code], [class_name], module_name, - package_dir) + add_class_to_package([class_code], [class_name], module_name, package_dir) else: l1 = {} l2 = {} for key in list(v.keys()): - if (isinstance(v[key], str) - or isinstance(v[key], (str, bytes))): + if isinstance(v[key], str) or isinstance(v[key], (str, bytes)): l1[key] = v[key] else: l2[key] = v[key] if l2: v = l2 subpackages.append(k.lower()) - f_i = open(os.path.join(package_dir, "__init__.py"), 'a+') + f_i = open(os.path.join(package_dir, "__init__.py"), "a+") f_i.write("from %s import *\n" % k.lower()) f_i.close() new_pkg_dir = os.path.join(package_dir, k.lower()) @@ -83,9 +85,10 @@ def crawl_code_struct(code_struct, package_dir): v = l1 module_name = k.lower() add_class_to_package( - list(v.values()), list(v.keys()), module_name, package_dir) + list(v.values()), list(v.keys()), module_name, package_dir + ) if subpackages: - f = open(os.path.join(package_dir, "setup.py"), 'w') + f = open(os.path.join(package_dir, "setup.py"), "w") f.write( """# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: @@ -101,17 +104,22 @@ def configuration(parent_package='',top_path=None): if __name__ == '__main__': from numpy.distutils.core import setup setup(**configuration(top_path='').todict()) -""".format(pkg_name=package_dir.split("/")[-1], - sub_pks="\n ".join([ - "config.add_data_dir('%s')" % sub_pkg for sub_pkg in subpackages - ]))) +""".format( + pkg_name=package_dir.split("/")[-1], + sub_pks="\n ".join( + [ + "config.add_data_dir('%s')" % sub_pkg + for sub_pkg in subpackages + ] + ), + ) + ) f.close() -def generate_all_classes(modules_list=[], - launcher=[], - redirect_x=False, - mipav_hacks=False): +def generate_all_classes( + modules_list=[], launcher=[], redirect_x=False, mipav_hacks=False +): """ modules_list contains all the SEM compliant tools that should have wrappers created for them. launcher contains the command line prefix wrapper arguments needed to prepare a proper environment for each of the modules.
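A note on the function above: generate_all_classes is the entry point that turns a list of SEM-compliant command-line tools into nipype wrapper modules, one <module>.py per tool, written under the current working directory by crawl_code_struct. A minimal sketch of an invocation follows; the two module names appear in the modules_list at the bottom of this file, but the empty launcher prefix is an illustrative assumption (a real Slicer setup may need a launcher command), not something this diff prescribes.

# Hedged sketch: assumes the named tools are on PATH and answer `--xml`
# when invoked; a launcher prefix such as ["/path/to/Slicer3", "--launch"]
# would be needed when the tools cannot run standalone (hypothetical path).
from nipype.interfaces.slicer.generate_classes import generate_all_classes

generate_all_classes(
    modules_list=["MedianImageFilter", "ThresholdScalarVolume"],
    launcher=[],          # no wrapper prefix; tools are invoked directly
    redirect_x=False,     # do not set _redirect_x on the generated classes
    mipav_hacks=False,    # Slicer behavior, not the MIPAV/JIST workarounds
)

The generated wrapper files and __init__.py entries land wherever this is run from, since crawl_code_struct writes into os.getcwd().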
@@ -122,7 +130,8 @@ def generate_all_classes(modules_list=[], print("Generating Definition for module {0}".format(module)) print("^" * 80) package, code, module = generate_class( - module, launcher, redirect_x=redirect_x, mipav_hacks=mipav_hacks) + module, launcher, redirect_x=redirect_x, mipav_hacks=mipav_hacks + ) cur_package = all_code module_name = package.strip().split(" ")[0].split(".")[-1] for package in package.strip().split(" ")[0].split(".")[:-1]: @@ -137,11 +146,9 @@ def generate_all_classes(modules_list=[], crawl_code_struct(all_code, os.getcwd()) -def generate_class(module, - launcher, - strip_module_name_prefix=True, - redirect_x=False, - mipav_hacks=False): +def generate_class( + module, launcher, strip_module_name_prefix=True, redirect_x=False, mipav_hacks=False +): dom = grab_xml(module, launcher, mipav_hacks=mipav_hacks) if strip_module_name_prefix: module_name = module.split(".")[-1] @@ -153,32 +160,39 @@ def generate_class(module, # self._outputs_nodes = [] - class_string = "\"\"\"" + class_string = '"""' for desc_str in [ - 'title', 'category', 'description', 'version', 'documentation-url', - 'license', 'contributor', 'acknowledgements' + "title", + "category", + "description", + "version", + "documentation-url", + "license", + "contributor", + "acknowledgements", ]: el = dom.getElementsByTagName(desc_str) if el and el[0].firstChild and el[0].firstChild.nodeValue.strip(): - class_string += desc_str + ": " + el[0].firstChild.nodeValue.strip( - ) + "\n\n" - if desc_str == 'category': + class_string += ( + desc_str + ": " + el[0].firstChild.nodeValue.strip() + "\n\n" + ) + if desc_str == "category": category = el[0].firstChild.nodeValue.strip() - class_string += "\"\"\"" + class_string += '"""' for paramGroup in dom.getElementsByTagName("parameters"): - indices = paramGroup.getElementsByTagName('index') + indices = paramGroup.getElementsByTagName("index") max_index = 0 for index in indices: if int(index.firstChild.nodeValue) > max_index: max_index = int(index.firstChild.nodeValue) for param in paramGroup.childNodes: - if param.nodeName in ['label', 'description', '#text', '#comment']: + if param.nodeName in ["label", "description", "#text", "#comment"]: continue traitsParams = {} - longFlagNode = param.getElementsByTagName('longflag') + longFlagNode = param.getElementsByTagName("longflag") if longFlagNode: # Prefer to use longFlag as name if it is given, rather than the parameter name longFlagName = longFlagNode[0].firstChild.nodeValue @@ -192,102 +206,113 @@ def generate_class(module, name = force_to_valid_python_variable_name(name) traitsParams["argstr"] = "--" + longFlagName + " " else: - name = param.getElementsByTagName('name')[ - 0].firstChild.nodeValue + name = param.getElementsByTagName("name")[0].firstChild.nodeValue name = force_to_valid_python_variable_name(name) - if param.getElementsByTagName('index'): + if param.getElementsByTagName("index"): traitsParams["argstr"] = "" else: traitsParams["argstr"] = "--" + name + " " - if param.getElementsByTagName( - 'description') and param.getElementsByTagName( - 'description')[0].firstChild: - traitsParams["desc"] = param.getElementsByTagName( - 'description')[0].firstChild.nodeValue.replace( - '"', "\\\"").replace("\n", ", ") + if ( + param.getElementsByTagName("description") + and param.getElementsByTagName("description")[0].firstChild + ): + traitsParams["desc"] = ( + param.getElementsByTagName("description")[0] + .firstChild.nodeValue.replace('"', '\\"') + .replace("\n", ", ") + ) argsDict = { - 'directory': '%s', - 
'file': '%s', - 'integer': "%d", - 'double': "%f", - 'float': "%f", - 'image': "%s", - 'transform': "%s", - 'boolean': '', - 'string-enumeration': '%s', - 'string': "%s", - 'integer-enumeration': '%s', - 'table': '%s', - 'point': '%s', - 'region': '%s', - 'geometry': '%s' + "directory": "%s", + "file": "%s", + "integer": "%d", + "double": "%f", + "float": "%f", + "image": "%s", + "transform": "%s", + "boolean": "", + "string-enumeration": "%s", + "string": "%s", + "integer-enumeration": "%s", + "table": "%s", + "point": "%s", + "region": "%s", + "geometry": "%s", } - if param.nodeName.endswith('-vector'): + if param.nodeName.endswith("-vector"): traitsParams["argstr"] += "%s" else: traitsParams["argstr"] += argsDict[param.nodeName] - index = param.getElementsByTagName('index') + index = param.getElementsByTagName("index") if index: - traitsParams["position"] = int( - index[0].firstChild.nodeValue) - (max_index + 1) + traitsParams["position"] = int(index[0].firstChild.nodeValue) - ( + max_index + 1 + ) - desc = param.getElementsByTagName('description') + desc = param.getElementsByTagName("description") if index: traitsParams["desc"] = desc[0].firstChild.nodeValue typesDict = { - 'integer': "traits.Int", - 'double': "traits.Float", - 'float': "traits.Float", - 'image': "File", - 'transform': "File", - 'boolean': "traits.Bool", - 'string': "traits.Str", - 'file': "File", - 'geometry': "File", - 'directory': "Directory", - 'table': "File", - 'point': "traits.List", - 'region': "traits.List" + "integer": "traits.Int", + "double": "traits.Float", + "float": "traits.Float", + "image": "File", + "transform": "File", + "boolean": "traits.Bool", + "string": "traits.Str", + "file": "File", + "geometry": "File", + "directory": "Directory", + "table": "File", + "point": "traits.List", + "region": "traits.List", } - if param.nodeName.endswith('-enumeration'): + if param.nodeName.endswith("-enumeration"): type = "traits.Enum" values = [ - '"%s"' % str(el.firstChild.nodeValue).replace('"', '') - for el in param.getElementsByTagName('element') + '"%s"' % str(el.firstChild.nodeValue).replace('"', "") + for el in param.getElementsByTagName("element") ] - elif param.nodeName.endswith('-vector'): + elif param.nodeName.endswith("-vector"): type = "InputMultiPath" if param.nodeName in [ - 'file', 'directory', 'image', 'geometry', 'transform', - 'table' + "file", + "directory", + "image", + "geometry", + "transform", + "table", ]: values = [ - "%s(exists=True)" % typesDict[param.nodeName.replace( - '-vector', '')] + "%s(exists=True)" + % typesDict[param.nodeName.replace("-vector", "")] ] else: - values = [typesDict[param.nodeName.replace('-vector', '')]] + values = [typesDict[param.nodeName.replace("-vector", "")]] if mipav_hacks is True: traitsParams["sep"] = ";" else: - traitsParams["sep"] = ',' - elif param.getAttribute('multiple') == "true": + traitsParams["sep"] = "," + elif param.getAttribute("multiple") == "true": type = "InputMultiPath" if param.nodeName in [ - 'file', 'directory', 'image', 'geometry', 'transform', - 'table' + "file", + "directory", + "image", + "geometry", + "transform", + "table", ]: values = ["%s(exists=True)" % typesDict[param.nodeName]] - elif param.nodeName in ['point', 'region']: + elif param.nodeName in ["point", "region"]: values = [ - "%s(traits.Float(), minlen=3, maxlen=3)" % - typesDict[param.nodeName] + "%s(traits.Float(), minlen=3, maxlen=3)" + % typesDict[param.nodeName] ] else: values = [typesDict[param.nodeName]] @@ -297,60 +322,85 @@ def generate_class(module, type = 
typesDict[param.nodeName] if param.nodeName in [ - 'file', 'directory', 'image', 'geometry', 'transform', - 'table' + "file", + "directory", + "image", + "geometry", + "transform", + "table", ]: - if not param.getElementsByTagName('channel'): + if not param.getElementsByTagName("channel"): raise RuntimeError( - "Insufficient XML specification: each element of type 'file', 'directory', 'image', 'geometry', 'transform', or 'table' requires 'channel' field.\n{0}". - format(traitsParams)) - elif param.getElementsByTagName('channel')[ - 0].firstChild.nodeValue == 'output': + "Insufficient XML specification: each element of type 'file', 'directory', 'image', 'geometry', 'transform', or 'table' requires 'channel' field.\n{0}".format( + traitsParams + ) + ) + elif ( + param.getElementsByTagName("channel")[0].firstChild.nodeValue + == "output" + ): traitsParams["hash_files"] = False inputTraits.append( - "%s = traits.Either(traits.Bool, %s(%s), %s)" % - (name, type, - parse_values(values).replace("exists=True", ""), - parse_params(traitsParams))) + "%s = traits.Either(traits.Bool, %s(%s), %s)" + % ( + name, + type, + parse_values(values).replace("exists=True", ""), + parse_params(traitsParams), + ) + ) traitsParams["exists"] = True traitsParams.pop("argstr") traitsParams.pop("hash_files") - outputTraits.append("%s = %s(%s%s)" % - (name, type.replace("Input", "Output"), - parse_values(values), - parse_params(traitsParams))) - - outputs_filenames[name] = gen_filename_from_param( - param, name) - elif param.getElementsByTagName('channel')[ - 0].firstChild.nodeValue == 'input': + outputTraits.append( + "%s = %s(%s%s)" + % ( + name, + type.replace("Input", "Output"), + parse_values(values), + parse_params(traitsParams), + ) + ) + + outputs_filenames[name] = gen_filename_from_param(param, name) + elif ( + param.getElementsByTagName("channel")[0].firstChild.nodeValue + == "input" + ): if param.nodeName in [ - 'file', 'directory', 'image', 'geometry', - 'transform', 'table' + "file", + "directory", + "image", + "geometry", + "transform", + "table", ] and type not in ["InputMultiPath", "traits.List"]: traitsParams["exists"] = True - inputTraits.append("%s = %s(%s%s)" % - (name, type, parse_values(values), - parse_params(traitsParams))) + inputTraits.append( + "%s = %s(%s%s)" + % (name, type, parse_values(values), parse_params(traitsParams)) + ) else: raise RuntimeError( - "Insufficient XML specification: each element of type 'file', 'directory', 'image', 'geometry', 'transform', or 'table' requires 'channel' field to be in ['input','output'].\n{0}". 
- format(traitsParams)) + "Insufficient XML specification: each element of type 'file', 'directory', 'image', 'geometry', 'transform', or 'table' requires 'channel' field to be in ['input','output'].\n{0}".format( + traitsParams + ) + ) else: # For all other parameter types, they are implicitly only input types - inputTraits.append("%s = %s(%s%s)" % - (name, type, parse_values(values), - parse_params(traitsParams))) + inputTraits.append( + "%s = %s(%s%s)" + % (name, type, parse_values(values), parse_params(traitsParams)) + ) if mipav_hacks: blacklisted_inputs = ["maxMemoryUsage"] inputTraits = [ - trait for trait in inputTraits - if trait.split()[0] not in blacklisted_inputs + trait for trait in inputTraits if trait.split()[0] not in blacklisted_inputs ] compulsory_inputs = [ 'xDefaultMem = traits.Int(desc="Set default maximum heap size", argstr="-xDefaultMem %d")', - 'xMaxProcess = traits.Int(1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", usedefault=True)' + 'xMaxProcess = traits.Int(1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", usedefault=True)', ] inputTraits += compulsory_inputs @@ -366,9 +416,9 @@ def generate_class(module, output_spec_code += " " + trait + "\n" output_filenames_code = "_outputs_filenames = {" - output_filenames_code += ",".join([ - "'%s':'%s'" % (key, value) for key, value in outputs_filenames.items() - ]) + output_filenames_code += ",".join( + ["'%s':'%s'" % (key, value) for key, value in outputs_filenames.items()] + ) output_filenames_code += "}" input_spec_code += "\n\n" @@ -383,10 +433,13 @@ def generate_class(module, %output_filenames_code%\n""" template += " _redirect_x = {0}\n".format(str(redirect_x)) - main_class = template.replace('%class_str%', class_string).replace( - "%module_name%", module_name).replace("%name%", module).replace( - "%output_filenames_code%", output_filenames_code).replace( - "%launcher%", " ".join(launcher)) + main_class = ( + template.replace("%class_str%", class_string) + .replace("%module_name%", module_name) + .replace("%name%", module) + .replace("%output_filenames_code%", output_filenames_code) + .replace("%launcher%", " ".join(launcher)) + ) return category, input_spec_code + output_spec_code + main_class, module_name @@ -398,7 +451,8 @@ def grab_xml(module, launcher, mipav_hacks=False): command_list.extend([module, "--xml"]) final_command = " ".join(command_list) xmlReturnValue = subprocess.Popen( - final_command, stdout=subprocess.PIPE, shell=True).communicate()[0] + final_command, stdout=subprocess.PIPE, shell=True + ).communicate()[0] if mipav_hacks: # workaround for a jist bug https://www.nitrc.org/tracker/index.php?func=detail&aid=7234&group_id=228&atid=942 new_xml = "" @@ -418,10 +472,10 @@ def grab_xml(module, launcher, mipav_hacks=False): # workaround for a JIST bug https://www.nitrc.org/tracker/index.php?func=detail&aid=7233&group_id=228&atid=942 if xmlReturnValue.strip().endswith("XML"): xmlReturnValue = xmlReturnValue.strip()[:-3] - if xmlReturnValue.strip().startswith( - "Error: Unable to set default atlas"): - xmlReturnValue = xmlReturnValue.strip()[len( - "Error: Unable to set default atlas"):] + if xmlReturnValue.strip().startswith("Error: Unable to set default atlas"): + xmlReturnValue = xmlReturnValue.strip()[ + len("Error: Unable to set default atlas") : + ] try: dom = xml.dom.minidom.parseString(xmlReturnValue.strip()) except Exception as e: @@ -442,13 +496,13 @@ def parse_params(params): if isinstance(value, (str, bytes)): list.append('%s="%s"' % 
(key, value.replace('"', "'"))) else: - list.append('%s=%s' % (key, value)) + list.append("%s=%s" % (key, value)) return ", ".join(list) def parse_values(values): - values = ['%s' % value for value in values] + values = ["%s" % value for value in values] if len(values) > 0: retstr = ", ".join(values) + ", " else: @@ -461,15 +515,15 @@ def gen_filename_from_param(param, base): if fileExtensions: # It is possible that multiple file extensions can be specified in a # comma separated list, This will extract just the first extension - firstFileExtension = fileExtensions.split(',')[0] + firstFileExtension = fileExtensions.split(",")[0] ext = firstFileExtension else: ext = { - 'image': '.nii', - 'transform': '.mat', - 'file': '', - 'directory': '', - 'geometry': '.vtk' + "image": ".nii", + "transform": ".mat", + "file": "", + "directory": "", + "geometry": ".vtk", }[param.nodeName] return base + ext @@ -479,68 +533,68 @@ def gen_filename_from_param(param, base): # every tool in the modules list must be found on the default path # AND calling the module with --xml must be supported and compliant. modules_list = [ - 'MedianImageFilter', - 'CheckerBoardFilter', - 'EMSegmentCommandLine', - 'GrayscaleFillHoleImageFilter', + "MedianImageFilter", + "CheckerBoardFilter", + "EMSegmentCommandLine", + "GrayscaleFillHoleImageFilter", # 'CreateDICOMSeries', #missing channel - 'TractographyLabelMapSeeding', - 'IntensityDifferenceMetric', - 'DWIToDTIEstimation', - 'MaskScalarVolume', - 'ImageLabelCombine', - 'DTIimport', - 'OtsuThresholdImageFilter', - 'ExpertAutomatedRegistration', - 'ThresholdScalarVolume', - 'DWIUnbiasedNonLocalMeansFilter', - 'BRAINSFit', - 'MergeModels', - 'ResampleDTIVolume', - 'MultiplyScalarVolumes', - 'LabelMapSmoothing', - 'RigidRegistration', - 'VotingBinaryHoleFillingImageFilter', - 'BRAINSROIAuto', - 'RobustStatisticsSegmenter', - 'GradientAnisotropicDiffusion', - 'ProbeVolumeWithModel', - 'ModelMaker', - 'ExtractSkeleton', - 'GrayscaleGrindPeakImageFilter', - 'N4ITKBiasFieldCorrection', - 'BRAINSResample', - 'DTIexport', - 'VBRAINSDemonWarp', - 'ResampleScalarVectorDWIVolume', - 'ResampleScalarVolume', - 'OtsuThresholdSegmentation', + "TractographyLabelMapSeeding", + "IntensityDifferenceMetric", + "DWIToDTIEstimation", + "MaskScalarVolume", + "ImageLabelCombine", + "DTIimport", + "OtsuThresholdImageFilter", + "ExpertAutomatedRegistration", + "ThresholdScalarVolume", + "DWIUnbiasedNonLocalMeansFilter", + "BRAINSFit", + "MergeModels", + "ResampleDTIVolume", + "MultiplyScalarVolumes", + "LabelMapSmoothing", + "RigidRegistration", + "VotingBinaryHoleFillingImageFilter", + "BRAINSROIAuto", + "RobustStatisticsSegmenter", + "GradientAnisotropicDiffusion", + "ProbeVolumeWithModel", + "ModelMaker", + "ExtractSkeleton", + "GrayscaleGrindPeakImageFilter", + "N4ITKBiasFieldCorrection", + "BRAINSResample", + "DTIexport", + "VBRAINSDemonWarp", + "ResampleScalarVectorDWIVolume", + "ResampleScalarVolume", + "OtsuThresholdSegmentation", # 'ExecutionModelTour', - 'HistogramMatching', - 'BRAINSDemonWarp', - 'ModelToLabelMap', - 'GaussianBlurImageFilter', - 'DiffusionWeightedVolumeMasking', - 'GrayscaleModelMaker', - 'CastScalarVolume', - 'DicomToNrrdConverter', - 'AffineRegistration', - 'AddScalarVolumes', - 'LinearRegistration', - 'SimpleRegionGrowingSegmentation', - 'DWIJointRicianLMMSEFilter', - 'MultiResolutionAffineRegistration', - 'SubtractScalarVolumes', - 'DWIRicianLMMSEFilter', - 'OrientScalarVolume', - 'FiducialRegistration', - 'BSplineDeformableRegistration', - 
'CurvatureAnisotropicDiffusion', - 'PETStandardUptakeValueComputation', - 'DiffusionTensorScalarMeasurements', - 'ACPCTransform', - 'EMSegmentTransformToNewFormat', - 'BSplineToDeformationField' + "HistogramMatching", + "BRAINSDemonWarp", + "ModelToLabelMap", + "GaussianBlurImageFilter", + "DiffusionWeightedVolumeMasking", + "GrayscaleModelMaker", + "CastScalarVolume", + "DicomToNrrdConverter", + "AffineRegistration", + "AddScalarVolumes", + "LinearRegistration", + "SimpleRegionGrowingSegmentation", + "DWIJointRicianLMMSEFilter", + "MultiResolutionAffineRegistration", + "SubtractScalarVolumes", + "DWIRicianLMMSEFilter", + "OrientScalarVolume", + "FiducialRegistration", + "BSplineDeformableRegistration", + "CurvatureAnisotropicDiffusion", + "PETStandardUptakeValueComputation", + "DiffusionTensorScalarMeasurements", + "ACPCTransform", + "EMSegmentTransformToNewFormat", + "BSplineToDeformationField", ] # SlicerExecutionModel compliant tools that are usually statically built, and don't need the Slicer3 --launcher diff --git a/nipype/interfaces/slicer/legacy/__init__.py b/nipype/interfaces/slicer/legacy/__init__.py index 92cbc1ff73..f65d44f058 100644 --- a/nipype/interfaces/slicer/legacy/__init__.py +++ b/nipype/interfaces/slicer/legacy/__init__.py @@ -3,7 +3,11 @@ from .segmentation import OtsuThresholdSegmentation from .filtering import OtsuThresholdImageFilter, ResampleScalarVolume from .converters import BSplineToDeformationField -from .registration import (BSplineDeformableRegistration, AffineRegistration, - MultiResolutionAffineRegistration, - RigidRegistration, LinearRegistration, - ExpertAutomatedRegistration) +from .registration import ( + BSplineDeformableRegistration, + AffineRegistration, + MultiResolutionAffineRegistration, + RigidRegistration, + LinearRegistration, + ExpertAutomatedRegistration, +) diff --git a/nipype/interfaces/slicer/legacy/converters.py b/nipype/interfaces/slicer/legacy/converters.py index f5af1ad29b..aadd840d71 100644 --- a/nipype/interfaces/slicer/legacy/converters.py +++ b/nipype/interfaces/slicer/legacy/converters.py @@ -3,7 +3,18 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os @@ -11,7 +22,8 @@ class BSplineToDeformationFieldInputSpec(CommandLineInputSpec): tfm = File(exists=True, argstr="--tfm %s") refImage = File(exists=True, argstr="--refImage %s") defImage = traits.Either( - traits.Bool, File(), hash_files=False, argstr="--defImage %s") + traits.Bool, File(), hash_files=False, argstr="--defImage %s" + ) class BSplineToDeformationFieldOutputSpec(TraitedSpec): @@ -38,4 +50,4 @@ class BSplineToDeformationField(SEMLikeCommandLine): input_spec = BSplineToDeformationFieldInputSpec output_spec = BSplineToDeformationFieldOutputSpec _cmd = "BSplineToDeformationField " - _outputs_filenames = {'defImage': 'defImage.nii'} + _outputs_filenames = {"defImage": "defImage.nii"} diff --git a/nipype/interfaces/slicer/legacy/diffusion/denoising.py b/nipype/interfaces/slicer/legacy/diffusion/denoising.py index 0cc8cce0f6..0cde8fe64e 100644 --- a/nipype/interfaces/slicer/legacy/diffusion/denoising.py +++ 
b/nipype/interfaces/slicer/legacy/diffusion/denoising.py @@ -3,46 +3,57 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class DWIUnbiasedNonLocalMeansFilterInputSpec(CommandLineInputSpec): rs = InputMultiPath( traits.Int, - desc= - "The algorithm searches for similar voxels in a neighborhood of this size (larger sizes than the default one are extremely slow).", + desc="The algorithm searches for similar voxels in a neighborhood of this size (larger sizes than the default one are extremely slow).", sep=",", - argstr="--rs %s") + argstr="--rs %s", + ) rc = InputMultiPath( traits.Int, - desc= - "Similarity between blocks is measured using windows of this size.", + desc="Similarity between blocks is measured using windows of this size.", sep=",", - argstr="--rc %s") + argstr="--rc %s", + ) hp = traits.Float( - desc= - "This parameter is related to noise; the larger the parameter, the more aggressive the filtering. Should be near 1, and only values between 0.8 and 1.2 are allowed", - argstr="--hp %f") + desc="This parameter is related to noise; the larger the parameter, the more aggressive the filtering. Should be near 1, and only values between 0.8 and 1.2 are allowed", + argstr="--hp %f", + ) ng = traits.Int( - desc= - "The number of the closest gradients that are used to jointly filter a given gradient direction (a maximum of 5 is allowed).", - argstr="--ng %d") + desc="The number of the closest gradients that are used to jointly filter a given gradient direction (a maximum of 5 is allowed).", + argstr="--ng %d", + ) re = InputMultiPath( traits.Int, - desc= - "A neighborhood of this size is used to compute the statistics for noise estimation.", + desc="A neighborhood of this size is used to compute the statistics for noise estimation.", sep=",", - argstr="--re %s") - inputVolume = File( - position=-2, desc="Input DWI volume.", exists=True, argstr="%s") + argstr="--re %s", + ) + inputVolume = File(position=-2, desc="Input DWI volume.", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output DWI volume.", - argstr="%s") + argstr="%s", + ) class DWIUnbiasedNonLocalMeansFilterOutputSpec(TraitedSpec): @@ -73,4 +84,4 @@ class DWIUnbiasedNonLocalMeansFilter(SEMLikeCommandLine): input_spec = DWIUnbiasedNonLocalMeansFilterInputSpec output_spec = DWIUnbiasedNonLocalMeansFilterOutputSpec _cmd = "DWIUnbiasedNonLocalMeansFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/legacy/diffusion/tests/test_auto_DWIUnbiasedNonLocalMeansFilter.py b/nipype/interfaces/slicer/legacy/diffusion/tests/test_auto_DWIUnbiasedNonLocalMeansFilter.py index 0be53e7afc..81f61c1bbc 100644 --- a/nipype/interfaces/slicer/legacy/diffusion/tests/test_auto_DWIUnbiasedNonLocalMeansFilter.py +++ b/nipype/interfaces/slicer/legacy/diffusion/tests/test_auto_DWIUnbiasedNonLocalMeansFilter.py @@ -4,46 +4,25 @@ def test_DWIUnbiasedNonLocalMeansFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - 
environ=dict( - nohash=True, - usedefault=True, - ), - hp=dict(argstr='--hp %f', ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - ng=dict(argstr='--ng %d', ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - rc=dict( - argstr='--rc %s', - sep=',', - ), - re=dict( - argstr='--re %s', - sep=',', - ), - rs=dict( - argstr='--rs %s', - sep=',', - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + hp=dict(argstr="--hp %f",), + inputVolume=dict(argstr="%s", extensions=None, position=-2,), + ng=dict(argstr="--ng %d",), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), + rc=dict(argstr="--rc %s", sep=",",), + re=dict(argstr="--re %s", sep=",",), + rs=dict(argstr="--rs %s", sep=",",), ) inputs = DWIUnbiasedNonLocalMeansFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DWIUnbiasedNonLocalMeansFilter_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = DWIUnbiasedNonLocalMeansFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/legacy/filtering.py b/nipype/interfaces/slicer/legacy/filtering.py index aaed2350e0..ee041bbc50 100644 --- a/nipype/interfaces/slicer/legacy/filtering.py +++ b/nipype/interfaces/slicer/legacy/filtering.py @@ -2,35 +2,45 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class OtsuThresholdImageFilterInputSpec(CommandLineInputSpec): insideValue = traits.Int( - desc= - "The value assigned to pixels that are inside the computed threshold", - argstr="--insideValue %d") + desc="The value assigned to pixels that are inside the computed threshold", + argstr="--insideValue %d", + ) outsideValue = traits.Int( - desc= - "The value assigned to pixels that are outside the computed threshold", - argstr="--outsideValue %d") + desc="The value assigned to pixels that are outside the computed threshold", + argstr="--outsideValue %d", + ) numberOfBins = traits.Int( - desc= - "This is an advanced parameter. The number of bins in the histogram used to model the probability mass function of the two intensity distributions. Small numbers of bins may result in a more conservative threshold. The default should suffice for most applications. Experimentation is the only way to see the effect of varying this parameter.", - argstr="--numberOfBins %d") + desc="This is an advanced parameter. The number of bins in the histogram used to model the probability mass function of the two intensity distributions. Small numbers of bins may result in a more conservative threshold. The default should suffice for most applications. 
Experimentation is the only way to see the effect of varying this parameter.", + argstr="--numberOfBins %d", + ) inputVolume = File( - position=-2, - desc="Input volume to be filtered", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", - argstr="%s") + argstr="%s", + ) class OtsuThresholdImageFilterOutputSpec(TraitedSpec): @@ -61,7 +71,7 @@ class OtsuThresholdImageFilter(SEMLikeCommandLine): input_spec = OtsuThresholdImageFilterInputSpec output_spec = OtsuThresholdImageFilterOutputSpec _cmd = "OtsuThresholdImageFilter " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class ResampleScalarVolumeInputSpec(CommandLineInputSpec): @@ -69,7 +79,8 @@ class ResampleScalarVolumeInputSpec(CommandLineInputSpec): traits.Float, desc="Spacing along each dimension (0 means use input spacing)", sep=",", - argstr="--spacing %s") + argstr="--spacing %s", + ) interpolation = traits.Enum( "linear", "nearestNeighbor", @@ -79,21 +90,20 @@ class ResampleScalarVolumeInputSpec(CommandLineInputSpec): "welch", "lanczos", "blackman", - desc= - "Sampling algorithm (linear, nearest neighbor, bspline(cubic) or windowed sinc). There are several sinc algorithms available as described in the following publication: Erik H. W. Meijering, Wiro J. Niessen, Josien P. W. Pluim, Max A. Viergever: Quantitative Comparison of Sinc-Approximating Kernels for Medical Image Interpolation. MICCAI 1999, pp. 210-217. Each window has a radius of 3;", - argstr="--interpolation %s") + desc="Sampling algorithm (linear, nearest neighbor, bspline(cubic) or windowed sinc). There are several sinc algorithms available as described in the following publication: Erik H. W. Meijering, Wiro J. Niessen, Josien P. W. Pluim, Max A. Viergever: Quantitative Comparison of Sinc-Approximating Kernels for Medical Image Interpolation. MICCAI 1999, pp. 210-217. 
Each window has a radius of 3;", + argstr="--interpolation %s", + ) InputVolume = File( - position=-2, - desc="Input volume to be resampled", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be resampled", exists=True, argstr="%s" + ) OutputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Resampled Volume", - argstr="%s") + argstr="%s", + ) class ResampleScalarVolumeOutputSpec(TraitedSpec): @@ -120,4 +130,4 @@ class ResampleScalarVolume(SEMLikeCommandLine): input_spec = ResampleScalarVolumeInputSpec output_spec = ResampleScalarVolumeOutputSpec _cmd = "ResampleScalarVolume " - _outputs_filenames = {'OutputVolume': 'OutputVolume.nii'} + _outputs_filenames = {"OutputVolume": "OutputVolume.nii"} diff --git a/nipype/interfaces/slicer/legacy/registration.py b/nipype/interfaces/slicer/legacy/registration.py index 7f73d85d82..3bfd2377d4 100644 --- a/nipype/interfaces/slicer/legacy/registration.py +++ b/nipype/interfaces/slicer/legacy/registration.py @@ -3,85 +3,94 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class BSplineDeformableRegistrationInputSpec(CommandLineInputSpec): - iterations = traits.Int( - desc="Number of iterations", argstr="--iterations %d") + iterations = traits.Int(desc="Number of iterations", argstr="--iterations %d") gridSize = traits.Int( - desc= - "Number of grid points on interior of the fixed image. Larger grid sizes allow for finer registrations.", - argstr="--gridSize %d") + desc="Number of grid points on interior of the fixed image. Larger grid sizes allow for finer registrations.", + argstr="--gridSize %d", + ) histogrambins = traits.Int( - desc= - "Number of histogram bins to use for Mattes Mutual Information. Reduce the number of bins if a deformable registration fails. If the number of bins is too large, the estimated PDFs will be a field of impulses and will inhibit reliable registration estimation.", - argstr="--histogrambins %d") + desc="Number of histogram bins to use for Mattes Mutual Information. Reduce the number of bins if a deformable registration fails. If the number of bins is too large, the estimated PDFs will be a field of impulses and will inhibit reliable registration estimation.", + argstr="--histogrambins %d", + ) spatialsamples = traits.Int( - desc= - "Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.", - argstr="--spatialsamples %d") + desc="Number of spatial samples to use in estimating Mattes Mutual Information. 
Larger values yield more accurate PDFs and improved registration quality.", + argstr="--spatialsamples %d", + ) constrain = traits.Bool( - desc= - "Constrain the deformation to the amount specified in Maximum Deformation", - argstr="--constrain ") + desc="Constrain the deformation to the amount specified in Maximum Deformation", + argstr="--constrain ", + ) maximumDeformation = traits.Float( - desc= - "If Constrain Deformation is checked, limit the deformation to this amount.", - argstr="--maximumDeformation %f") + desc="If Constrain Deformation is checked, limit the deformation to this amount.", + argstr="--maximumDeformation %f", + ) default = traits.Int( - desc= - "Default pixel value used if resampling a pixel outside of the volume.", - argstr="--default %d") + desc="Default pixel value used if resampling a pixel outside of the volume.", + argstr="--default %d", + ) initialtransform = File( - desc= - "Initial transform for aligning the fixed and moving image. Maps positions in the fixed coordinate frame to positions in the moving coordinate frame. This transform should be an affine or rigid transform. It is used as a bulk transform for the BSpline. Optional.", + desc="Initial transform for aligning the fixed and moving image. Maps positions in the fixed coordinate frame to positions in the moving coordinate frame. This transform should be an affine or rigid transform. It is used as a bulk transform for the BSpline. Optional.", exists=True, - argstr="--initialtransform %s") + argstr="--initialtransform %s", + ) FixedImageFileName = File( - position=-2, - desc="Fixed image to which to register", - exists=True, - argstr="%s") + position=-2, desc="Fixed image to which to register", exists=True, argstr="%s" + ) MovingImageFileName = File( - position=-1, desc="Moving image", exists=True, argstr="%s") + position=-1, desc="Moving image", exists=True, argstr="%s" + ) outputtransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Transform calculated that aligns the fixed and moving image. Maps positions from the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", - argstr="--outputtransform %s") + desc="Transform calculated that aligns the fixed and moving image. Maps positions from the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", + argstr="--outputtransform %s", + ) outputwarp = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Vector field that applies an equivalent warp as the BSpline. Maps positions from the fixed coordinate frame to the moving coordinate frame. Optional.", - argstr="--outputwarp %s") + desc="Vector field that applies an equivalent warp as the BSpline. Maps positions from the fixed coordinate frame to the moving coordinate frame. Optional.", + argstr="--outputwarp %s", + ) resampledmovingfilename = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Resampled moving image to fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", - argstr="--resampledmovingfilename %s") + desc="Resampled moving image to fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", + argstr="--resampledmovingfilename %s", + ) class BSplineDeformableRegistrationOutputSpec(TraitedSpec): outputtransform = File( - desc= - "Transform calculated that aligns the fixed and moving image. 
Maps positions from the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", - exists=True) + desc="Transform calculated that aligns the fixed and moving image. Maps positions from the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", + exists=True, + ) outputwarp = File( - desc= - "Vector field that applies an equivalent warp as the BSpline. Maps positions from the fixed coordinate frame to the moving coordinate frame. Optional.", - exists=True) + desc="Vector field that applies an equivalent warp as the BSpline. Maps positions from the fixed coordinate frame to the moving coordinate frame. Optional.", + exists=True, + ) resampledmovingfilename = File( - desc= - "Resampled moving image to fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", - exists=True) + desc="Resampled moving image to fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", + exists=True, + ) class BSplineDeformableRegistration(SEMLikeCommandLine): @@ -105,72 +114,70 @@ class BSplineDeformableRegistration(SEMLikeCommandLine): output_spec = BSplineDeformableRegistrationOutputSpec _cmd = "BSplineDeformableRegistration " _outputs_filenames = { - 'resampledmovingfilename': 'resampledmovingfilename.nii', - 'outputtransform': 'outputtransform.txt', - 'outputwarp': 'outputwarp.nrrd' + "resampledmovingfilename": "resampledmovingfilename.nii", + "outputtransform": "outputtransform.txt", + "outputwarp": "outputwarp.nrrd", } class AffineRegistrationInputSpec(CommandLineInputSpec): fixedsmoothingfactor = traits.Int( - desc= - "Amount of smoothing applied to fixed image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there are considerable amounts of noise or the noise pattern in the fixed and moving images is very different.", - argstr="--fixedsmoothingfactor %d") + desc="Amount of smoothing applied to fixed image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there are considerable amounts of noise or the noise pattern in the fixed and moving images is very different.", + argstr="--fixedsmoothingfactor %d", + ) movingsmoothingfactor = traits.Int( - desc= - "Amount of smoothing applied to moving image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there are considerable amounts of noise or the noise pattern in the fixed and moving images is very different.", - argstr="--movingsmoothingfactor %d") + desc="Amount of smoothing applied to moving image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there are considerable amounts of noise or the noise pattern in the fixed and moving images is very different.", + argstr="--movingsmoothingfactor %d", + ) histogrambins = traits.Int( - desc= - "Number of histogram bins to use for Mattes Mutual Information. Reduce the number of bins if a registration fails. 
If the number of bins is too large, the estimated PDFs will be a field of impulses and will inhibit reliable registration estimation.", + argstr="--histogrambins %d", + ) spatialsamples = traits.Int( - desc= - "Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.", - argstr="--spatialsamples %d") + desc="Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.", + argstr="--spatialsamples %d", + ) - iterations = traits.Int( - desc="Number of iterations", argstr="--iterations %d") + iterations = traits.Int(desc="Number of iterations", argstr="--iterations %d") translationscale = traits.Float( - desc= - "Relative scale of translations to rotations, i.e. a value of 100 means 10mm = 1 degree. (Actual scale used is 1/(TranslationScale^2)). This parameter is used to \'weight\' or \'standardized\' the transform parameters and their effect on the registration objective function.", - argstr="--translationscale %f") + desc="Relative scale of translations to rotations, i.e. a value of 100 means 10mm = 1 degree. (Actual scale used is 1/(TranslationScale^2)). This parameter is used to 'weight' or 'standardize' the transform parameters and their effect on the registration objective function.", + argstr="--translationscale %f", + ) initialtransform = File( - desc= - "Initial transform for aligning the fixed and moving image. Maps positions in the fixed coordinate frame to positions in the moving coordinate frame. Optional.", + desc="Initial transform for aligning the fixed and moving image. Maps positions in the fixed coordinate frame to positions in the moving coordinate frame. Optional.", exists=True, - argstr="--initialtransform %s") + argstr="--initialtransform %s", + ) FixedImageFileName = File( - position=-2, - desc="Fixed image to which to register", - exists=True, - argstr="%s") + position=-2, desc="Fixed image to which to register", exists=True, argstr="%s" + ) MovingImageFileName = File( - position=-1, desc="Moving image", exists=True, argstr="%s") + position=-1, desc="Moving image", exists=True, argstr="%s" + ) outputtransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", - argstr="--outputtransform %s") + desc="Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", + argstr="--outputtransform %s", + ) resampledmovingfilename = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", - argstr="--resampledmovingfilename %s") + desc="Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", + argstr="--resampledmovingfilename %s", + ) class AffineRegistrationOutputSpec(TraitedSpec): outputtransform = File( - desc= - "Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. 
Optional (specify an output transform or an output volume or both).", - exists=True) + desc="Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", + exists=True, + ) resampledmovingfilename = File( - desc= - "Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", - exists=True) + desc="Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", + exists=True, + ) class AffineRegistration(SEMLikeCommandLine): @@ -200,62 +207,70 @@ class AffineRegistration(SEMLikeCommandLine): output_spec = AffineRegistrationOutputSpec _cmd = "AffineRegistration " _outputs_filenames = { - 'resampledmovingfilename': 'resampledmovingfilename.nii', - 'outputtransform': 'outputtransform.txt' + "resampledmovingfilename": "resampledmovingfilename.nii", + "outputtransform": "outputtransform.txt", } class MultiResolutionAffineRegistrationInputSpec(CommandLineInputSpec): fixedImage = File( position=-2, - desc= - "Image which defines the space into which the moving image is registered", + desc="Image which defines the space into which the moving image is registered", exists=True, - argstr="%s") + argstr="%s", + ) movingImage = File( position=-1, - desc= - "The transform goes from the fixed image's space into the moving image's space", + desc="The transform goes from the fixed image's space into the moving image's space", exists=True, - argstr="%s") + argstr="%s", + ) resampledImage = traits.Either( traits.Bool, File(), hash_files=False, desc="Registration results", - argstr="--resampledImage %s") + argstr="--resampledImage %s", + ) saveTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Save the output transform from the registration", - argstr="--saveTransform %s") + argstr="--saveTransform %s", + ) fixedImageMask = File( desc="Label image which defines a mask of interest for the fixed image", exists=True, - argstr="--fixedImageMask %s") + argstr="--fixedImageMask %s", + ) fixedImageROI = traits.List( desc="Label image which defines a ROI of interest for the fixed image", - argstr="--fixedImageROI %s") + argstr="--fixedImageROI %s", + ) numIterations = traits.Int( desc="Number of iterations to run at each resolution level.", - argstr="--numIterations %d") + argstr="--numIterations %d", + ) numLineIterations = traits.Int( desc="Number of iterations to run at each resolution level.", - argstr="--numLineIterations %d") + argstr="--numLineIterations %d", + ) stepSize = traits.Float( - desc="The maximum step size of the optimizer in voxels", - argstr="--stepSize %f") + desc="The maximum step size of the optimizer in voxels", argstr="--stepSize %f" + ) stepTolerance = traits.Float( desc="The maximum step size of the optimizer in voxels", - argstr="--stepTolerance %f") + argstr="--stepTolerance %f", + ) metricTolerance = traits.Float(argstr="--metricTolerance %f") class MultiResolutionAffineRegistrationOutputSpec(TraitedSpec): resampledImage = File(desc="Registration results", exists=True) saveTransform = File( - desc="Save the output transform from the registration", exists=True) + desc="Save the output transform from the registration", exists=True + ) class MultiResolutionAffineRegistration(SEMLikeCommandLine): @@ -279,85 +294,84 @@ class MultiResolutionAffineRegistration(SEMLikeCommandLine): output_spec = 
MultiResolutionAffineRegistrationOutputSpec _cmd = "MultiResolutionAffineRegistration " _outputs_filenames = { - 'resampledImage': 'resampledImage.nii', - 'saveTransform': 'saveTransform.txt' + "resampledImage": "resampledImage.nii", + "saveTransform": "saveTransform.txt", } class RigidRegistrationInputSpec(CommandLineInputSpec): fixedsmoothingfactor = traits.Int( - desc= - "Amount of smoothing applied to fixed image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.", - argstr="--fixedsmoothingfactor %d") + desc="Amount of smoothing applied to fixed image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there are considerable amounts of noise or the noise pattern in the fixed and moving images is very different.", + argstr="--fixedsmoothingfactor %d", + ) movingsmoothingfactor = traits.Int( - desc= - "Amount of smoothing applied to moving image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.", - argstr="--movingsmoothingfactor %d") + desc="Amount of smoothing applied to moving image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there are considerable amounts of noise or the noise pattern in the fixed and moving images is very different.", + argstr="--movingsmoothingfactor %d", + ) testingmode = traits.Bool( - desc= - "Enable testing mode. Input transform will be used to construct floating image. The floating image will be ignored if passed.", - argstr="--testingmode ") + desc="Enable testing mode. Input transform will be used to construct floating image. The floating image will be ignored if passed.", + argstr="--testingmode ", + ) histogrambins = traits.Int( - desc= - "Number of histogram bins to use for Mattes Mutual Information. Reduce the number of bins if a registration fails. If the number of bins is too large, the estimated PDFs will be a field of impulses and will inhibit reliable registration estimation.", - argstr="--histogrambins %d") + desc="Number of histogram bins to use for Mattes Mutual Information. Reduce the number of bins if a registration fails. If the number of bins is too large, the estimated PDFs will be a field of impulses and will inhibit reliable registration estimation.", + argstr="--histogrambins %d", + ) spatialsamples = traits.Int( - desc= - "Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.", - argstr="--spatialsamples %d") + desc="Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.", + argstr="--spatialsamples %d", + ) iterations = InputMultiPath( traits.Int, - desc= - "Comma separated list of iterations. Must have the same number of elements as the learning rate.", + desc="Comma separated list of iterations. Must have the same number of elements as the learning rate.", sep=",", - argstr="--iterations %s") + argstr="--iterations %s", + ) learningrate = InputMultiPath( traits.Float, - desc= - "Comma separated list of learning rates. 
Learning rate is a scale factor on the gradient of the registration objective function (gradient with respect to the parameters of the transformation) used to update the parameters of the transformation during optimization. Smaller values cause the optimizer to take smaller steps through the parameter space. Larger values are typically used early in the registration process to take large jumps in parameter space followed by smaller values to home in on the optimum value of the registration objective function. Default is: 0.01, 0.005, 0.0005, 0.0002. Must have the same number of elements as iterations.", + desc="Comma separated list of learning rates. Learning rate is a scale factor on the gradient of the registration objective function (gradient with respect to the parameters of the transformation) used to update the parameters of the transformation during optimization. Smaller values cause the optimizer to take smaller steps through the parameter space. Larger values are typically used early in the registration process to take large jumps in parameter space followed by smaller values to home in on the optimum value of the registration objective function. Default is: 0.01, 0.005, 0.0005, 0.0002. Must have the same number of elements as iterations.", sep=",", - argstr="--learningrate %s") + argstr="--learningrate %s", + ) translationscale = traits.Float( - desc= - "Relative scale of translations to rotations, i.e. a value of 100 means 10mm = 1 degree. (Actual scale used 1/(TranslationScale^2)). This parameter is used to \'weight\' or \'standardized\' the transform parameters and their effect on the registration objective function.", - argstr="--translationscale %f") + desc="Relative scale of translations to rotations, i.e. a value of 100 means 10mm = 1 degree. (Actual scale used 1/(TranslationScale^2)). This parameter is used to 'weight' or 'standardize' the transform parameters and their effect on the registration objective function.", + argstr="--translationscale %f", + ) initialtransform = File( - desc= - "Initial transform for aligning the fixed and moving image. Maps positions in the fixed coordinate frame to positions in the moving coordinate frame. Optional.", + desc="Initial transform for aligning the fixed and moving image. Maps positions in the fixed coordinate frame to positions in the moving coordinate frame. Optional.", exists=True, - argstr="--initialtransform %s") + argstr="--initialtransform %s", + ) FixedImageFileName = File( - position=-2, - desc="Fixed image to which to register", - exists=True, - argstr="%s") + position=-2, desc="Fixed image to which to register", exists=True, argstr="%s" + ) MovingImageFileName = File( - position=-1, desc="Moving image", exists=True, argstr="%s") + position=-1, desc="Moving image", exists=True, argstr="%s" + ) outputtransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", - argstr="--outputtransform %s") + desc="Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", + argstr="--outputtransform %s", + ) resampledmovingfilename = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Resampled moving image to the fixed image coordinate frame. 
Optional (specify an output transform or an output volume or both).", - argstr="--resampledmovingfilename %s") + desc="Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", + argstr="--resampledmovingfilename %s", + ) class RigidRegistrationOutputSpec(TraitedSpec): outputtransform = File( - desc= - "Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", - exists=True) + desc="Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", + exists=True, + ) resampledmovingfilename = File( - desc= - "Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", - exists=True) + desc="Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", + exists=True, + ) class RigidRegistration(SEMLikeCommandLine): @@ -391,81 +405,80 @@ class RigidRegistration(SEMLikeCommandLine): output_spec = RigidRegistrationOutputSpec _cmd = "RigidRegistration " _outputs_filenames = { - 'resampledmovingfilename': 'resampledmovingfilename.nii', - 'outputtransform': 'outputtransform.txt' + "resampledmovingfilename": "resampledmovingfilename.nii", + "outputtransform": "outputtransform.txt", } class LinearRegistrationInputSpec(CommandLineInputSpec): fixedsmoothingfactor = traits.Int( - desc= - "Amount of smoothing applied to fixed image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.", - argstr="--fixedsmoothingfactor %d") + desc="Amount of smoothing applied to fixed image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there are considerable amounts of noise or the noise pattern in the fixed and moving images is very different.", + argstr="--fixedsmoothingfactor %d", + ) movingsmoothingfactor = traits.Int( - desc= - "Amount of smoothing applied to moving image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.", - argstr="--movingsmoothingfactor %d") + desc="Amount of smoothing applied to moving image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there are considerable amounts of noise or the noise pattern in the fixed and moving images is very different.", + argstr="--movingsmoothingfactor %d", + ) histogrambins = traits.Int( - desc= - "Number of histogram bins to use for Mattes Mutual Information. Reduce the number of bins if a registration fails. 
If the number of bins is too large, the estimated PDFs will be a field of impulses and will inhibit reliable registration estimation.", + argstr="--histogrambins %d", + ) spatialsamples = traits.Int( - desc= - "Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.", - argstr="--spatialsamples %d") + desc="Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.", + argstr="--spatialsamples %d", + ) iterations = InputMultiPath( traits.Int, - desc= - "Comma separated list of iterations. Must have the same number of elements as the learning rate.", + desc="Comma separated list of iterations. Must have the same number of elements as the learning rate.", sep=",", - argstr="--iterations %s") + argstr="--iterations %s", + ) learningrate = InputMultiPath( traits.Float, - desc= - "Comma separated list of learning rates. Learning rate is a scale factor on the gradient of the registration objective function (gradient with respect to the parameters of the transformation) used to update the parameters of the transformation during optimization. Smaller values cause the optimizer to take smaller steps through the parameter space. Larger values are typically used early in the registration process to take large jumps in parameter space followed by smaller values to home in on the optimum value of the registration objective function. Default is: 0.01, 0.005, 0.0005, 0.0002. Must have the same number of elements as iterations.", + desc="Comma separated list of learning rates. Learning rate is a scale factor on the gradient of the registration objective function (gradient with respect to the parameters of the transformation) used to update the parameters of the transformation during optimization. Smaller values cause the optimizer to take smaller steps through the parameter space. Larger values are typically used early in the registration process to take large jumps in parameter space followed by smaller values to home in on the optimum value of the registration objective function. Default is: 0.01, 0.005, 0.0005, 0.0002. Must have the same number of elements as iterations.", sep=",", - argstr="--learningrate %s") + argstr="--learningrate %s", + ) translationscale = traits.Float( - desc= - "Relative scale of translations to rotations, i.e. a value of 100 means 10mm = 1 degree. (Actual scale used 1/(TranslationScale^2)). This parameter is used to \'weight\' or \'standardized\' the transform parameters and their effect on the registration objective function.", - argstr="--translationscale %f") + desc="Relative scale of translations to rotations, i.e. a value of 100 means 10mm = 1 degree. (Actual scale used 1/(TranslationScale^2)). This parameter is used to 'weight' or 'standardize' the transform parameters and their effect on the registration objective function.", + argstr="--translationscale %f", + ) initialtransform = File( - desc= - "Initial transform for aligning the fixed and moving image. Maps positions in the fixed coordinate frame to positions in the moving coordinate frame. 
Optional.", exists=True, - argstr="--initialtransform %s") + argstr="--initialtransform %s", + ) FixedImageFileName = File( - position=-2, - desc="Fixed image to which to register", - exists=True, - argstr="%s") + position=-2, desc="Fixed image to which to register", exists=True, argstr="%s" + ) MovingImageFileName = File( - position=-1, desc="Moving image", exists=True, argstr="%s") + position=-1, desc="Moving image", exists=True, argstr="%s" + ) outputtransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", - argstr="--outputtransform %s") + desc="Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", + argstr="--outputtransform %s", + ) resampledmovingfilename = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", - argstr="--resampledmovingfilename %s") + desc="Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", + argstr="--resampledmovingfilename %s", + ) class LinearRegistrationOutputSpec(TraitedSpec): outputtransform = File( - desc= - "Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", - exists=True) + desc="Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", + exists=True, + ) resampledmovingfilename = File( - desc= - "Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", - exists=True) + desc="Resampled moving image to the fixed image coordinate frame. 
Optional (specify an output transform or an output volume or both).", + exists=True, + ) class LinearRegistration(SEMLikeCommandLine): @@ -489,40 +502,43 @@ class LinearRegistration(SEMLikeCommandLine): output_spec = LinearRegistrationOutputSpec _cmd = "LinearRegistration " _outputs_filenames = { - 'resampledmovingfilename': 'resampledmovingfilename.nii', - 'outputtransform': 'outputtransform.txt' + "resampledmovingfilename": "resampledmovingfilename.nii", + "outputtransform": "outputtransform.txt", } class ExpertAutomatedRegistrationInputSpec(CommandLineInputSpec): fixedImage = File( position=-2, - desc= - "Image which defines the space into which the moving image is registered", + desc="Image which defines the space into which the moving image is registered", exists=True, - argstr="%s") + argstr="%s", + ) movingImage = File( position=-1, - desc= - "The transform goes from the fixed image's space into the moving image's space", + desc="The transform goes from the fixed image's space into the moving image's space", exists=True, - argstr="%s") + argstr="%s", + ) resampledImage = traits.Either( traits.Bool, File(), hash_files=False, desc="Registration results", - argstr="--resampledImage %s") + argstr="--resampledImage %s", + ) loadTransform = File( desc="Load a transform that is immediately applied to the moving image", exists=True, - argstr="--loadTransform %s") + argstr="--loadTransform %s", + ) saveTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Save the transform that results from registration", - argstr="--saveTransform %s") + argstr="--saveTransform %s", + ) initialization = traits.Enum( "None", "Landmarks", @@ -530,7 +546,8 @@ class ExpertAutomatedRegistrationInputSpec(CommandLineInputSpec): "CentersOfMass", "SecondMoments", desc="Method to prime the registration process", - argstr="--initialization %s") + argstr="--initialization %s", + ) registration = traits.Enum( "None", "Initial", @@ -541,92 +558,107 @@ class ExpertAutomatedRegistrationInputSpec(CommandLineInputSpec): "PipelineAffine", "PipelineBSpline", desc="Method for the registration process", - argstr="--registration %s") + argstr="--registration %s", + ) metric = traits.Enum( "MattesMI", "NormCorr", "MeanSqrd", desc="Method to quantify image match", - argstr="--metric %s") + argstr="--metric %s", + ) expectedOffset = traits.Float( - desc="Expected misalignment after initialization", - argstr="--expectedOffset %f") + desc="Expected misalignment after initialization", argstr="--expectedOffset %f" + ) expectedRotation = traits.Float( desc="Expected misalignment after initialization", - argstr="--expectedRotation %f") + argstr="--expectedRotation %f", + ) expectedScale = traits.Float( - desc="Expected misalignment after initialization", - argstr="--expectedScale %f") + desc="Expected misalignment after initialization", argstr="--expectedScale %f" + ) expectedSkew = traits.Float( - desc="Expected misalignment after initialization", - argstr="--expectedSkew %f") + desc="Expected misalignment after initialization", argstr="--expectedSkew %f" + ) verbosityLevel = traits.Enum( "Silent", "Standard", "Verbose", desc="Level of detail of reporting progress", - argstr="--verbosityLevel %s") + argstr="--verbosityLevel %s", + ) sampleFromOverlap = traits.Bool( - desc= - "Limit metric evaluation to the fixed image region overlapped by the moving image", - argstr="--sampleFromOverlap ") + desc="Limit metric evaluation to the fixed image region overlapped by the moving image", + argstr="--sampleFromOverlap ", + ) 
fixedImageMask = File( desc="Image which defines a mask for the fixed image", exists=True, - argstr="--fixedImageMask %s") + argstr="--fixedImageMask %s", + ) randomNumberSeed = traits.Int( desc="Seed to generate a consistent random number sequence", - argstr="--randomNumberSeed %d") + argstr="--randomNumberSeed %d", + ) numberOfThreads = traits.Int( - desc="Number of CPU threads to use", argstr="--numberOfThreads %d") + desc="Number of CPU threads to use", argstr="--numberOfThreads %d" + ) minimizeMemory = traits.Bool( - desc= - "Reduce the amount of memory required at the cost of increased computation time", - argstr="--minimizeMemory ") + desc="Reduce the amount of memory required at the cost of increased computation time", + argstr="--minimizeMemory ", + ) interpolation = traits.Enum( "NearestNeighbor", "Linear", "BSpline", desc="Method for interpolation within the optimization process", - argstr="--interpolation %s") + argstr="--interpolation %s", + ) fixedLandmarks = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), desc="Ordered list of landmarks in the fixed image", - argstr="--fixedLandmarks %s...") + argstr="--fixedLandmarks %s...", + ) movingLandmarks = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), desc="Ordered list of landmarks in the moving image", - argstr="--movingLandmarks %s...") + argstr="--movingLandmarks %s...", + ) rigidMaxIterations = traits.Int( desc="Maximum number of rigid optimization iterations", - argstr="--rigidMaxIterations %d") + argstr="--rigidMaxIterations %d", + ) rigidSamplingRatio = traits.Float( - desc= - "Portion of the image to use in computing the metric during rigid registration", - argstr="--rigidSamplingRatio %f") + desc="Portion of the image to use in computing the metric during rigid registration", + argstr="--rigidSamplingRatio %f", + ) affineMaxIterations = traits.Int( desc="Maximum number of affine optimization iterations", - argstr="--affineMaxIterations %d") + argstr="--affineMaxIterations %d", + ) affineSamplingRatio = traits.Float( - desc= - "Portion of the image to use in computing the metric during affine registration", - argstr="--affineSamplingRatio %f") + desc="Portion of the image to use in computing the metric during affine registration", + argstr="--affineSamplingRatio %f", + ) bsplineMaxIterations = traits.Int( desc="Maximum number of bspline optimization iterations", - argstr="--bsplineMaxIterations %d") + argstr="--bsplineMaxIterations %d", + ) bsplineSamplingRatio = traits.Float( - desc= - "Portion of the image to use in computing the metric during BSpline registration", - argstr="--bsplineSamplingRatio %f") + desc="Portion of the image to use in computing the metric during BSpline registration", + argstr="--bsplineSamplingRatio %f", + ) controlPointSpacing = traits.Int( desc="Number of pixels between control points", - argstr="--controlPointSpacing %d") + argstr="--controlPointSpacing %d", + ) class ExpertAutomatedRegistrationOutputSpec(TraitedSpec): resampledImage = File(desc="Registration results", exists=True) saveTransform = File( - desc="Save the transform that results from registration", exists=True) + desc="Save the transform that results from registration", exists=True + ) class ExpertAutomatedRegistration(SEMLikeCommandLine): @@ -650,6 +682,6 @@ class ExpertAutomatedRegistration(SEMLikeCommandLine): output_spec = ExpertAutomatedRegistrationOutputSpec _cmd = "ExpertAutomatedRegistration " _outputs_filenames = { - 'resampledImage': 'resampledImage.nii', - 'saveTransform': 
'saveTransform.txt' + "resampledImage": "resampledImage.nii", + "saveTransform": "saveTransform.txt", } diff --git a/nipype/interfaces/slicer/legacy/segmentation.py b/nipype/interfaces/slicer/legacy/segmentation.py index 3500d50d50..118ffbcb77 100644 --- a/nipype/interfaces/slicer/legacy/segmentation.py +++ b/nipype/interfaces/slicer/legacy/segmentation.py @@ -3,39 +3,49 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class OtsuThresholdSegmentationInputSpec(CommandLineInputSpec): brightObjects = traits.Bool( - desc= - "Segmenting bright objects on a dark background or dark objects on a bright background.", - argstr="--brightObjects ") + desc="Segmenting bright objects on a dark background or dark objects on a bright background.", + argstr="--brightObjects ", + ) numberOfBins = traits.Int( - desc= - "This is an advanced parameter. The number of bins in the histogram used to model the probability mass function of the two intensity distributions. Small numbers of bins may result in a more conservative threshold. The default should suffice for most applications. Experimentation is the only way to see the effect of varying this parameter.", - argstr="--numberOfBins %d") + desc="This is an advanced parameter. The number of bins in the histogram used to model the probability mass function of the two intensity distributions. Small numbers of bins may result in a more conservative threshold. The default should suffice for most applications. Experimentation is the only way to see the effect of varying this parameter.", + argstr="--numberOfBins %d", + ) faceConnected = traits.Bool( - desc= - "This is an advanced parameter. Adjacent voxels are face connected. This affects the connected component algorithm. If this parameter is false, more regions are likely to be identified.", - argstr="--faceConnected ") + desc="This is an advanced parameter. Adjacent voxels are face connected. This affects the connected component algorithm. If this parameter is false, more regions are likely to be identified.", + argstr="--faceConnected ", + ) minimumObjectSize = traits.Int( - desc= - "Minimum size of object to retain. This parameter can be used to get rid of small regions in noisy images.", - argstr="--minimumObjectSize %d") + desc="Minimum size of object to retain. 
This parameter can be used to get rid of small regions in noisy images.", + argstr="--minimumObjectSize %d", + ) inputVolume = File( - position=-2, - desc="Input volume to be segmented", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be segmented", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", - argstr="%s") + argstr="%s", + ) class OtsuThresholdSegmentationOutputSpec(TraitedSpec): @@ -62,4 +72,4 @@ class OtsuThresholdSegmentation(SEMLikeCommandLine): input_spec = OtsuThresholdSegmentationInputSpec output_spec = OtsuThresholdSegmentationOutputSpec _cmd = "OtsuThresholdSegmentation " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/legacy/tests/test_auto_AffineRegistration.py b/nipype/interfaces/slicer/legacy/tests/test_auto_AffineRegistration.py index 2ca1242922..44857bf3a8 100644 --- a/nipype/interfaces/slicer/legacy/tests/test_auto_AffineRegistration.py +++ b/nipype/interfaces/slicer/legacy/tests/test_auto_AffineRegistration.py @@ -4,49 +4,33 @@ def test_AffineRegistration_inputs(): input_map = dict( - FixedImageFileName=dict( - argstr='%s', - extensions=None, - position=-2, - ), - MovingImageFileName=dict( - argstr='%s', - extensions=None, - position=-1, - ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixedsmoothingfactor=dict(argstr='--fixedsmoothingfactor %d', ), - histogrambins=dict(argstr='--histogrambins %d', ), - initialtransform=dict( - argstr='--initialtransform %s', - extensions=None, - ), - iterations=dict(argstr='--iterations %d', ), - movingsmoothingfactor=dict(argstr='--movingsmoothingfactor %d', ), - outputtransform=dict( - argstr='--outputtransform %s', - hash_files=False, - ), + FixedImageFileName=dict(argstr="%s", extensions=None, position=-2,), + MovingImageFileName=dict(argstr="%s", extensions=None, position=-1,), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + fixedsmoothingfactor=dict(argstr="--fixedsmoothingfactor %d",), + histogrambins=dict(argstr="--histogrambins %d",), + initialtransform=dict(argstr="--initialtransform %s", extensions=None,), + iterations=dict(argstr="--iterations %d",), + movingsmoothingfactor=dict(argstr="--movingsmoothingfactor %d",), + outputtransform=dict(argstr="--outputtransform %s", hash_files=False,), resampledmovingfilename=dict( - argstr='--resampledmovingfilename %s', - hash_files=False, + argstr="--resampledmovingfilename %s", hash_files=False, ), - spatialsamples=dict(argstr='--spatialsamples %d', ), - translationscale=dict(argstr='--translationscale %f', ), + spatialsamples=dict(argstr="--spatialsamples %d",), + translationscale=dict(argstr="--translationscale %f",), ) inputs = AffineRegistration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_AffineRegistration_outputs(): output_map = dict( - outputtransform=dict(extensions=None, ), - resampledmovingfilename=dict(extensions=None, ), + outputtransform=dict(extensions=None,), + resampledmovingfilename=dict(extensions=None,), ) outputs = AffineRegistration.output_spec() diff --git a/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineDeformableRegistration.py b/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineDeformableRegistration.py index 889992c9ec..7c777ebca3 
100644 --- a/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineDeformableRegistration.py +++ b/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineDeformableRegistration.py @@ -4,55 +4,36 @@ def test_BSplineDeformableRegistration_inputs(): input_map = dict( - FixedImageFileName=dict( - argstr='%s', - extensions=None, - position=-2, - ), - MovingImageFileName=dict( - argstr='%s', - extensions=None, - position=-1, - ), - args=dict(argstr='%s', ), - constrain=dict(argstr='--constrain ', ), - default=dict(argstr='--default %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - gridSize=dict(argstr='--gridSize %d', ), - histogrambins=dict(argstr='--histogrambins %d', ), - initialtransform=dict( - argstr='--initialtransform %s', - extensions=None, - ), - iterations=dict(argstr='--iterations %d', ), - maximumDeformation=dict(argstr='--maximumDeformation %f', ), - outputtransform=dict( - argstr='--outputtransform %s', - hash_files=False, - ), - outputwarp=dict( - argstr='--outputwarp %s', - hash_files=False, - ), + FixedImageFileName=dict(argstr="%s", extensions=None, position=-2,), + MovingImageFileName=dict(argstr="%s", extensions=None, position=-1,), + args=dict(argstr="%s",), + constrain=dict(argstr="--constrain ",), + default=dict(argstr="--default %d",), + environ=dict(nohash=True, usedefault=True,), + gridSize=dict(argstr="--gridSize %d",), + histogrambins=dict(argstr="--histogrambins %d",), + initialtransform=dict(argstr="--initialtransform %s", extensions=None,), + iterations=dict(argstr="--iterations %d",), + maximumDeformation=dict(argstr="--maximumDeformation %f",), + outputtransform=dict(argstr="--outputtransform %s", hash_files=False,), + outputwarp=dict(argstr="--outputwarp %s", hash_files=False,), resampledmovingfilename=dict( - argstr='--resampledmovingfilename %s', - hash_files=False, + argstr="--resampledmovingfilename %s", hash_files=False, ), - spatialsamples=dict(argstr='--spatialsamples %d', ), + spatialsamples=dict(argstr="--spatialsamples %d",), ) inputs = BSplineDeformableRegistration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BSplineDeformableRegistration_outputs(): output_map = dict( - outputtransform=dict(extensions=None, ), - outputwarp=dict(extensions=None, ), - resampledmovingfilename=dict(extensions=None, ), + outputtransform=dict(extensions=None,), + outputwarp=dict(extensions=None,), + resampledmovingfilename=dict(extensions=None,), ) outputs = BSplineDeformableRegistration.output_spec() diff --git a/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineToDeformationField.py b/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineToDeformationField.py index 54dd8157f1..c4dc0f8969 100644 --- a/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineToDeformationField.py +++ b/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineToDeformationField.py @@ -4,31 +4,21 @@ def test_BSplineToDeformationField_inputs(): input_map = dict( - args=dict(argstr='%s', ), - defImage=dict( - argstr='--defImage %s', - hash_files=False, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - refImage=dict( - argstr='--refImage %s', - extensions=None, - ), - tfm=dict( - argstr='--tfm %s', - extensions=None, - ), + args=dict(argstr="%s",), + defImage=dict(argstr="--defImage %s", hash_files=False,), + environ=dict(nohash=True, usedefault=True,), + refImage=dict(argstr="--refImage %s", extensions=None,), + tfm=dict(argstr="--tfm 
%s", extensions=None,), ) inputs = BSplineToDeformationField.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BSplineToDeformationField_outputs(): - output_map = dict(defImage=dict(extensions=None, ), ) + output_map = dict(defImage=dict(extensions=None,),) outputs = BSplineToDeformationField.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/legacy/tests/test_auto_ExpertAutomatedRegistration.py b/nipype/interfaces/slicer/legacy/tests/test_auto_ExpertAutomatedRegistration.py index 8289fc924b..6536d699b0 100644 --- a/nipype/interfaces/slicer/legacy/tests/test_auto_ExpertAutomatedRegistration.py +++ b/nipype/interfaces/slicer/legacy/tests/test_auto_ExpertAutomatedRegistration.py @@ -4,69 +4,47 @@ def test_ExpertAutomatedRegistration_inputs(): input_map = dict( - affineMaxIterations=dict(argstr='--affineMaxIterations %d', ), - affineSamplingRatio=dict(argstr='--affineSamplingRatio %f', ), - args=dict(argstr='%s', ), - bsplineMaxIterations=dict(argstr='--bsplineMaxIterations %d', ), - bsplineSamplingRatio=dict(argstr='--bsplineSamplingRatio %f', ), - controlPointSpacing=dict(argstr='--controlPointSpacing %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - expectedOffset=dict(argstr='--expectedOffset %f', ), - expectedRotation=dict(argstr='--expectedRotation %f', ), - expectedScale=dict(argstr='--expectedScale %f', ), - expectedSkew=dict(argstr='--expectedSkew %f', ), - fixedImage=dict( - argstr='%s', - extensions=None, - position=-2, - ), - fixedImageMask=dict( - argstr='--fixedImageMask %s', - extensions=None, - ), - fixedLandmarks=dict(argstr='--fixedLandmarks %s...', ), - initialization=dict(argstr='--initialization %s', ), - interpolation=dict(argstr='--interpolation %s', ), - loadTransform=dict( - argstr='--loadTransform %s', - extensions=None, - ), - metric=dict(argstr='--metric %s', ), - minimizeMemory=dict(argstr='--minimizeMemory ', ), - movingImage=dict( - argstr='%s', - extensions=None, - position=-1, - ), - movingLandmarks=dict(argstr='--movingLandmarks %s...', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - randomNumberSeed=dict(argstr='--randomNumberSeed %d', ), - registration=dict(argstr='--registration %s', ), - resampledImage=dict( - argstr='--resampledImage %s', - hash_files=False, - ), - rigidMaxIterations=dict(argstr='--rigidMaxIterations %d', ), - rigidSamplingRatio=dict(argstr='--rigidSamplingRatio %f', ), - sampleFromOverlap=dict(argstr='--sampleFromOverlap ', ), - saveTransform=dict( - argstr='--saveTransform %s', - hash_files=False, - ), - verbosityLevel=dict(argstr='--verbosityLevel %s', ), + affineMaxIterations=dict(argstr="--affineMaxIterations %d",), + affineSamplingRatio=dict(argstr="--affineSamplingRatio %f",), + args=dict(argstr="%s",), + bsplineMaxIterations=dict(argstr="--bsplineMaxIterations %d",), + bsplineSamplingRatio=dict(argstr="--bsplineSamplingRatio %f",), + controlPointSpacing=dict(argstr="--controlPointSpacing %d",), + environ=dict(nohash=True, usedefault=True,), + expectedOffset=dict(argstr="--expectedOffset %f",), + expectedRotation=dict(argstr="--expectedRotation %f",), + expectedScale=dict(argstr="--expectedScale %f",), + expectedSkew=dict(argstr="--expectedSkew %f",), + fixedImage=dict(argstr="%s", extensions=None, position=-2,), + fixedImageMask=dict(argstr="--fixedImageMask %s", extensions=None,), + fixedLandmarks=dict(argstr="--fixedLandmarks 
%s...",), + initialization=dict(argstr="--initialization %s",), + interpolation=dict(argstr="--interpolation %s",), + loadTransform=dict(argstr="--loadTransform %s", extensions=None,), + metric=dict(argstr="--metric %s",), + minimizeMemory=dict(argstr="--minimizeMemory ",), + movingImage=dict(argstr="%s", extensions=None, position=-1,), + movingLandmarks=dict(argstr="--movingLandmarks %s...",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + randomNumberSeed=dict(argstr="--randomNumberSeed %d",), + registration=dict(argstr="--registration %s",), + resampledImage=dict(argstr="--resampledImage %s", hash_files=False,), + rigidMaxIterations=dict(argstr="--rigidMaxIterations %d",), + rigidSamplingRatio=dict(argstr="--rigidSamplingRatio %f",), + sampleFromOverlap=dict(argstr="--sampleFromOverlap ",), + saveTransform=dict(argstr="--saveTransform %s", hash_files=False,), + verbosityLevel=dict(argstr="--verbosityLevel %s",), ) inputs = ExpertAutomatedRegistration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ExpertAutomatedRegistration_outputs(): output_map = dict( - resampledImage=dict(extensions=None, ), - saveTransform=dict(extensions=None, ), + resampledImage=dict(extensions=None,), saveTransform=dict(extensions=None,), ) outputs = ExpertAutomatedRegistration.output_spec() diff --git a/nipype/interfaces/slicer/legacy/tests/test_auto_LinearRegistration.py b/nipype/interfaces/slicer/legacy/tests/test_auto_LinearRegistration.py index 454f645088..b19d3be344 100644 --- a/nipype/interfaces/slicer/legacy/tests/test_auto_LinearRegistration.py +++ b/nipype/interfaces/slicer/legacy/tests/test_auto_LinearRegistration.py @@ -4,56 +4,34 @@ def test_LinearRegistration_inputs(): input_map = dict( - FixedImageFileName=dict( - argstr='%s', - extensions=None, - position=-2, - ), - MovingImageFileName=dict( - argstr='%s', - extensions=None, - position=-1, - ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixedsmoothingfactor=dict(argstr='--fixedsmoothingfactor %d', ), - histogrambins=dict(argstr='--histogrambins %d', ), - initialtransform=dict( - argstr='--initialtransform %s', - extensions=None, - ), - iterations=dict( - argstr='--iterations %s', - sep=',', - ), - learningrate=dict( - argstr='--learningrate %s', - sep=',', - ), - movingsmoothingfactor=dict(argstr='--movingsmoothingfactor %d', ), - outputtransform=dict( - argstr='--outputtransform %s', - hash_files=False, - ), + FixedImageFileName=dict(argstr="%s", extensions=None, position=-2,), + MovingImageFileName=dict(argstr="%s", extensions=None, position=-1,), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + fixedsmoothingfactor=dict(argstr="--fixedsmoothingfactor %d",), + histogrambins=dict(argstr="--histogrambins %d",), + initialtransform=dict(argstr="--initialtransform %s", extensions=None,), + iterations=dict(argstr="--iterations %s", sep=",",), + learningrate=dict(argstr="--learningrate %s", sep=",",), + movingsmoothingfactor=dict(argstr="--movingsmoothingfactor %d",), + outputtransform=dict(argstr="--outputtransform %s", hash_files=False,), resampledmovingfilename=dict( - argstr='--resampledmovingfilename %s', - hash_files=False, + argstr="--resampledmovingfilename %s", hash_files=False, ), - spatialsamples=dict(argstr='--spatialsamples %d', ), - translationscale=dict(argstr='--translationscale %f', ), + spatialsamples=dict(argstr="--spatialsamples 
%d",), + translationscale=dict(argstr="--translationscale %f",), ) inputs = LinearRegistration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LinearRegistration_outputs(): output_map = dict( - outputtransform=dict(extensions=None, ), - resampledmovingfilename=dict(extensions=None, ), + outputtransform=dict(extensions=None,), + resampledmovingfilename=dict(extensions=None,), ) outputs = LinearRegistration.output_spec() diff --git a/nipype/interfaces/slicer/legacy/tests/test_auto_MultiResolutionAffineRegistration.py b/nipype/interfaces/slicer/legacy/tests/test_auto_MultiResolutionAffineRegistration.py index 605db755f6..ad7f89e35d 100644 --- a/nipype/interfaces/slicer/legacy/tests/test_auto_MultiResolutionAffineRegistration.py +++ b/nipype/interfaces/slicer/legacy/tests/test_auto_MultiResolutionAffineRegistration.py @@ -4,49 +4,30 @@ def test_MultiResolutionAffineRegistration_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixedImage=dict( - argstr='%s', - extensions=None, - position=-2, - ), - fixedImageMask=dict( - argstr='--fixedImageMask %s', - extensions=None, - ), - fixedImageROI=dict(argstr='--fixedImageROI %s', ), - metricTolerance=dict(argstr='--metricTolerance %f', ), - movingImage=dict( - argstr='%s', - extensions=None, - position=-1, - ), - numIterations=dict(argstr='--numIterations %d', ), - numLineIterations=dict(argstr='--numLineIterations %d', ), - resampledImage=dict( - argstr='--resampledImage %s', - hash_files=False, - ), - saveTransform=dict( - argstr='--saveTransform %s', - hash_files=False, - ), - stepSize=dict(argstr='--stepSize %f', ), - stepTolerance=dict(argstr='--stepTolerance %f', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + fixedImage=dict(argstr="%s", extensions=None, position=-2,), + fixedImageMask=dict(argstr="--fixedImageMask %s", extensions=None,), + fixedImageROI=dict(argstr="--fixedImageROI %s",), + metricTolerance=dict(argstr="--metricTolerance %f",), + movingImage=dict(argstr="%s", extensions=None, position=-1,), + numIterations=dict(argstr="--numIterations %d",), + numLineIterations=dict(argstr="--numLineIterations %d",), + resampledImage=dict(argstr="--resampledImage %s", hash_files=False,), + saveTransform=dict(argstr="--saveTransform %s", hash_files=False,), + stepSize=dict(argstr="--stepSize %f",), + stepTolerance=dict(argstr="--stepTolerance %f",), ) inputs = MultiResolutionAffineRegistration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MultiResolutionAffineRegistration_outputs(): output_map = dict( - resampledImage=dict(extensions=None, ), - saveTransform=dict(extensions=None, ), + resampledImage=dict(extensions=None,), saveTransform=dict(extensions=None,), ) outputs = MultiResolutionAffineRegistration.output_spec() diff --git a/nipype/interfaces/slicer/legacy/tests/test_auto_OtsuThresholdImageFilter.py b/nipype/interfaces/slicer/legacy/tests/test_auto_OtsuThresholdImageFilter.py index 50782aec03..853fbb5caa 100644 --- a/nipype/interfaces/slicer/legacy/tests/test_auto_OtsuThresholdImageFilter.py +++ b/nipype/interfaces/slicer/legacy/tests/test_auto_OtsuThresholdImageFilter.py @@ -4,35 +4,23 @@ def test_OtsuThresholdImageFilter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - 
nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - insideValue=dict(argstr='--insideValue %d', ), - numberOfBins=dict(argstr='--numberOfBins %d', ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - outsideValue=dict(argstr='--outsideValue %d', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="%s", extensions=None, position=-2,), + insideValue=dict(argstr="--insideValue %d",), + numberOfBins=dict(argstr="--numberOfBins %d",), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), + outsideValue=dict(argstr="--outsideValue %d",), ) inputs = OtsuThresholdImageFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_OtsuThresholdImageFilter_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = OtsuThresholdImageFilter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/legacy/tests/test_auto_OtsuThresholdSegmentation.py b/nipype/interfaces/slicer/legacy/tests/test_auto_OtsuThresholdSegmentation.py index 5340c37639..561bb246f2 100644 --- a/nipype/interfaces/slicer/legacy/tests/test_auto_OtsuThresholdSegmentation.py +++ b/nipype/interfaces/slicer/legacy/tests/test_auto_OtsuThresholdSegmentation.py @@ -4,36 +4,24 @@ def test_OtsuThresholdSegmentation_inputs(): input_map = dict( - args=dict(argstr='%s', ), - brightObjects=dict(argstr='--brightObjects ', ), - environ=dict( - nohash=True, - usedefault=True, - ), - faceConnected=dict(argstr='--faceConnected ', ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - minimumObjectSize=dict(argstr='--minimumObjectSize %d', ), - numberOfBins=dict(argstr='--numberOfBins %d', ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), + args=dict(argstr="%s",), + brightObjects=dict(argstr="--brightObjects ",), + environ=dict(nohash=True, usedefault=True,), + faceConnected=dict(argstr="--faceConnected ",), + inputVolume=dict(argstr="%s", extensions=None, position=-2,), + minimumObjectSize=dict(argstr="--minimumObjectSize %d",), + numberOfBins=dict(argstr="--numberOfBins %d",), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), ) inputs = OtsuThresholdSegmentation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_OtsuThresholdSegmentation_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = OtsuThresholdSegmentation.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/legacy/tests/test_auto_ResampleScalarVolume.py b/nipype/interfaces/slicer/legacy/tests/test_auto_ResampleScalarVolume.py index 49c2ac2ffb..a95967feca 100644 --- a/nipype/interfaces/slicer/legacy/tests/test_auto_ResampleScalarVolume.py +++ b/nipype/interfaces/slicer/legacy/tests/test_auto_ResampleScalarVolume.py @@ -4,37 +4,22 @@ def test_ResampleScalarVolume_inputs(): input_map = dict( - InputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - OutputVolume=dict( - argstr='%s', - hash_files=False, - 
position=-1, - ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - interpolation=dict(argstr='--interpolation %s', ), - spacing=dict( - argstr='--spacing %s', - sep=',', - ), + InputVolume=dict(argstr="%s", extensions=None, position=-2,), + OutputVolume=dict(argstr="%s", hash_files=False, position=-1,), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + interpolation=dict(argstr="--interpolation %s",), + spacing=dict(argstr="--spacing %s", sep=",",), ) inputs = ResampleScalarVolume.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ResampleScalarVolume_outputs(): - output_map = dict(OutputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(OutputVolume=dict(extensions=None, position=-1,),) outputs = ResampleScalarVolume.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/legacy/tests/test_auto_RigidRegistration.py b/nipype/interfaces/slicer/legacy/tests/test_auto_RigidRegistration.py index 92a8af7dfb..ea4b5eda53 100644 --- a/nipype/interfaces/slicer/legacy/tests/test_auto_RigidRegistration.py +++ b/nipype/interfaces/slicer/legacy/tests/test_auto_RigidRegistration.py @@ -4,57 +4,35 @@ def test_RigidRegistration_inputs(): input_map = dict( - FixedImageFileName=dict( - argstr='%s', - extensions=None, - position=-2, - ), - MovingImageFileName=dict( - argstr='%s', - extensions=None, - position=-1, - ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixedsmoothingfactor=dict(argstr='--fixedsmoothingfactor %d', ), - histogrambins=dict(argstr='--histogrambins %d', ), - initialtransform=dict( - argstr='--initialtransform %s', - extensions=None, - ), - iterations=dict( - argstr='--iterations %s', - sep=',', - ), - learningrate=dict( - argstr='--learningrate %s', - sep=',', - ), - movingsmoothingfactor=dict(argstr='--movingsmoothingfactor %d', ), - outputtransform=dict( - argstr='--outputtransform %s', - hash_files=False, - ), + FixedImageFileName=dict(argstr="%s", extensions=None, position=-2,), + MovingImageFileName=dict(argstr="%s", extensions=None, position=-1,), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + fixedsmoothingfactor=dict(argstr="--fixedsmoothingfactor %d",), + histogrambins=dict(argstr="--histogrambins %d",), + initialtransform=dict(argstr="--initialtransform %s", extensions=None,), + iterations=dict(argstr="--iterations %s", sep=",",), + learningrate=dict(argstr="--learningrate %s", sep=",",), + movingsmoothingfactor=dict(argstr="--movingsmoothingfactor %d",), + outputtransform=dict(argstr="--outputtransform %s", hash_files=False,), resampledmovingfilename=dict( - argstr='--resampledmovingfilename %s', - hash_files=False, + argstr="--resampledmovingfilename %s", hash_files=False, ), - spatialsamples=dict(argstr='--spatialsamples %d', ), - testingmode=dict(argstr='--testingmode ', ), - translationscale=dict(argstr='--translationscale %f', ), + spatialsamples=dict(argstr="--spatialsamples %d",), + testingmode=dict(argstr="--testingmode ",), + translationscale=dict(argstr="--translationscale %f",), ) inputs = RigidRegistration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RigidRegistration_outputs(): output_map = dict( - 
outputtransform=dict(extensions=None, ), - resampledmovingfilename=dict(extensions=None, ), + outputtransform=dict(extensions=None,), + resampledmovingfilename=dict(extensions=None,), ) outputs = RigidRegistration.output_spec() diff --git a/nipype/interfaces/slicer/quantification/changequantification.py b/nipype/interfaces/slicer/quantification/changequantification.py index 5abf1b1287..c0e36b9bf9 100644 --- a/nipype/interfaces/slicer/quantification/changequantification.py +++ b/nipype/interfaces/slicer/quantification/changequantification.py @@ -3,55 +3,68 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class IntensityDifferenceMetricInputSpec(CommandLineInputSpec): sensitivityThreshold = traits.Float( - desc= - "This parameter should be between 0 and 1, and defines how sensitive the metric should be to the intensity changes.", - argstr="--sensitivityThreshold %f") + desc="This parameter should be between 0 and 1, and defines how sensitive the metric should be to the intensity changes.", + argstr="--sensitivityThreshold %f", + ) changingBandSize = traits.Int( - desc= - "How far (in mm) from the boundary of the segmentation should the intensity changes be considered.", - argstr="--changingBandSize %d") + desc="How far (in mm) from the boundary of the segmentation should the intensity changes be considered.", + argstr="--changingBandSize %d", + ) baselineVolume = File( - position=-4, - desc="Baseline volume to be compared to", - exists=True, - argstr="%s") + position=-4, desc="Baseline volume to be compared to", exists=True, argstr="%s" + ) baselineSegmentationVolume = File( position=-3, - desc= - "Label volume that contains segmentation of the structure of interest in the baseline volume.", + desc="Label volume that contains segmentation of the structure of interest in the baseline volume.", exists=True, - argstr="%s") + argstr="%s", + ) followupVolume = File( position=-2, desc="Followup volume to be compare to the baseline", exists=True, - argstr="%s") + argstr="%s", + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output volume to keep the results of change quantification.", - argstr="%s") + argstr="%s", + ) reportFileName = traits.Either( traits.Bool, File(), hash_files=False, desc="Report file name", - argstr="--reportFileName %s") + argstr="--reportFileName %s", + ) class IntensityDifferenceMetricOutputSpec(TraitedSpec): outputVolume = File( position=-1, desc="Output volume to keep the results of change quantification.", - exists=True) + exists=True, + ) reportFileName = File(desc="Report file name", exists=True) @@ -81,6 +94,6 @@ class IntensityDifferenceMetric(SEMLikeCommandLine): output_spec = IntensityDifferenceMetricOutputSpec _cmd = "IntensityDifferenceMetric " _outputs_filenames = { - 'outputVolume': 'outputVolume.nii', - 'reportFileName': 'reportFileName' + "outputVolume": "outputVolume.nii", + "reportFileName": "reportFileName", } diff --git a/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py 
b/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py index 0edfca3fbb..9cf01c5359 100644 --- a/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py +++ b/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py @@ -3,53 +3,67 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class PETStandardUptakeValueComputationInputSpec(CommandLineInputSpec): petDICOMPath = Directory( - desc= - "Input path to a directory containing a PET volume containing DICOM header information for SUV computation", + desc="Input path to a directory containing a PET volume containing DICOM header information for SUV computation", exists=True, - argstr="--petDICOMPath %s") + argstr="--petDICOMPath %s", + ) petVolume = File( - desc= - "Input PET volume for SUVbw computation (must be the same volume as pointed to by the DICOM path!).", + desc="Input PET volume for SUVbw computation (must be the same volume as pointed to by the DICOM path!).", exists=True, - argstr="--petVolume %s") + argstr="--petVolume %s", + ) labelMap = File( desc="Input label volume containing the volumes of interest", exists=True, - argstr="--labelMap %s") + argstr="--labelMap %s", + ) color = File( desc="Color table to to map labels to colors and names", exists=True, - argstr="--color %s") + argstr="--color %s", + ) csvFile = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "A file holding the output SUV values in comma separated lines, one per label. Optional.", - argstr="--csvFile %s") + desc="A file holding the output SUV values in comma separated lines, one per label. Optional.", + argstr="--csvFile %s", + ) OutputLabel = traits.Str( desc="List of labels for which SUV values were computed", - argstr="--OutputLabel %s") + argstr="--OutputLabel %s", + ) OutputLabelValue = traits.Str( desc="List of label values for which SUV values were computed", - argstr="--OutputLabelValue %s") + argstr="--OutputLabelValue %s", + ) SUVMax = traits.Str(desc="SUV max for each label", argstr="--SUVMax %s") SUVMean = traits.Str(desc="SUV mean for each label", argstr="--SUVMean %s") - SUVMin = traits.Str( - desc="SUV minimum for each label", argstr="--SUVMin %s") + SUVMin = traits.Str(desc="SUV minimum for each label", argstr="--SUVMin %s") class PETStandardUptakeValueComputationOutputSpec(TraitedSpec): csvFile = File( - desc= - "A file holding the output SUV values in comma separated lines, one per label. Optional.", - exists=True) + desc="A file holding the output SUV values in comma separated lines, one per label. 
Optional.", + exists=True, + ) class PETStandardUptakeValueComputation(SEMLikeCommandLine): @@ -72,4 +86,4 @@ class PETStandardUptakeValueComputation(SEMLikeCommandLine): input_spec = PETStandardUptakeValueComputationInputSpec output_spec = PETStandardUptakeValueComputationOutputSpec _cmd = "PETStandardUptakeValueComputation " - _outputs_filenames = {'csvFile': 'csvFile.csv'} + _outputs_filenames = {"csvFile": "csvFile.csv"} diff --git a/nipype/interfaces/slicer/quantification/tests/test_auto_IntensityDifferenceMetric.py b/nipype/interfaces/slicer/quantification/tests/test_auto_IntensityDifferenceMetric.py index 6462d2ff48..512991571e 100644 --- a/nipype/interfaces/slicer/quantification/tests/test_auto_IntensityDifferenceMetric.py +++ b/nipype/interfaces/slicer/quantification/tests/test_auto_IntensityDifferenceMetric.py @@ -4,50 +4,27 @@ def test_IntensityDifferenceMetric_inputs(): input_map = dict( - args=dict(argstr='%s', ), - baselineSegmentationVolume=dict( - argstr='%s', - extensions=None, - position=-3, - ), - baselineVolume=dict( - argstr='%s', - extensions=None, - position=-4, - ), - changingBandSize=dict(argstr='--changingBandSize %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - followupVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - reportFileName=dict( - argstr='--reportFileName %s', - hash_files=False, - ), - sensitivityThreshold=dict(argstr='--sensitivityThreshold %f', ), + args=dict(argstr="%s",), + baselineSegmentationVolume=dict(argstr="%s", extensions=None, position=-3,), + baselineVolume=dict(argstr="%s", extensions=None, position=-4,), + changingBandSize=dict(argstr="--changingBandSize %d",), + environ=dict(nohash=True, usedefault=True,), + followupVolume=dict(argstr="%s", extensions=None, position=-2,), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), + reportFileName=dict(argstr="--reportFileName %s", hash_files=False,), + sensitivityThreshold=dict(argstr="--sensitivityThreshold %f",), ) inputs = IntensityDifferenceMetric.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_IntensityDifferenceMetric_outputs(): output_map = dict( - outputVolume=dict( - extensions=None, - position=-1, - ), - reportFileName=dict(extensions=None, ), + outputVolume=dict(extensions=None, position=-1,), + reportFileName=dict(extensions=None,), ) outputs = IntensityDifferenceMetric.output_spec() diff --git a/nipype/interfaces/slicer/quantification/tests/test_auto_PETStandardUptakeValueComputation.py b/nipype/interfaces/slicer/quantification/tests/test_auto_PETStandardUptakeValueComputation.py index c30fc0c0cf..61ee94ec6e 100644 --- a/nipype/interfaces/slicer/quantification/tests/test_auto_PETStandardUptakeValueComputation.py +++ b/nipype/interfaces/slicer/quantification/tests/test_auto_PETStandardUptakeValueComputation.py @@ -4,41 +4,28 @@ def test_PETStandardUptakeValueComputation_inputs(): input_map = dict( - OutputLabel=dict(argstr='--OutputLabel %s', ), - OutputLabelValue=dict(argstr='--OutputLabelValue %s', ), - SUVMax=dict(argstr='--SUVMax %s', ), - SUVMean=dict(argstr='--SUVMean %s', ), - SUVMin=dict(argstr='--SUVMin %s', ), - args=dict(argstr='%s', ), - color=dict( - argstr='--color %s', - extensions=None, - ), - csvFile=dict( - argstr='--csvFile %s', - hash_files=False, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - 
labelMap=dict( - argstr='--labelMap %s', - extensions=None, - ), - petDICOMPath=dict(argstr='--petDICOMPath %s', ), - petVolume=dict( - argstr='--petVolume %s', - extensions=None, - ), + OutputLabel=dict(argstr="--OutputLabel %s",), + OutputLabelValue=dict(argstr="--OutputLabelValue %s",), + SUVMax=dict(argstr="--SUVMax %s",), + SUVMean=dict(argstr="--SUVMean %s",), + SUVMin=dict(argstr="--SUVMin %s",), + args=dict(argstr="%s",), + color=dict(argstr="--color %s", extensions=None,), + csvFile=dict(argstr="--csvFile %s", hash_files=False,), + environ=dict(nohash=True, usedefault=True,), + labelMap=dict(argstr="--labelMap %s", extensions=None,), + petDICOMPath=dict(argstr="--petDICOMPath %s",), + petVolume=dict(argstr="--petVolume %s", extensions=None,), ) inputs = PETStandardUptakeValueComputation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PETStandardUptakeValueComputation_outputs(): - output_map = dict(csvFile=dict(extensions=None, ), ) + output_map = dict(csvFile=dict(extensions=None,),) outputs = PETStandardUptakeValueComputation.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/registration/__init__.py b/nipype/interfaces/slicer/registration/__init__.py index 2d03aabe03..faa3c92b2f 100644 --- a/nipype/interfaces/slicer/registration/__init__.py +++ b/nipype/interfaces/slicer/registration/__init__.py @@ -1,5 +1,9 @@ # -*- coding: utf-8 -*- -from .specialized import (ACPCTransform, FiducialRegistration, - VBRAINSDemonWarp, BRAINSDemonWarp) +from .specialized import ( + ACPCTransform, + FiducialRegistration, + VBRAINSDemonWarp, + BRAINSDemonWarp, +) from .brainsresample import BRAINSResample from .brainsfit import BRAINSFit diff --git a/nipype/interfaces/slicer/registration/brainsfit.py b/nipype/interfaces/slicer/registration/brainsfit.py index adbd733976..2ca7f09d3c 100644 --- a/nipype/interfaces/slicer/registration/brainsfit.py +++ b/nipype/interfaces/slicer/registration/brainsfit.py @@ -3,121 +3,134 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class BRAINSFitInputSpec(CommandLineInputSpec): fixedVolume = File( - desc= - "The fixed image for registration by mutual information optimization.", + desc="The fixed image for registration by mutual information optimization.", exists=True, - argstr="--fixedVolume %s") + argstr="--fixedVolume %s", + ) movingVolume = File( - desc= - "The moving image for registration by mutual information optimization.", + desc="The moving image for registration by mutual information optimization.", exists=True, - argstr="--movingVolume %s") + argstr="--movingVolume %s", + ) bsplineTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "(optional) Filename to which save the estimated transform. NOTE: You must set at least one output object (either a deformed image or a transform. 
NOTE: USE THIS ONLY IF THE FINAL TRANSFORM IS BSpline", - argstr="--bsplineTransform %s") + desc="(optional) Filename to which save the estimated transform. NOTE: You must set at least one output object (either a deformed image or a transform. NOTE: USE THIS ONLY IF THE FINAL TRANSFORM IS BSpline", + argstr="--bsplineTransform %s", + ) linearTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "(optional) Filename to which save the estimated transform. NOTE: You must set at least one output object (either a deformed image or a transform. NOTE: USE THIS ONLY IF THE FINAL TRANSFORM IS ---NOT--- BSpline", - argstr="--linearTransform %s") + desc="(optional) Filename to which save the estimated transform. NOTE: You must set at least one output object (either a deformed image or a transform. NOTE: USE THIS ONLY IF THE FINAL TRANSFORM IS ---NOT--- BSpline", + argstr="--linearTransform %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "(optional) Output image for registration. NOTE: You must select either the outputTransform or the outputVolume option.", - argstr="--outputVolume %s") + desc="(optional) Output image for registration. NOTE: You must select either the outputTransform or the outputVolume option.", + argstr="--outputVolume %s", + ) initialTransform = File( - desc= - "Filename of transform used to initialize the registration. This CAN NOT be used with either CenterOfHeadLAlign, MomentsAlign, GeometryAlign, or initialTransform file.", + desc="Filename of transform used to initialize the registration. This CAN NOT be used with either CenterOfHeadLAlign, MomentsAlign, GeometryAlign, or initialTransform file.", exists=True, - argstr="--initialTransform %s") + argstr="--initialTransform %s", + ) initializeTransformMode = traits.Enum( "Off", "useMomentsAlign", "useCenterOfHeadAlign", "useGeometryAlign", "useCenterOfROIAlign", - desc= - "Determine how to initialize the transform center. GeometryAlign on assumes that the center of the voxel lattice of the images represent similar structures. MomentsAlign assumes that the center of mass of the images represent similar structures. useCenterOfHeadAlign attempts to use the top of head and shape of neck to drive a center of mass estimate. Off assumes that the physical space of the images are close, and that centering in terms of the image Origins is a good starting point. This flag is mutually exclusive with the initialTransform flag.", - argstr="--initializeTransformMode %s") + desc="Determine how to initialize the transform center. GeometryAlign on assumes that the center of the voxel lattice of the images represent similar structures. MomentsAlign assumes that the center of mass of the images represent similar structures. useCenterOfHeadAlign attempts to use the top of head and shape of neck to drive a center of mass estimate. Off assumes that the physical space of the images are close, and that centering in terms of the image Origins is a good starting point. This flag is mutually exclusive with the initialTransform flag.", + argstr="--initializeTransformMode %s", + ) useRigid = traits.Bool( - desc= - "Perform a rigid registration as part of the sequential registration steps. This family of options superceeds the use of transformType if any of them are set.", - argstr="--useRigid ") + desc="Perform a rigid registration as part of the sequential registration steps. 
This family of options superceeds the use of transformType if any of them are set.", - argstr="--useRigid ") + desc="Perform a rigid registration as part of the sequential registration steps. This family of options supersedes the use of transformType if any of them are set.", + argstr="--useRigid ", + ) useScaleVersor3D = traits.Bool( - desc= - "Perform a ScaleVersor3D registration as part of the sequential registration steps. This family of options superceeds the use of transformType if any of them are set.", - argstr="--useScaleVersor3D ") + desc="Perform a ScaleVersor3D registration as part of the sequential registration steps. This family of options supersedes the use of transformType if any of them are set.", + argstr="--useScaleVersor3D ", + ) useScaleSkewVersor3D = traits.Bool( - desc= - "Perform a ScaleSkewVersor3D registration as part of the sequential registration steps. This family of options superceeds the use of transformType if any of them are set.", - argstr="--useScaleSkewVersor3D ") + desc="Perform a ScaleSkewVersor3D registration as part of the sequential registration steps. This family of options supersedes the use of transformType if any of them are set.", + argstr="--useScaleSkewVersor3D ", + ) useAffine = traits.Bool( - desc= - "Perform an Affine registration as part of the sequential registration steps. This family of options superceeds the use of transformType if any of them are set.", - argstr="--useAffine ") + desc="Perform an Affine registration as part of the sequential registration steps. This family of options supersedes the use of transformType if any of them are set.", + argstr="--useAffine ", + ) useBSpline = traits.Bool( - desc= - "Perform a BSpline registration as part of the sequential registration steps. This family of options superceeds the use of transformType if any of them are set.", - argstr="--useBSpline ") + desc="Perform a BSpline registration as part of the sequential registration steps. This family of options supersedes the use of transformType if any of them are set.", + argstr="--useBSpline ", + ) numberOfSamples = traits.Int( - desc= - "The number of voxels sampled for mutual information computation. Increase this for a slower, more careful fit. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation.", - argstr="--numberOfSamples %d") + desc="The number of voxels sampled for mutual information computation. Increase this for a slower, more careful fit. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation.", + argstr="--numberOfSamples %d", + ) splineGridSize = InputMultiPath( traits.Int, - desc= - "The number of subdivisions of the BSpline Grid to be centered on the image space. Each dimension must have at least 3 subdivisions for the BSpline to be correctly computed. ", + desc="The number of subdivisions of the BSpline Grid to be centered on the image space. Each dimension must have at least 3 subdivisions for the BSpline to be correctly computed. ", sep=",", - argstr="--splineGridSize %s") + argstr="--splineGridSize %s", + ) numberOfIterations = InputMultiPath( traits.Int, - desc= - "The maximum number of iterations to try before failing to converge. Use an explicit limit like 500 or 1000 to manage risk of divergence", + desc="The maximum number of iterations to try before failing to converge. Use an explicit limit like 500 or 1000 to manage risk of divergence", sep=",", - argstr="--numberOfIterations %s") + argstr="--numberOfIterations %s", + ) maskProcessingMode = traits.Enum( "NOMASK", "ROIAUTO", "ROI", - desc= - "What mode to use for using the masks. If ROIAUTO is choosen, then the mask is implicitly defined using a otsu forground and hole filling algorithm. 
The Region Of Interest mode (choose ROI) uses the masks to define what parts of the image should be used for computing the transform.", - argstr="--maskProcessingMode %s") + desc="What mode to use for using the masks. If ROIAUTO is chosen, then the mask is implicitly defined using an Otsu foreground and hole filling algorithm. The Region Of Interest mode (choose ROI) uses the masks to define what parts of the image should be used for computing the transform.", + argstr="--maskProcessingMode %s", + ) fixedBinaryVolume = File( desc="Fixed Image binary mask volume, ONLY FOR MANUAL ROI mode.", exists=True, - argstr="--fixedBinaryVolume %s") + argstr="--fixedBinaryVolume %s", + ) movingBinaryVolume = File( desc="Moving Image binary mask volume, ONLY FOR MANUAL ROI mode.", exists=True, - argstr="--movingBinaryVolume %s") + argstr="--movingBinaryVolume %s", + ) outputFixedVolumeROI = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "The ROI automatically found in fixed image, ONLY FOR ROIAUTO mode.", - argstr="--outputFixedVolumeROI %s") + desc="The ROI automatically found in fixed image, ONLY FOR ROIAUTO mode.", + argstr="--outputFixedVolumeROI %s", + ) outputMovingVolumeROI = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "The ROI automatically found in moving image, ONLY FOR ROIAUTO mode.", - argstr="--outputMovingVolumeROI %s") + desc="The ROI automatically found in moving image, ONLY FOR ROIAUTO mode.", + argstr="--outputMovingVolumeROI %s", + ) outputVolumePixelType = traits.Enum( "float", "short", @@ -125,20 +138,21 @@ class BRAINSFitInputSpec(CommandLineInputSpec): "int", "uint", "uchar", - desc= - "The output image Pixel Type is the scalar datatype for representation of the Output Volume.", - argstr="--outputVolumePixelType %s") + desc="The output image Pixel Type is the scalar datatype for representation of the Output Volume.", + argstr="--outputVolumePixelType %s", + ) backgroundFillValue = traits.Float( desc="Background fill value for output image.", - argstr="--backgroundFillValue %f") + argstr="--backgroundFillValue %f", + ) maskInferiorCutOffFromCenter = traits.Float( - desc= - "For use with --useCenterOfHeadAlign (and --maskProcessingMode ROIAUTO): the cut-off below the image centers, in millimeters, ", - argstr="--maskInferiorCutOffFromCenter %f") + desc="For use with --useCenterOfHeadAlign (and --maskProcessingMode ROIAUTO): the cut-off below the image centers, in millimeters, ", + argstr="--maskInferiorCutOffFromCenter %f", + ) scaleOutputValues = traits.Bool( - desc= - "If true, and the voxel values do not fit within the minimum and maximum values of the desired outputVolumePixelType, then linearly scale the min/max output image voxel values to fit within the min/max range of the outputVolumePixelType.", - argstr="--scaleOutputValues ") + desc="If true, and the voxel values do not fit within the minimum and maximum values of the desired outputVolumePixelType, then linearly scale the min/max output image voxel values to fit within the min/max range of the outputVolumePixelType.", + argstr="--scaleOutputValues ", + ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", @@ -150,201 +164,202 @@ class BRAINSFitInputSpec(CommandLineInputSpec): "Welch", "Lanczos", "Blackman", - desc= - "Type of interpolation to be used when applying transform to moving volume. Options are Linear, NearestNeighbor, BSpline, WindowedSinc, or ResampleInPlace. 
The ResampleInPlace option will create an image with the same discrete voxel values and will adjust the origin and direction of the physical space interpretation.", - argstr="--interpolationMode %s") + desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, NearestNeighbor, BSpline, WindowedSinc, or ResampleInPlace. The ResampleInPlace option will create an image with the same discrete voxel values and will adjust the origin and direction of the physical space interpretation.", + argstr="--interpolationMode %s", + ) minimumStepLength = InputMultiPath( traits.Float, - desc= - "Each step in the optimization takes steps at least this big. When none are possible, registration is complete.", + desc="Each step in the optimization takes steps at least this big. When none are possible, registration is complete.", sep=",", - argstr="--minimumStepLength %s") + argstr="--minimumStepLength %s", + ) translationScale = traits.Float( - desc= - "How much to scale up changes in position compared to unit rotational changes in radians -- decrease this to put more rotation in the search pattern.", - argstr="--translationScale %f") + desc="How much to scale up changes in position compared to unit rotational changes in radians -- decrease this to put more rotation in the search pattern.", + argstr="--translationScale %f", + ) reproportionScale = traits.Float( - desc= - "ScaleVersor3D 'Scale' compensation factor. Increase this to put more rescaling in a ScaleVersor3D or ScaleSkewVersor3D search pattern. 1.0 works well with a translationScale of 1000.0", - argstr="--reproportionScale %f") + desc="ScaleVersor3D 'Scale' compensation factor. Increase this to put more rescaling in a ScaleVersor3D or ScaleSkewVersor3D search pattern. 1.0 works well with a translationScale of 1000.0", + argstr="--reproportionScale %f", + ) skewScale = traits.Float( - desc= - "ScaleSkewVersor3D Skew compensation factor. Increase this to put more skew in a ScaleSkewVersor3D search pattern. 1.0 works well with a translationScale of 1000.0", - argstr="--skewScale %f") + desc="ScaleSkewVersor3D Skew compensation factor. Increase this to put more skew in a ScaleSkewVersor3D search pattern. 1.0 works well with a translationScale of 1000.0", + argstr="--skewScale %f", + ) maxBSplineDisplacement = traits.Float( - desc= - " Sets the maximum allowed displacements in image physical coordinates for BSpline control grid along each axis. A value of 0.0 indicates that the problem should be unbounded. NOTE: This only constrains the BSpline portion, and does not limit the displacement from the associated bulk transform. This can lead to a substantial reduction in computation time in the BSpline optimizer., ", - argstr="--maxBSplineDisplacement %f") + desc=" Sets the maximum allowed displacements in image physical coordinates for BSpline control grid along each axis. A value of 0.0 indicates that the problem should be unbounded. NOTE: This only constrains the BSpline portion, and does not limit the displacement from the associated bulk transform. This can lead to a substantial reduction in computation time in the BSpline optimizer., ", + argstr="--maxBSplineDisplacement %f", + ) histogramMatch = traits.Bool( - desc= - "Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile. 
Do NOT use if registering images from different modailties.", - argstr="--histogramMatch ") + desc="Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile. Do NOT use if registering images from different modalities.", + argstr="--histogramMatch ", + ) numberOfHistogramBins = traits.Int( - desc="The number of histogram levels", - argstr="--numberOfHistogramBins %d") + desc="The number of histogram levels", argstr="--numberOfHistogramBins %d" + ) numberOfMatchPoints = traits.Int( - desc="the number of match points", argstr="--numberOfMatchPoints %d") + desc="the number of match points", argstr="--numberOfMatchPoints %d" + ) strippedOutputTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "File name for the rigid component of the estimated affine transform. Can be used to rigidly register the moving image to the fixed image. NOTE: This value is overwritten if either bsplineTransform or linearTransform is set.", - argstr="--strippedOutputTransform %s") + desc="File name for the rigid component of the estimated affine transform. Can be used to rigidly register the moving image to the fixed image. NOTE: This value is overwritten if either bsplineTransform or linearTransform is set.", + argstr="--strippedOutputTransform %s", + ) transformType = InputMultiPath( traits.Str, - desc= - "Specifies a list of registration types to be used. The valid types are, Rigid, ScaleVersor3D, ScaleSkewVersor3D, Affine, and BSpline. Specifiying more than one in a comma separated list will initialize the next stage with the previous results. If registrationClass flag is used, it overrides this parameter setting.", + desc="Specifies a list of registration types to be used. The valid types are Rigid, ScaleVersor3D, ScaleSkewVersor3D, Affine, and BSpline. Specifying more than one in a comma separated list will initialize the next stage with the previous results. If registrationClass flag is used, it overrides this parameter setting.", sep=",", - argstr="--transformType %s") + argstr="--transformType %s", + ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "(optional) Filename to which save the (optional) estimated transform. 
NOTE: You must select either the outputTransform or the outputVolume option.", - argstr="--outputTransform %s") + desc="(optional) Filename to which save the (optional) estimated transform. NOTE: You must select either the outputTransform or the outputVolume option.", + argstr="--outputTransform %s", + ) fixedVolumeTimeIndex = traits.Int( - desc= - "The index in the time series for the 3D fixed image to fit, if 4-dimensional.", - argstr="--fixedVolumeTimeIndex %d") + desc="The index in the time series for the 3D fixed image to fit, if 4-dimensional.", + argstr="--fixedVolumeTimeIndex %d", + ) movingVolumeTimeIndex = traits.Int( - desc= - "The index in the time series for the 3D moving image to fit, if 4-dimensional.", - argstr="--movingVolumeTimeIndex %d") + desc="The index in the time series for the 3D moving image to fit, if 4-dimensional.", + argstr="--movingVolumeTimeIndex %d", + ) medianFilterSize = InputMultiPath( traits.Int, - desc= - "The radius for the optional MedianImageFilter preprocessing in all 3 directions.", + desc="The radius for the optional MedianImageFilter preprocessing in all 3 directions.", sep=",", - argstr="--medianFilterSize %s") + argstr="--medianFilterSize %s", + ) removeIntensityOutliers = traits.Float( - desc= - "The half percentage to decide outliers of image intensities. The default value is zero, which means no outlier removal. If the value of 0.005 is given, the moduel will throw away 0.005 % of both tails, so 0.01% of intensities in total would be ignored in its statistic calculation. ", - argstr="--removeIntensityOutliers %f") + desc="The half percentage to decide outliers of image intensities. The default value is zero, which means no outlier removal. If the value of 0.005 is given, the module will throw away 0.005 % of both tails, so 0.01% of intensities in total would be ignored in its statistic calculation. ", + argstr="--removeIntensityOutliers %f", + ) useCachingOfBSplineWeightsMode = traits.Enum( "ON", "OFF", - desc= - "This is a 5x speed advantage at the expense of requiring much more memory. Only relevant when transformType is BSpline.", - argstr="--useCachingOfBSplineWeightsMode %s") + desc="This is a 5x speed advantage at the expense of requiring much more memory. Only relevant when transformType is BSpline.", + argstr="--useCachingOfBSplineWeightsMode %s", + ) useExplicitPDFDerivativesMode = traits.Enum( "AUTO", "ON", "OFF", - desc= - "Using mode AUTO means OFF for BSplineDeformableTransforms and ON for the linear transforms. The ON alternative uses more memory to sometimes do a better job.", - argstr="--useExplicitPDFDerivativesMode %s") + desc="Using mode AUTO means OFF for BSplineDeformableTransforms and ON for the linear transforms. The ON alternative uses more memory to sometimes do a better job.", + argstr="--useExplicitPDFDerivativesMode %s", + ) ROIAutoDilateSize = traits.Float( - desc= - "This flag is only relavent when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. 
At setting of 10mm has been shown to help regularize a BSpline registration type so that there is some background constraints to match the edges of the head better.", - argstr="--ROIAutoDilateSize %f") + desc="This flag is only relevant when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. A setting of 10mm has been shown to help regularize a BSpline registration type so that there are some background constraints to match the edges of the head better.", + argstr="--ROIAutoDilateSize %f", + ) ROIAutoClosingSize = traits.Float( - desc= - "This flag is only relavent when using ROIAUTO mode for initializing masks. It defines the hole closing size in mm. It is rounded up to the nearest whole pixel size in each direction. The default is to use a closing size of 9mm. For mouse data this value may need to be reset to 0.9 or smaller.", - argstr="--ROIAutoClosingSize %f") + desc="This flag is only relevant when using ROIAUTO mode for initializing masks. It defines the hole closing size in mm. It is rounded up to the nearest whole pixel size in each direction. The default is to use a closing size of 9mm. For mouse data this value may need to be reset to 0.9 or smaller.", + argstr="--ROIAutoClosingSize %f", + ) relaxationFactor = traits.Float( - desc= - "Internal debugging parameter, and should probably never be used from the command line. This will be removed in the future.", - argstr="--relaxationFactor %f") + desc="Internal debugging parameter, and should probably never be used from the command line. This will be removed in the future.", + argstr="--relaxationFactor %f", + ) maximumStepLength = traits.Float( - desc= - "Internal debugging parameter, and should probably never be used from the command line. This will be removed in the future.", - argstr="--maximumStepLength %f") + desc="Internal debugging parameter, and should probably never be used from the command line. This will be removed in the future.", + argstr="--maximumStepLength %f", + ) failureExitCode = traits.Int( - desc= - "If the fit fails, exit with this status code. (It can be used to force a successfult exit status of (0) if the registration fails due to reaching the maximum number of iterations.", - argstr="--failureExitCode %d") + desc="If the fit fails, exit with this status code. (It can be used to force a successful exit status of (0) if the registration fails due to reaching the maximum number of iterations.", + argstr="--failureExitCode %d", + ) writeTransformOnFailure = traits.Bool( - desc= - "Flag to save the final transform even if the numberOfIterations are reached without convergence. (Intended for use when --failureExitCode 0 )", - argstr="--writeTransformOnFailure ") + desc="Flag to save the final transform even if the numberOfIterations are reached without convergence. (Intended for use when --failureExitCode 0 )", + argstr="--writeTransformOnFailure ", + ) numberOfThreads = traits.Int( - desc= - "Explicitly specify the maximum number of threads to use. (default is auto-detected)", - argstr="--numberOfThreads %d") + desc="Explicitly specify the maximum number of threads to use. (default is auto-detected)", + argstr="--numberOfThreads %d", + ) forceMINumberOfThreads = traits.Int( - desc= - "Force the the maximum number of threads to use for non thread safe MI metric. CAUTION: Inconsistent results my arise!", - argstr="--forceMINumberOfThreads %d") + desc="Force the maximum number of threads to use for non thread safe MI metric. CAUTION: Inconsistent results may arise!", + argstr="--forceMINumberOfThreads %d", + ) debugLevel = traits.Int( - desc= - "Display debug messages, and produce debug intermediate results. 
0=OFF, 1=Minimal, 10=Maximum debugging.", - argstr="--debugLevel %d") + desc="Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", + argstr="--debugLevel %d", + ) costFunctionConvergenceFactor = traits.Float( - desc= - " From itkLBFGSBOptimizer.h: Set/Get the CostFunctionConvergenceFactor. Algorithm terminates when the reduction in cost function is less than (factor * epsmcj) where epsmch is the machine precision. Typical values for factor: 1e+12 for low accuracy; 1e+7 for moderate accuracy and 1e+1 for extremely high accuracy. 1e+9 seems to work well., ", - argstr="--costFunctionConvergenceFactor %f") + desc=" From itkLBFGSBOptimizer.h: Set/Get the CostFunctionConvergenceFactor. Algorithm terminates when the reduction in cost function is less than (factor * epsmch) where epsmch is the machine precision. Typical values for factor: 1e+12 for low accuracy; 1e+7 for moderate accuracy and 1e+1 for extremely high accuracy. 1e+9 seems to work well., ", + argstr="--costFunctionConvergenceFactor %f", + ) projectedGradientTolerance = traits.Float( - desc= - " From itkLBFGSBOptimizer.h: Set/Get the ProjectedGradientTolerance. Algorithm terminates when the project gradient is below the tolerance. Default lbfgsb value is 1e-5, but 1e-4 seems to work well., ", - argstr="--projectedGradientTolerance %f") + desc=" From itkLBFGSBOptimizer.h: Set/Get the ProjectedGradientTolerance. Algorithm terminates when the projected gradient is below the tolerance. Default lbfgsb value is 1e-5, but 1e-4 seems to work well., ", + argstr="--projectedGradientTolerance %f", + ) gui = traits.Bool( - desc= - "Display intermediate image volumes for debugging. NOTE: This is not part of the standard build sytem, and probably does nothing on your installation.", - argstr="--gui ") + desc="Display intermediate image volumes for debugging. NOTE: This is not part of the standard build system, and probably does nothing on your installation.", + argstr="--gui ", + ) promptUser = traits.Bool( - desc= - "Prompt the user to hit enter each time an image is sent to the DebugImageViewer", - argstr="--promptUser ") + desc="Prompt the user to hit enter each time an image is sent to the DebugImageViewer", + argstr="--promptUser ", + ) NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_00 = traits.Bool( - desc="DO NOT USE THIS FLAG", - argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_00 ") + desc="DO NOT USE THIS FLAG", argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_00 " + ) NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_01 = traits.Bool( - desc="DO NOT USE THIS FLAG", - argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_01 ") + desc="DO NOT USE THIS FLAG", argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_01 " + ) NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_02 = traits.Bool( - desc="DO NOT USE THIS FLAG", - argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_02 ") + desc="DO NOT USE THIS FLAG", argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_02 " + ) permitParameterVariation = InputMultiPath( traits.Int, - desc= - "A bit vector to permit linear transform parameters to vary under optimization. The vector order corresponds with transform parameters, and beyond the end ones fill in as a default. For instance, you can choose to rotate only in x (pitch) with 1,0,0; this is mostly for expert use in turning on and off individual degrees of freedom in rotation, translation or scaling without multiplying the number of transform representations; this trick is probably meaningless when tried with the general affine transform.", + desc="A bit vector to permit linear transform parameters to vary under optimization. 
The vector order corresponds with transform parameters, and beyond the end ones fill in as a default. For instance, you can choose to rotate only in x (pitch) with 1,0,0; this is mostly for expert use in turning on and off individual degrees of freedom in rotation, translation or scaling without multiplying the number of transform representations; this trick is probably meaningless when tried with the general affine transform.", sep=",", - argstr="--permitParameterVariation %s") + argstr="--permitParameterVariation %s", + ) costMetric = traits.Enum( "MMI", "MSE", "NC", "MC", - desc= - "The cost metric to be used during fitting. Defaults to MMI. Options are MMI (Mattes Mutual Information), MSE (Mean Square Error), NC (Normalized Correlation), MC (Match Cardinality for binary images)", - argstr="--costMetric %s") + desc="The cost metric to be used during fitting. Defaults to MMI. Options are MMI (Mattes Mutual Information), MSE (Mean Square Error), NC (Normalized Correlation), MC (Match Cardinality for binary images)", + argstr="--costMetric %s", + ) writeOutputTransformInFloat = traits.Bool( - desc= - "By default, the output registration transforms (either the output composite transform or each transform component) are written to the disk in double precision. If this flag is ON, the output transforms will be written in single (float) precision. It is especially important if the output transform is a displacement field transform, or it is a composite transform that includes several displacement fields.", - argstr="--writeOutputTransformInFloat ") + desc="By default, the output registration transforms (either the output composite transform or each transform component) are written to the disk in double precision. If this flag is ON, the output transforms will be written in single (float) precision. It is especially important if the output transform is a displacement field transform, or it is a composite transform that includes several displacement fields.", + argstr="--writeOutputTransformInFloat ", + ) class BRAINSFitOutputSpec(TraitedSpec): bsplineTransform = File( - desc= - "(optional) Filename to which save the estimated transform. NOTE: You must set at least one output object (either a deformed image or a transform. NOTE: USE THIS ONLY IF THE FINAL TRANSFORM IS BSpline", - exists=True) + desc="(optional) Filename to which save the estimated transform. NOTE: You must set at least one output object (either a deformed image or a transform. NOTE: USE THIS ONLY IF THE FINAL TRANSFORM IS BSpline", + exists=True, + ) linearTransform = File( - desc= - "(optional) Filename to which save the estimated transform. NOTE: You must set at least one output object (either a deformed image or a transform. NOTE: USE THIS ONLY IF THE FINAL TRANSFORM IS ---NOT--- BSpline", - exists=True) + desc="(optional) Filename to which save the estimated transform. NOTE: You must set at least one output object (either a deformed image or a transform. NOTE: USE THIS ONLY IF THE FINAL TRANSFORM IS ---NOT--- BSpline", + exists=True, + ) outputVolume = File( - desc= - "(optional) Output image for registration. NOTE: You must select either the outputTransform or the outputVolume option.", - exists=True) + desc="(optional) Output image for registration. 
NOTE: You must select either the outputTransform or the outputVolume option.", - exists=True) + desc="(optional) Output image for registration. NOTE: You must select either the outputTransform or the outputVolume option.", + exists=True, + ) outputFixedVolumeROI = File( - desc= - "The ROI automatically found in fixed image, ONLY FOR ROIAUTO mode.", - exists=True) + desc="The ROI automatically found in fixed image, ONLY FOR ROIAUTO mode.", + exists=True, + ) outputMovingVolumeROI = File( - desc= - "The ROI automatically found in moving image, ONLY FOR ROIAUTO mode.", - exists=True) + desc="The ROI automatically found in moving image, ONLY FOR ROIAUTO mode.", + exists=True, + ) strippedOutputTransform = File( - desc= - "File name for the rigid component of the estimated affine transform. Can be used to rigidly register the moving image to the fixed image. NOTE: This value is overwritten if either bsplineTransform or linearTransform is set.", - exists=True) + desc="File name for the rigid component of the estimated affine transform. Can be used to rigidly register the moving image to the fixed image. NOTE: This value is overwritten if either bsplineTransform or linearTransform is set.", + exists=True, + ) outputTransform = File( - desc= - "(optional) Filename to which save the (optional) estimated transform. NOTE: You must select either the outputTransform or the outputVolume option.", - exists=True) + desc="(optional) Filename to which save the (optional) estimated transform. NOTE: You must select either the outputTransform or the outputVolume option.", + exists=True, + ) class BRAINSFit(SEMLikeCommandLine): @@ -370,11 +385,11 @@ class BRAINSFit(SEMLikeCommandLine): output_spec = BRAINSFitOutputSpec _cmd = "BRAINSFit " _outputs_filenames = { - 'outputVolume': 'outputVolume.nii', - 'bsplineTransform': 'bsplineTransform.mat', - 'outputTransform': 'outputTransform.mat', - 'outputFixedVolumeROI': 'outputFixedVolumeROI.nii', - 'strippedOutputTransform': 'strippedOutputTransform.mat', - 'outputMovingVolumeROI': 'outputMovingVolumeROI.nii', - 'linearTransform': 'linearTransform.mat' + "outputVolume": "outputVolume.nii", + "bsplineTransform": "bsplineTransform.mat", + "outputTransform": "outputTransform.mat", + "outputFixedVolumeROI": "outputFixedVolumeROI.nii", + "strippedOutputTransform": "strippedOutputTransform.mat", + "outputMovingVolumeROI": "outputMovingVolumeROI.nii", + "linearTransform": "linearTransform.mat", } diff --git a/nipype/interfaces/slicer/registration/brainsresample.py b/nipype/interfaces/slicer/registration/brainsresample.py index a3b79681fd..9031b86d8c 100644 --- a/nipype/interfaces/slicer/registration/brainsresample.py +++ b/nipype/interfaces/slicer/registration/brainsresample.py @@ -3,24 +3,35 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class BRAINSResampleInputSpec(CommandLineInputSpec): - inputVolume = File( - desc="Image To Warp", exists=True, argstr="--inputVolume %s") + inputVolume = File(desc="Image To Warp", exists=True, argstr="--inputVolume %s") referenceVolume = File( - desc= - "Reference image used only to define the output space. 
If not specified, the warping is done in the same space as the image to warp.", + desc="Reference image used only to define the output space. If not specified, the warping is done in the same space as the image to warp.", exists=True, - argstr="--referenceVolume %s") + argstr="--referenceVolume %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Resulting deformed image", - argstr="--outputVolume %s") + argstr="--outputVolume %s", + ) pixelType = traits.Enum( "float", "short", @@ -29,18 +40,19 @@ class BRAINSResampleInputSpec(CommandLineInputSpec): "uint", "uchar", "binary", - desc= - "Specifies the pixel type for the input/output images. The \'binary\' pixel type uses a modified algorithm whereby the image is read in as unsigned char, a signed distance map is created, signed distance map is resampled, and then a thresholded image of type unsigned char is written to disk.", - argstr="--pixelType %s") + desc="Specifies the pixel type for the input/output images. The 'binary' pixel type uses a modified algorithm whereby the image is read in as unsigned char, a signed distance map is created, the signed distance map is resampled, and then a thresholded image of type unsigned char is written to disk.", + argstr="--pixelType %s", + ) deformationVolume = File( desc="Displacement Field to be used to warp the image", exists=True, - argstr="--deformationVolume %s") + argstr="--deformationVolume %s", + ) warpTransform = File( - desc= - "Filename for the BRAINSFit transform used in place of the deformation field", + desc="Filename for the BRAINSFit transform used in place of the deformation field", exists=True, - argstr="--warpTransform %s") + argstr="--warpTransform %s", + ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", @@ -52,24 +64,24 @@ class BRAINSResampleInputSpec(CommandLineInputSpec): "Welch", "Lanczos", "Blackman", - desc= - "Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", - argstr="--interpolationMode %s") + desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", + argstr="--interpolationMode %s", + ) inverseTransform = traits.Bool( - desc= - "True/False is to compute inverse of given transformation. Default is false", - argstr="--inverseTransform ") - defaultValue = traits.Float( - desc="Default voxel value", argstr="--defaultValue %f") + desc="True/False is to compute inverse of given transformation. Default is false", + argstr="--inverseTransform ", + ) + defaultValue = traits.Float(desc="Default voxel value", argstr="--defaultValue %f") gridSpacing = InputMultiPath( traits.Int, - desc= - "Add warped grid to output image to help show the deformation that occured with specified spacing. A spacing of 0 in a dimension indicates that grid lines should be rendered to fall exactly (i.e. do not allow displacements off that plane). This is useful for makeing a 2D image of grid lines from the 3D space ", + desc="Add warped grid to output image to help show the deformation that occurred with specified spacing. A spacing of 0 in a dimension indicates that grid lines should be rendered to fall exactly (i.e. do not allow displacements off that plane). 
This is useful for making a 2D image of grid lines from the 3D space ", sep=",", - argstr="--gridSpacing %s") + argstr="--gridSpacing %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSResampleOutputSpec(TraitedSpec): @@ -100,4 +112,4 @@ class BRAINSResample(SEMLikeCommandLine): input_spec = BRAINSResampleInputSpec output_spec = BRAINSResampleOutputSpec _cmd = "BRAINSResample " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/registration/specialized.py b/nipype/interfaces/slicer/registration/specialized.py index 9c6c3f5f20..12835b0cef 100644 --- a/nipype/interfaces/slicer/registration/specialized.py +++ b/nipype/interfaces/slicer/registration/specialized.py @@ -3,37 +3,49 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class ACPCTransformInputSpec(CommandLineInputSpec): acpc = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), - desc= - "ACPC line, two fiducial points, one at the anterior commissure and one at the posterior commissure.", - argstr="--acpc %s...") + desc="ACPC line, two fiducial points, one at the anterior commissure and one at the posterior commissure.", + argstr="--acpc %s...", + ) midline = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), - desc= - "The midline is a series of points defining the division between the hemispheres of the brain (the mid sagittal plane).", - argstr="--midline %s...") + desc="The midline is a series of points defining the division between the hemispheres of the brain (the mid sagittal plane).", + argstr="--midline %s...", + ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "A transform filled in from the ACPC and Midline registration calculation", - argstr="--outputTransform %s") + desc="A transform filled in from the ACPC and Midline registration calculation", + argstr="--outputTransform %s", + ) debugSwitch = traits.Bool( - desc="Click if wish to see debugging output", argstr="--debugSwitch ") + desc="Click if you wish to see debugging output", argstr="--debugSwitch " + ) class ACPCTransformOutputSpec(TraitedSpec): outputTransform = File( - desc= - "A transform filled in from the ACPC and Midline registration calculation", - exists=True) + desc="A transform filled in from the ACPC and Midline registration calculation", + exists=True, + ) class ACPCTransform(SEMLikeCommandLine): @@ -58,39 +70,44 @@ class ACPCTransform(SEMLikeCommandLine): input_spec = ACPCTransformInputSpec output_spec = ACPCTransformOutputSpec _cmd = "ACPCTransform " - _outputs_filenames = {'outputTransform': 'outputTransform.mat'} + _outputs_filenames = {"outputTransform": "outputTransform.mat"} class FiducialRegistrationInputSpec(CommandLineInputSpec): fixedLandmarks = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), desc="Ordered list of landmarks in the fixed image", - argstr="--fixedLandmarks %s...") + 
argstr="--fixedLandmarks %s...", + ) movingLandmarks = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), desc="Ordered list of landmarks in the moving image", - argstr="--movingLandmarks %s...") + argstr="--movingLandmarks %s...", + ) saveTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Save the transform that results from registration", - argstr="--saveTransform %s") + argstr="--saveTransform %s", + ) transformType = traits.Enum( "Translation", "Rigid", "Similarity", desc="Type of transform to produce", - argstr="--transformType %s") + argstr="--transformType %s", + ) rms = traits.Float(desc="Display RMS Error.", argstr="--rms %f") outputMessage = traits.Str( - desc="Provides more information on the output", - argstr="--outputMessage %s") + desc="Provides more information on the output", argstr="--outputMessage %s" + ) class FiducialRegistrationOutputSpec(TraitedSpec): saveTransform = File( - desc="Save the transform that results from registration", exists=True) + desc="Save the transform that results from registration", exists=True + ) class FiducialRegistration(SEMLikeCommandLine): @@ -113,50 +130,52 @@ class FiducialRegistration(SEMLikeCommandLine): input_spec = FiducialRegistrationInputSpec output_spec = FiducialRegistrationOutputSpec _cmd = "FiducialRegistration " - _outputs_filenames = {'saveTransform': 'saveTransform.txt'} + _outputs_filenames = {"saveTransform": "saveTransform.txt"} class VBRAINSDemonWarpInputSpec(CommandLineInputSpec): movingVolume = InputMultiPath( File(exists=True), desc="Required: input moving image", - argstr="--movingVolume %s...") + argstr="--movingVolume %s...", + ) fixedVolume = InputMultiPath( File(exists=True), desc="Required: input fixed (target) image", - argstr="--fixedVolume %s...") + argstr="--fixedVolume %s...", + ) inputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", - desc= - "Input volumes will be typecast to this format: float|short|ushort|int|uchar", - argstr="--inputPixelType %s") + desc="Input volumes will be typecast to this format: float|short|ushort|int|uchar", + argstr="--inputPixelType %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: output resampled moving image (will have the same physical space as the fixedVolume).", - argstr="--outputVolume %s") + desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", + argstr="--outputVolume %s", + ) outputDisplacementFieldVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Output deformation field vector image (will have the same physical space as the fixedVolume).", - argstr="--outputDisplacementFieldVolume %s") + desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", + argstr="--outputDisplacementFieldVolume %s", + ) outputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", - desc= - "outputVolume will be typecast to this format: float|short|ushort|int|uchar", - argstr="--outputPixelType %s") + desc="outputVolume will be typecast to this format: float|short|ushort|int|uchar", + argstr="--outputPixelType %s", + ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", @@ -168,171 +187,185 @@ class VBRAINSDemonWarpInputSpec(CommandLineInputSpec): "Welch", "Lanczos", "Blackman", - desc= - "Type of interpolation to be used when applying transform to moving volume. 
Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", - argstr="--interpolationMode %s") + desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", + argstr="--interpolationMode %s", + ) registrationFilterType = traits.Enum( "Demons", "FastSymmetricForces", "Diffeomorphic", "LogDemons", "SymmetricLogDemons", - desc= - "Registration Filter Type: Demons|FastSymmetricForces|Diffeomorphic|LogDemons|SymmetricLogDemons", - argstr="--registrationFilterType %s") + desc="Registration Filter Type: Demons|FastSymmetricForces|Diffeomorphic|LogDemons|SymmetricLogDemons", + argstr="--registrationFilterType %s", + ) smoothDisplacementFieldSigma = traits.Float( - desc= - "A gaussian smoothing value to be applied to the deformation feild at each iteration.", - argstr="--smoothDisplacementFieldSigma %f") + desc="A gaussian smoothing value to be applied to the deformation field at each iteration.", + argstr="--smoothDisplacementFieldSigma %f", + ) numberOfPyramidLevels = traits.Int( - desc= - "Number of image pyramid levels to use in the multi-resolution registration.", - argstr="--numberOfPyramidLevels %d") + desc="Number of image pyramid levels to use in the multi-resolution registration.", + argstr="--numberOfPyramidLevels %d", + ) minimumFixedPyramid = InputMultiPath( traits.Int, - desc= - "The shrink factor for the first level of the fixed image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", + desc="The shrink factor for the first level of the fixed image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", - argstr="--minimumFixedPyramid %s") + argstr="--minimumFixedPyramid %s", + ) minimumMovingPyramid = InputMultiPath( traits.Int, - desc= - "The shrink factor for the first level of the moving image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", + desc="The shrink factor for the first level of the moving image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", - argstr="--minimumMovingPyramid %s") + argstr="--minimumMovingPyramid %s", + ) arrayOfPyramidLevelIterations = InputMultiPath( traits.Int, desc="The number of iterations for each pyramid level", sep=",", - argstr="--arrayOfPyramidLevelIterations %s") + argstr="--arrayOfPyramidLevelIterations %s", + ) histogramMatch = traits.Bool( - desc= - "Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile.", - argstr="--histogramMatch ") + desc="Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile.", + argstr="--histogramMatch ", + ) numberOfHistogramBins = traits.Int( - desc="The number of histogram levels", - argstr="--numberOfHistogramBins %d") + desc="The number of histogram levels", argstr="--numberOfHistogramBins %d" + ) numberOfMatchPoints = traits.Int( desc="The number of match points for histrogramMatch", - argstr="--numberOfMatchPoints %d") + argstr="--numberOfMatchPoints %d", + ) medianFilterSize = InputMultiPath( traits.Int, - desc= - "Median filter radius in all 3 directions. When images have a lot of salt and pepper noise, this step can improve the registration.", + desc="Median filter radius in all 3 directions. 
When images have a lot of salt and pepper noise, this step can improve the registration.", sep=",", - argstr="--medianFilterSize %s") + argstr="--medianFilterSize %s", + ) initializeWithDisplacementField = File( desc="Initial deformation field vector image file name", exists=True, - argstr="--initializeWithDisplacementField %s") + argstr="--initializeWithDisplacementField %s", + ) initializeWithTransform = File( desc="Initial Transform filename", exists=True, - argstr="--initializeWithTransform %s") + argstr="--initializeWithTransform %s", + ) makeBOBF = traits.Bool( - desc= - "Flag to make Brain-Only Background-Filled versions of the input and target volumes.", - argstr="--makeBOBF ") + desc="Flag to make Brain-Only Background-Filled versions of the input and target volumes.", + argstr="--makeBOBF ", + ) fixedBinaryVolume = File( desc="Mask filename for desired region of interest in the Fixed image.", exists=True, - argstr="--fixedBinaryVolume %s") + argstr="--fixedBinaryVolume %s", + ) movingBinaryVolume = File( - desc= - "Mask filename for desired region of interest in the Moving image.", + desc="Mask filename for desired region of interest in the Moving image.", exists=True, - argstr="--movingBinaryVolume %s") + argstr="--movingBinaryVolume %s", + ) lowerThresholdForBOBF = traits.Int( - desc="Lower threshold for performing BOBF", - argstr="--lowerThresholdForBOBF %d") + desc="Lower threshold for performing BOBF", argstr="--lowerThresholdForBOBF %d" + ) upperThresholdForBOBF = traits.Int( - desc="Upper threshold for performing BOBF", - argstr="--upperThresholdForBOBF %d") + desc="Upper threshold for performing BOBF", argstr="--upperThresholdForBOBF %d" + ) backgroundFillValue = traits.Int( desc="Replacement value to overwrite background when performing BOBF", - argstr="--backgroundFillValue %d") + argstr="--backgroundFillValue %d", + ) seedForBOBF = InputMultiPath( traits.Int, desc="coordinates in all 3 directions for Seed when performing BOBF", sep=",", - argstr="--seedForBOBF %s") + argstr="--seedForBOBF %s", + ) neighborhoodForBOBF = InputMultiPath( traits.Int, - desc= - "neighborhood in all 3 directions to be included when performing BOBF", + desc="neighborhood in all 3 directions to be included when performing BOBF", sep=",", - argstr="--neighborhoodForBOBF %s") + argstr="--neighborhoodForBOBF %s", + ) outputDisplacementFieldPrefix = traits.Str( - desc= - "Displacement field filename prefix for writing separate x, y, and z component images", - argstr="--outputDisplacementFieldPrefix %s") + desc="Displacement field filename prefix for writing separate x, y, and z component images", + argstr="--outputDisplacementFieldPrefix %s", + ) outputCheckerboardVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", - argstr="--outputCheckerboardVolume %s") + desc="Generate a checkerboard image volume between the fixedVolume and the deformed movingVolume.", + argstr="--outputCheckerboardVolume %s", + ) checkerboardPatternSubdivisions = InputMultiPath( traits.Int, desc="Number of Checkerboard subdivisions in all 3 directions", sep=",", - argstr="--checkerboardPatternSubdivisions %s") + argstr="--checkerboardPatternSubdivisions %s", + ) outputNormalized = traits.Bool( - desc= - "Flag to warp and write the normalized images to output. 
In normalized images the image values are fit-scaled to be between 0 and the maximum storage type value.", - argstr="--outputNormalized ") + desc="Flag to warp and write the normalized images to output. In normalized images the image values are fit-scaled to be between 0 and the maximum storage type value.", + argstr="--outputNormalized ", + ) outputDebug = traits.Bool( - desc="Flag to write debugging images after each step.", - argstr="--outputDebug ") + desc="Flag to write debugging images after each step.", argstr="--outputDebug " + ) weightFactors = InputMultiPath( traits.Float, desc="Weight fatctors for each input images", sep=",", - argstr="--weightFactors %s") + argstr="--weightFactors %s", + ) gradient_type = traits.Enum( "0", "1", "2", - desc= - "Type of gradient used for computing the demons force (0 is symmetrized, 1 is fixed image, 2 is moving image)", - argstr="--gradient_type %s") + desc="Type of gradient used for computing the demons force (0 is symmetrized, 1 is fixed image, 2 is moving image)", + argstr="--gradient_type %s", + ) upFieldSmoothing = traits.Float( desc="Smoothing sigma for the update field at each iteration", - argstr="--upFieldSmoothing %f") + argstr="--upFieldSmoothing %f", + ) max_step_length = traits.Float( desc="Maximum length of an update vector (0: no restriction)", - argstr="--max_step_length %f") + argstr="--max_step_length %f", + ) use_vanilla_dem = traits.Bool( - desc="Run vanilla demons algorithm", argstr="--use_vanilla_dem ") + desc="Run vanilla demons algorithm", argstr="--use_vanilla_dem " + ) gui = traits.Bool( - desc="Display intermediate image volumes for debugging", - argstr="--gui ") + desc="Display intermediate image volumes for debugging", argstr="--gui " + ) promptUser = traits.Bool( - desc= - "Prompt the user to hit enter each time an image is sent to the DebugImageViewer", - argstr="--promptUser ") + desc="Prompt the user to hit enter each time an image is sent to the DebugImageViewer", + argstr="--promptUser ", + ) numberOfBCHApproximationTerms = traits.Int( desc="Number of terms in the BCH expansion", - argstr="--numberOfBCHApproximationTerms %d") + argstr="--numberOfBCHApproximationTerms %d", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class VBRAINSDemonWarpOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: output resampled moving image (will have the same physical space as the fixedVolume).", - exists=True) + desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", + exists=True, + ) outputDisplacementFieldVolume = File( - desc= - "Output deformation field vector image (will have the same physical space as the fixedVolume).", - exists=True) + desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", + exists=True, + ) outputCheckerboardVolume = File( - desc= - "Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", - exists=True) + desc="Generate a checkerboard image volume between the fixedVolume and the deformed movingVolume.", + exists=True, + ) class VBRAINSDemonWarp(SEMLikeCommandLine): @@ -361,53 +394,53 @@ class VBRAINSDemonWarp(SEMLikeCommandLine): output_spec = VBRAINSDemonWarpOutputSpec _cmd = "VBRAINSDemonWarp " _outputs_filenames = { - 'outputVolume': 'outputVolume.nii', - 'outputCheckerboardVolume': 'outputCheckerboardVolume.nii', - 
'outputDisplacementFieldVolume': 'outputDisplacementFieldVolume.nrrd' + "outputVolume": "outputVolume.nii", + "outputCheckerboardVolume": "outputCheckerboardVolume.nii", + "outputDisplacementFieldVolume": "outputDisplacementFieldVolume.nrrd", } class BRAINSDemonWarpInputSpec(CommandLineInputSpec): movingVolume = File( - desc="Required: input moving image", - exists=True, - argstr="--movingVolume %s") + desc="Required: input moving image", exists=True, argstr="--movingVolume %s" + ) fixedVolume = File( desc="Required: input fixed (target) image", exists=True, - argstr="--fixedVolume %s") + argstr="--fixedVolume %s", + ) inputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", - desc= - "Input volumes will be typecast to this format: float|short|ushort|int|uchar", - argstr="--inputPixelType %s") + desc="Input volumes will be typecast to this format: float|short|ushort|int|uchar", + argstr="--inputPixelType %s", + ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Required: output resampled moving image (will have the same physical space as the fixedVolume).", - argstr="--outputVolume %s") + desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", + argstr="--outputVolume %s", + ) outputDisplacementFieldVolume = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Output deformation field vector image (will have the same physical space as the fixedVolume).", - argstr="--outputDisplacementFieldVolume %s") + desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", + argstr="--outputDisplacementFieldVolume %s", + ) outputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", - desc= - "outputVolume will be typecast to this format: float|short|ushort|int|uchar", - argstr="--outputPixelType %s") + desc="outputVolume will be typecast to this format: float|short|ushort|int|uchar", + argstr="--outputPixelType %s", + ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", @@ -419,168 +452,181 @@ class BRAINSDemonWarpInputSpec(CommandLineInputSpec): "Welch", "Lanczos", "Blackman", - desc= - "Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", - argstr="--interpolationMode %s") + desc="Type of interpolation to be used when applying transform to moving volume. 
Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", + argstr="--interpolationMode %s", + ) registrationFilterType = traits.Enum( "Demons", "FastSymmetricForces", "Diffeomorphic", - desc= - "Registration Filter Type: Demons|FastSymmetricForces|Diffeomorphic", - argstr="--registrationFilterType %s") + desc="Registration Filter Type: Demons|FastSymmetricForces|Diffeomorphic", + argstr="--registrationFilterType %s", + ) smoothDisplacementFieldSigma = traits.Float( - desc= - "A gaussian smoothing value to be applied to the deformation feild at each iteration.", - argstr="--smoothDisplacementFieldSigma %f") + desc="A gaussian smoothing value to be applied to the deformation field at each iteration.", + argstr="--smoothDisplacementFieldSigma %f", + ) numberOfPyramidLevels = traits.Int( - desc= - "Number of image pyramid levels to use in the multi-resolution registration.", - argstr="--numberOfPyramidLevels %d") + desc="Number of image pyramid levels to use in the multi-resolution registration.", + argstr="--numberOfPyramidLevels %d", + ) minimumFixedPyramid = InputMultiPath( traits.Int, - desc= - "The shrink factor for the first level of the fixed image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", + desc="The shrink factor for the first level of the fixed image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", - argstr="--minimumFixedPyramid %s") + argstr="--minimumFixedPyramid %s", + ) minimumMovingPyramid = InputMultiPath( traits.Int, - desc= - "The shrink factor for the first level of the moving image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", + desc="The shrink factor for the first level of the moving image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", - argstr="--minimumMovingPyramid %s") + argstr="--minimumMovingPyramid %s", + ) arrayOfPyramidLevelIterations = InputMultiPath( traits.Int, desc="The number of iterations for each pyramid level", sep=",", - argstr="--arrayOfPyramidLevelIterations %s") + argstr="--arrayOfPyramidLevelIterations %s", + ) histogramMatch = traits.Bool( - desc= - "Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile.", - argstr="--histogramMatch ") + desc="Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile.", + argstr="--histogramMatch ", + ) numberOfHistogramBins = traits.Int( - desc="The number of histogram levels", - argstr="--numberOfHistogramBins %d") + desc="The number of histogram levels", argstr="--numberOfHistogramBins %d" + ) numberOfMatchPoints = traits.Int( desc="The number of match points for histrogramMatch", - argstr="--numberOfMatchPoints %d") + argstr="--numberOfMatchPoints %d", + ) medianFilterSize = InputMultiPath( traits.Int, - desc= - "Median filter radius in all 3 directions. When images have a lot of salt and pepper noise, this step can improve the registration.", + desc="Median filter radius in all 3 directions. 
When images have a lot of salt and pepper noise, this step can improve the registration.", sep=",", - argstr="--medianFilterSize %s") + argstr="--medianFilterSize %s", + ) initializeWithDisplacementField = File( desc="Initial deformation field vector image file name", exists=True, - argstr="--initializeWithDisplacementField %s") + argstr="--initializeWithDisplacementField %s", + ) initializeWithTransform = File( desc="Initial Transform filename", exists=True, - argstr="--initializeWithTransform %s") + argstr="--initializeWithTransform %s", + ) maskProcessingMode = traits.Enum( "NOMASK", "ROIAUTO", "ROI", "BOBF", - desc= - "What mode to use for using the masks: NOMASK|ROIAUTO|ROI|BOBF. If ROIAUTO is choosen, then the mask is implicitly defined using a otsu forground and hole filling algorithm. Where the Region Of Interest mode uses the masks to define what parts of the image should be used for computing the deformation field. Brain Only Background Fill uses the masks to pre-process the input images by clipping and filling in the background with a predefined value.", - argstr="--maskProcessingMode %s") + desc="What mode to use for using the masks: NOMASK|ROIAUTO|ROI|BOBF. If ROIAUTO is chosen, then the mask is implicitly defined using an Otsu foreground and hole filling algorithm. Where the Region Of Interest mode uses the masks to define what parts of the image should be used for computing the deformation field. Brain Only Background Fill uses the masks to pre-process the input images by clipping and filling in the background with a predefined value.", + argstr="--maskProcessingMode %s", + ) fixedBinaryVolume = File( desc="Mask filename for desired region of interest in the Fixed image.", exists=True, - argstr="--fixedBinaryVolume %s") + argstr="--fixedBinaryVolume %s", + ) movingBinaryVolume = File( - desc= - "Mask filename for desired region of interest in the Moving image.", + desc="Mask filename for desired region of interest in the Moving image.", exists=True, - argstr="--movingBinaryVolume %s") + argstr="--movingBinaryVolume %s", + ) lowerThresholdForBOBF = traits.Int( - desc="Lower threshold for performing BOBF", - argstr="--lowerThresholdForBOBF %d") + desc="Lower threshold for performing BOBF", argstr="--lowerThresholdForBOBF %d" + ) upperThresholdForBOBF = traits.Int( - desc="Upper threshold for performing BOBF", - argstr="--upperThresholdForBOBF %d") + desc="Upper threshold for performing BOBF", argstr="--upperThresholdForBOBF %d" + ) backgroundFillValue = traits.Int( desc="Replacement value to overwrite background when performing BOBF", - argstr="--backgroundFillValue %d") + argstr="--backgroundFillValue %d", + ) seedForBOBF = InputMultiPath( traits.Int, desc="coordinates in all 3 directions for Seed when performing BOBF", sep=",", - argstr="--seedForBOBF %s") + argstr="--seedForBOBF %s", + ) neighborhoodForBOBF = InputMultiPath( traits.Int, - desc= - "neighborhood in all 3 directions to be included when performing BOBF", + desc="neighborhood in all 3 directions to be included when performing BOBF", sep=",", - argstr="--neighborhoodForBOBF %s") + argstr="--neighborhoodForBOBF %s", + ) outputDisplacementFieldPrefix = traits.Str( - desc= - "Displacement field filename prefix for writing separate x, y, and z component images", - argstr="--outputDisplacementFieldPrefix %s") + desc="Displacement field filename prefix for writing separate x, y, and z component images", + argstr="--outputDisplacementFieldPrefix %s", + ) outputCheckerboardVolume = traits.Either( traits.Bool, File(), 
hash_files=False, - desc= - "Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", - argstr="--outputCheckerboardVolume %s") + desc="Generate a checkerboard image volume between the fixedVolume and the deformed movingVolume.", + argstr="--outputCheckerboardVolume %s", + ) checkerboardPatternSubdivisions = InputMultiPath( traits.Int, desc="Number of Checkerboard subdivisions in all 3 directions", sep=",", - argstr="--checkerboardPatternSubdivisions %s") + argstr="--checkerboardPatternSubdivisions %s", + ) outputNormalized = traits.Bool( - desc= - "Flag to warp and write the normalized images to output. In normalized images the image values are fit-scaled to be between 0 and the maximum storage type value.", - argstr="--outputNormalized ") + desc="Flag to warp and write the normalized images to output. In normalized images the image values are fit-scaled to be between 0 and the maximum storage type value.", + argstr="--outputNormalized ", + ) outputDebug = traits.Bool( - desc="Flag to write debugging images after each step.", - argstr="--outputDebug ") + desc="Flag to write debugging images after each step.", argstr="--outputDebug " + ) gradient_type = traits.Enum( "0", "1", "2", - desc= - "Type of gradient used for computing the demons force (0 is symmetrized, 1 is fixed image, 2 is moving image)", - argstr="--gradient_type %s") + desc="Type of gradient used for computing the demons force (0 is symmetrized, 1 is fixed image, 2 is moving image)", + argstr="--gradient_type %s", + ) upFieldSmoothing = traits.Float( desc="Smoothing sigma for the update field at each iteration", - argstr="--upFieldSmoothing %f") + argstr="--upFieldSmoothing %f", + ) max_step_length = traits.Float( desc="Maximum length of an update vector (0: no restriction)", - argstr="--max_step_length %f") + argstr="--max_step_length %f", + ) use_vanilla_dem = traits.Bool( - desc="Run vanilla demons algorithm", argstr="--use_vanilla_dem ") + desc="Run vanilla demons algorithm", argstr="--use_vanilla_dem " + ) gui = traits.Bool( - desc="Display intermediate image volumes for debugging", - argstr="--gui ") + desc="Display intermediate image volumes for debugging", argstr="--gui " + ) promptUser = traits.Bool( - desc= - "Prompt the user to hit enter each time an image is sent to the DebugImageViewer", - argstr="--promptUser ") + desc="Prompt the user to hit enter each time an image is sent to the DebugImageViewer", + argstr="--promptUser ", + ) numberOfBCHApproximationTerms = traits.Int( desc="Number of terms in the BCH expansion", - argstr="--numberOfBCHApproximationTerms %d") + argstr="--numberOfBCHApproximationTerms %d", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSDemonWarpOutputSpec(TraitedSpec): outputVolume = File( - desc= - "Required: output resampled moving image (will have the same physical space as the fixedVolume).", - exists=True) + desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", + exists=True, + ) outputDisplacementFieldVolume = File( - desc= - "Output deformation field vector image (will have the same physical space as the fixedVolume).", - exists=True) + desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", + exists=True, + ) outputCheckerboardVolume = File( - desc= - "Genete a checkerboard image volume between the fixedVolume and the deformed 
movingVolume.", - exists=True) + desc="Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", + exists=True, + ) class BRAINSDemonWarp(SEMLikeCommandLine): @@ -609,7 +655,7 @@ class BRAINSDemonWarp(SEMLikeCommandLine): output_spec = BRAINSDemonWarpOutputSpec _cmd = "BRAINSDemonWarp " _outputs_filenames = { - 'outputVolume': 'outputVolume.nii', - 'outputCheckerboardVolume': 'outputCheckerboardVolume.nii', - 'outputDisplacementFieldVolume': 'outputDisplacementFieldVolume.nrrd' + "outputVolume": "outputVolume.nii", + "outputCheckerboardVolume": "outputCheckerboardVolume.nii", + "outputDisplacementFieldVolume": "outputDisplacementFieldVolume.nrrd", } diff --git a/nipype/interfaces/slicer/registration/tests/test_auto_ACPCTransform.py b/nipype/interfaces/slicer/registration/tests/test_auto_ACPCTransform.py index f60ab0866b..e36498110b 100644 --- a/nipype/interfaces/slicer/registration/tests/test_auto_ACPCTransform.py +++ b/nipype/interfaces/slicer/registration/tests/test_auto_ACPCTransform.py @@ -4,26 +4,22 @@ def test_ACPCTransform_inputs(): input_map = dict( - acpc=dict(argstr='--acpc %s...', ), - args=dict(argstr='%s', ), - debugSwitch=dict(argstr='--debugSwitch ', ), - environ=dict( - nohash=True, - usedefault=True, - ), - midline=dict(argstr='--midline %s...', ), - outputTransform=dict( - argstr='--outputTransform %s', - hash_files=False, - ), + acpc=dict(argstr="--acpc %s...",), + args=dict(argstr="%s",), + debugSwitch=dict(argstr="--debugSwitch ",), + environ=dict(nohash=True, usedefault=True,), + midline=dict(argstr="--midline %s...",), + outputTransform=dict(argstr="--outputTransform %s", hash_files=False,), ) inputs = ACPCTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ACPCTransform_outputs(): - output_map = dict(outputTransform=dict(extensions=None, ), ) + output_map = dict(outputTransform=dict(extensions=None,),) outputs = ACPCTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSDemonWarp.py b/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSDemonWarp.py index efe6d50fce..4136c8105d 100644 --- a/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSDemonWarp.py +++ b/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSDemonWarp.py @@ -4,113 +4,77 @@ def test_BRAINSDemonWarp_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), arrayOfPyramidLevelIterations=dict( - argstr='--arrayOfPyramidLevelIterations %s', - sep=',', + argstr="--arrayOfPyramidLevelIterations %s", sep=",", ), - backgroundFillValue=dict(argstr='--backgroundFillValue %d', ), + backgroundFillValue=dict(argstr="--backgroundFillValue %d",), checkerboardPatternSubdivisions=dict( - argstr='--checkerboardPatternSubdivisions %s', - sep=',', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixedBinaryVolume=dict( - argstr='--fixedBinaryVolume %s', - extensions=None, - ), - fixedVolume=dict( - argstr='--fixedVolume %s', - extensions=None, - ), - gradient_type=dict(argstr='--gradient_type %s', ), - gui=dict(argstr='--gui ', ), - histogramMatch=dict(argstr='--histogramMatch ', ), + argstr="--checkerboardPatternSubdivisions %s", sep=",", + ), + environ=dict(nohash=True, usedefault=True,), + fixedBinaryVolume=dict(argstr="--fixedBinaryVolume %s", extensions=None,), + fixedVolume=dict(argstr="--fixedVolume 
%s", extensions=None,), + gradient_type=dict(argstr="--gradient_type %s",), + gui=dict(argstr="--gui ",), + histogramMatch=dict(argstr="--histogramMatch ",), initializeWithDisplacementField=dict( - argstr='--initializeWithDisplacementField %s', - extensions=None, + argstr="--initializeWithDisplacementField %s", extensions=None, ), initializeWithTransform=dict( - argstr='--initializeWithTransform %s', - extensions=None, - ), - inputPixelType=dict(argstr='--inputPixelType %s', ), - interpolationMode=dict(argstr='--interpolationMode %s', ), - lowerThresholdForBOBF=dict(argstr='--lowerThresholdForBOBF %d', ), - maskProcessingMode=dict(argstr='--maskProcessingMode %s', ), - max_step_length=dict(argstr='--max_step_length %f', ), - medianFilterSize=dict( - argstr='--medianFilterSize %s', - sep=',', - ), - minimumFixedPyramid=dict( - argstr='--minimumFixedPyramid %s', - sep=',', - ), - minimumMovingPyramid=dict( - argstr='--minimumMovingPyramid %s', - sep=',', - ), - movingBinaryVolume=dict( - argstr='--movingBinaryVolume %s', - extensions=None, - ), - movingVolume=dict( - argstr='--movingVolume %s', - extensions=None, - ), - neighborhoodForBOBF=dict( - argstr='--neighborhoodForBOBF %s', - sep=',', - ), + argstr="--initializeWithTransform %s", extensions=None, + ), + inputPixelType=dict(argstr="--inputPixelType %s",), + interpolationMode=dict(argstr="--interpolationMode %s",), + lowerThresholdForBOBF=dict(argstr="--lowerThresholdForBOBF %d",), + maskProcessingMode=dict(argstr="--maskProcessingMode %s",), + max_step_length=dict(argstr="--max_step_length %f",), + medianFilterSize=dict(argstr="--medianFilterSize %s", sep=",",), + minimumFixedPyramid=dict(argstr="--minimumFixedPyramid %s", sep=",",), + minimumMovingPyramid=dict(argstr="--minimumMovingPyramid %s", sep=",",), + movingBinaryVolume=dict(argstr="--movingBinaryVolume %s", extensions=None,), + movingVolume=dict(argstr="--movingVolume %s", extensions=None,), + neighborhoodForBOBF=dict(argstr="--neighborhoodForBOBF %s", sep=",",), numberOfBCHApproximationTerms=dict( - argstr='--numberOfBCHApproximationTerms %d', ), - numberOfHistogramBins=dict(argstr='--numberOfHistogramBins %d', ), - numberOfMatchPoints=dict(argstr='--numberOfMatchPoints %d', ), - numberOfPyramidLevels=dict(argstr='--numberOfPyramidLevels %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + argstr="--numberOfBCHApproximationTerms %d", + ), + numberOfHistogramBins=dict(argstr="--numberOfHistogramBins %d",), + numberOfMatchPoints=dict(argstr="--numberOfMatchPoints %d",), + numberOfPyramidLevels=dict(argstr="--numberOfPyramidLevels %d",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), outputCheckerboardVolume=dict( - argstr='--outputCheckerboardVolume %s', - hash_files=False, + argstr="--outputCheckerboardVolume %s", hash_files=False, ), - outputDebug=dict(argstr='--outputDebug ', ), + outputDebug=dict(argstr="--outputDebug ",), outputDisplacementFieldPrefix=dict( - argstr='--outputDisplacementFieldPrefix %s', ), - outputDisplacementFieldVolume=dict( - argstr='--outputDisplacementFieldVolume %s', - hash_files=False, - ), - outputNormalized=dict(argstr='--outputNormalized ', ), - outputPixelType=dict(argstr='--outputPixelType %s', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, + argstr="--outputDisplacementFieldPrefix %s", ), - promptUser=dict(argstr='--promptUser ', ), - registrationFilterType=dict(argstr='--registrationFilterType %s', ), - seedForBOBF=dict( - argstr='--seedForBOBF %s', - sep=',', - ), - 
smoothDisplacementFieldSigma=dict( - argstr='--smoothDisplacementFieldSigma %f', ), - upFieldSmoothing=dict(argstr='--upFieldSmoothing %f', ), - upperThresholdForBOBF=dict(argstr='--upperThresholdForBOBF %d', ), - use_vanilla_dem=dict(argstr='--use_vanilla_dem ', ), + outputDisplacementFieldVolume=dict( + argstr="--outputDisplacementFieldVolume %s", hash_files=False, + ), + outputNormalized=dict(argstr="--outputNormalized ",), + outputPixelType=dict(argstr="--outputPixelType %s",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + promptUser=dict(argstr="--promptUser ",), + registrationFilterType=dict(argstr="--registrationFilterType %s",), + seedForBOBF=dict(argstr="--seedForBOBF %s", sep=",",), + smoothDisplacementFieldSigma=dict(argstr="--smoothDisplacementFieldSigma %f",), + upFieldSmoothing=dict(argstr="--upFieldSmoothing %f",), + upperThresholdForBOBF=dict(argstr="--upperThresholdForBOBF %d",), + use_vanilla_dem=dict(argstr="--use_vanilla_dem ",), ) inputs = BRAINSDemonWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSDemonWarp_outputs(): output_map = dict( - outputCheckerboardVolume=dict(extensions=None, ), - outputDisplacementFieldVolume=dict(extensions=None, ), - outputVolume=dict(extensions=None, ), + outputCheckerboardVolume=dict(extensions=None,), + outputDisplacementFieldVolume=dict(extensions=None,), + outputVolume=dict(extensions=None,), ) outputs = BRAINSDemonWarp.output_spec() diff --git a/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSFit.py b/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSFit.py index 58c903d144..9d558f1e68 100644 --- a/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSFit.py +++ b/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSFit.py @@ -5,149 +5,103 @@ def test_BRAINSFit_inputs(): input_map = dict( NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_00=dict( - argstr='--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_00 ', ), + argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_00 ", + ), NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_01=dict( - argstr='--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_01 ', ), + argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_01 ", + ), NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_02=dict( - argstr='--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_02 ', ), - ROIAutoClosingSize=dict(argstr='--ROIAutoClosingSize %f', ), - ROIAutoDilateSize=dict(argstr='--ROIAutoDilateSize %f', ), - args=dict(argstr='%s', ), - backgroundFillValue=dict(argstr='--backgroundFillValue %f', ), - bsplineTransform=dict( - argstr='--bsplineTransform %s', - hash_files=False, + argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_02 ", ), + ROIAutoClosingSize=dict(argstr="--ROIAutoClosingSize %f",), + ROIAutoDilateSize=dict(argstr="--ROIAutoDilateSize %f",), + args=dict(argstr="%s",), + backgroundFillValue=dict(argstr="--backgroundFillValue %f",), + bsplineTransform=dict(argstr="--bsplineTransform %s", hash_files=False,), costFunctionConvergenceFactor=dict( - argstr='--costFunctionConvergenceFactor %f', ), - costMetric=dict(argstr='--costMetric %s', ), - debugLevel=dict(argstr='--debugLevel %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - failureExitCode=dict(argstr='--failureExitCode %d', ), - fixedBinaryVolume=dict( - argstr='--fixedBinaryVolume %s', - extensions=None, - ), - fixedVolume=dict( - argstr='--fixedVolume %s', - extensions=None, - ), - fixedVolumeTimeIndex=dict(argstr='--fixedVolumeTimeIndex %d', ), - 
forceMINumberOfThreads=dict(argstr='--forceMINumberOfThreads %d', ), - gui=dict(argstr='--gui ', ), - histogramMatch=dict(argstr='--histogramMatch ', ), - initialTransform=dict( - argstr='--initialTransform %s', - extensions=None, - ), - initializeTransformMode=dict(argstr='--initializeTransformMode %s', ), - interpolationMode=dict(argstr='--interpolationMode %s', ), - linearTransform=dict( - argstr='--linearTransform %s', - hash_files=False, - ), - maskInferiorCutOffFromCenter=dict( - argstr='--maskInferiorCutOffFromCenter %f', ), - maskProcessingMode=dict(argstr='--maskProcessingMode %s', ), - maxBSplineDisplacement=dict(argstr='--maxBSplineDisplacement %f', ), - maximumStepLength=dict(argstr='--maximumStepLength %f', ), - medianFilterSize=dict( - argstr='--medianFilterSize %s', - sep=',', - ), - minimumStepLength=dict( - argstr='--minimumStepLength %s', - sep=',', - ), - movingBinaryVolume=dict( - argstr='--movingBinaryVolume %s', - extensions=None, - ), - movingVolume=dict( - argstr='--movingVolume %s', - extensions=None, - ), - movingVolumeTimeIndex=dict(argstr='--movingVolumeTimeIndex %d', ), - numberOfHistogramBins=dict(argstr='--numberOfHistogramBins %d', ), - numberOfIterations=dict( - argstr='--numberOfIterations %s', - sep=',', - ), - numberOfMatchPoints=dict(argstr='--numberOfMatchPoints %d', ), - numberOfSamples=dict(argstr='--numberOfSamples %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + argstr="--costFunctionConvergenceFactor %f", + ), + costMetric=dict(argstr="--costMetric %s",), + debugLevel=dict(argstr="--debugLevel %d",), + environ=dict(nohash=True, usedefault=True,), + failureExitCode=dict(argstr="--failureExitCode %d",), + fixedBinaryVolume=dict(argstr="--fixedBinaryVolume %s", extensions=None,), + fixedVolume=dict(argstr="--fixedVolume %s", extensions=None,), + fixedVolumeTimeIndex=dict(argstr="--fixedVolumeTimeIndex %d",), + forceMINumberOfThreads=dict(argstr="--forceMINumberOfThreads %d",), + gui=dict(argstr="--gui ",), + histogramMatch=dict(argstr="--histogramMatch ",), + initialTransform=dict(argstr="--initialTransform %s", extensions=None,), + initializeTransformMode=dict(argstr="--initializeTransformMode %s",), + interpolationMode=dict(argstr="--interpolationMode %s",), + linearTransform=dict(argstr="--linearTransform %s", hash_files=False,), + maskInferiorCutOffFromCenter=dict(argstr="--maskInferiorCutOffFromCenter %f",), + maskProcessingMode=dict(argstr="--maskProcessingMode %s",), + maxBSplineDisplacement=dict(argstr="--maxBSplineDisplacement %f",), + maximumStepLength=dict(argstr="--maximumStepLength %f",), + medianFilterSize=dict(argstr="--medianFilterSize %s", sep=",",), + minimumStepLength=dict(argstr="--minimumStepLength %s", sep=",",), + movingBinaryVolume=dict(argstr="--movingBinaryVolume %s", extensions=None,), + movingVolume=dict(argstr="--movingVolume %s", extensions=None,), + movingVolumeTimeIndex=dict(argstr="--movingVolumeTimeIndex %d",), + numberOfHistogramBins=dict(argstr="--numberOfHistogramBins %d",), + numberOfIterations=dict(argstr="--numberOfIterations %s", sep=",",), + numberOfMatchPoints=dict(argstr="--numberOfMatchPoints %d",), + numberOfSamples=dict(argstr="--numberOfSamples %d",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), outputFixedVolumeROI=dict( - argstr='--outputFixedVolumeROI %s', - hash_files=False, + argstr="--outputFixedVolumeROI %s", hash_files=False, ), outputMovingVolumeROI=dict( - argstr='--outputMovingVolumeROI %s', - hash_files=False, - ), - outputTransform=dict( - 
argstr='--outputTransform %s', - hash_files=False, - ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - outputVolumePixelType=dict(argstr='--outputVolumePixelType %s', ), - permitParameterVariation=dict( - argstr='--permitParameterVariation %s', - sep=',', - ), - projectedGradientTolerance=dict( - argstr='--projectedGradientTolerance %f', ), - promptUser=dict(argstr='--promptUser ', ), - relaxationFactor=dict(argstr='--relaxationFactor %f', ), - removeIntensityOutliers=dict(argstr='--removeIntensityOutliers %f', ), - reproportionScale=dict(argstr='--reproportionScale %f', ), - scaleOutputValues=dict(argstr='--scaleOutputValues ', ), - skewScale=dict(argstr='--skewScale %f', ), - splineGridSize=dict( - argstr='--splineGridSize %s', - sep=',', - ), + argstr="--outputMovingVolumeROI %s", hash_files=False, + ), + outputTransform=dict(argstr="--outputTransform %s", hash_files=False,), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + outputVolumePixelType=dict(argstr="--outputVolumePixelType %s",), + permitParameterVariation=dict(argstr="--permitParameterVariation %s", sep=",",), + projectedGradientTolerance=dict(argstr="--projectedGradientTolerance %f",), + promptUser=dict(argstr="--promptUser ",), + relaxationFactor=dict(argstr="--relaxationFactor %f",), + removeIntensityOutliers=dict(argstr="--removeIntensityOutliers %f",), + reproportionScale=dict(argstr="--reproportionScale %f",), + scaleOutputValues=dict(argstr="--scaleOutputValues ",), + skewScale=dict(argstr="--skewScale %f",), + splineGridSize=dict(argstr="--splineGridSize %s", sep=",",), strippedOutputTransform=dict( - argstr='--strippedOutputTransform %s', - hash_files=False, + argstr="--strippedOutputTransform %s", hash_files=False, ), - transformType=dict( - argstr='--transformType %s', - sep=',', - ), - translationScale=dict(argstr='--translationScale %f', ), - useAffine=dict(argstr='--useAffine ', ), - useBSpline=dict(argstr='--useBSpline ', ), + transformType=dict(argstr="--transformType %s", sep=",",), + translationScale=dict(argstr="--translationScale %f",), + useAffine=dict(argstr="--useAffine ",), + useBSpline=dict(argstr="--useBSpline ",), useCachingOfBSplineWeightsMode=dict( - argstr='--useCachingOfBSplineWeightsMode %s', ), + argstr="--useCachingOfBSplineWeightsMode %s", + ), useExplicitPDFDerivativesMode=dict( - argstr='--useExplicitPDFDerivativesMode %s', ), - useRigid=dict(argstr='--useRigid ', ), - useScaleSkewVersor3D=dict(argstr='--useScaleSkewVersor3D ', ), - useScaleVersor3D=dict(argstr='--useScaleVersor3D ', ), - writeOutputTransformInFloat=dict( - argstr='--writeOutputTransformInFloat ', ), - writeTransformOnFailure=dict(argstr='--writeTransformOnFailure ', ), + argstr="--useExplicitPDFDerivativesMode %s", + ), + useRigid=dict(argstr="--useRigid ",), + useScaleSkewVersor3D=dict(argstr="--useScaleSkewVersor3D ",), + useScaleVersor3D=dict(argstr="--useScaleVersor3D ",), + writeOutputTransformInFloat=dict(argstr="--writeOutputTransformInFloat ",), + writeTransformOnFailure=dict(argstr="--writeTransformOnFailure ",), ) inputs = BRAINSFit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSFit_outputs(): output_map = dict( - bsplineTransform=dict(extensions=None, ), - linearTransform=dict(extensions=None, ), - outputFixedVolumeROI=dict(extensions=None, ), - outputMovingVolumeROI=dict(extensions=None, ), - outputTransform=dict(extensions=None, 
), - outputVolume=dict(extensions=None, ), - strippedOutputTransform=dict(extensions=None, ), + bsplineTransform=dict(extensions=None,), + linearTransform=dict(extensions=None,), + outputFixedVolumeROI=dict(extensions=None,), + outputMovingVolumeROI=dict(extensions=None,), + outputTransform=dict(extensions=None,), + outputVolume=dict(extensions=None,), + strippedOutputTransform=dict(extensions=None,), ) outputs = BRAINSFit.output_spec() diff --git a/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSResample.py b/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSResample.py index 092689442d..2d94f19a5f 100644 --- a/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSResample.py +++ b/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSResample.py @@ -4,48 +4,29 @@ def test_BRAINSResample_inputs(): input_map = dict( - args=dict(argstr='%s', ), - defaultValue=dict(argstr='--defaultValue %f', ), - deformationVolume=dict( - argstr='--deformationVolume %s', - extensions=None, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - gridSpacing=dict( - argstr='--gridSpacing %s', - sep=',', - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - interpolationMode=dict(argstr='--interpolationMode %s', ), - inverseTransform=dict(argstr='--inverseTransform ', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - pixelType=dict(argstr='--pixelType %s', ), - referenceVolume=dict( - argstr='--referenceVolume %s', - extensions=None, - ), - warpTransform=dict( - argstr='--warpTransform %s', - extensions=None, - ), + args=dict(argstr="%s",), + defaultValue=dict(argstr="--defaultValue %f",), + deformationVolume=dict(argstr="--deformationVolume %s", extensions=None,), + environ=dict(nohash=True, usedefault=True,), + gridSpacing=dict(argstr="--gridSpacing %s", sep=",",), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + interpolationMode=dict(argstr="--interpolationMode %s",), + inverseTransform=dict(argstr="--inverseTransform ",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + pixelType=dict(argstr="--pixelType %s",), + referenceVolume=dict(argstr="--referenceVolume %s", extensions=None,), + warpTransform=dict(argstr="--warpTransform %s", extensions=None,), ) inputs = BRAINSResample.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSResample_outputs(): - output_map = dict(outputVolume=dict(extensions=None, ), ) + output_map = dict(outputVolume=dict(extensions=None,),) outputs = BRAINSResample.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/registration/tests/test_auto_FiducialRegistration.py b/nipype/interfaces/slicer/registration/tests/test_auto_FiducialRegistration.py index e3e7124535..65c6016db6 100644 --- a/nipype/interfaces/slicer/registration/tests/test_auto_FiducialRegistration.py +++ b/nipype/interfaces/slicer/registration/tests/test_auto_FiducialRegistration.py @@ -4,28 +4,24 @@ def test_FiducialRegistration_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixedLandmarks=dict(argstr='--fixedLandmarks %s...', ), - movingLandmarks=dict(argstr='--movingLandmarks %s...', ), - outputMessage=dict(argstr='--outputMessage %s', 
), - rms=dict(argstr='--rms %f', ), - saveTransform=dict( - argstr='--saveTransform %s', - hash_files=False, - ), - transformType=dict(argstr='--transformType %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + fixedLandmarks=dict(argstr="--fixedLandmarks %s...",), + movingLandmarks=dict(argstr="--movingLandmarks %s...",), + outputMessage=dict(argstr="--outputMessage %s",), + rms=dict(argstr="--rms %f",), + saveTransform=dict(argstr="--saveTransform %s", hash_files=False,), + transformType=dict(argstr="--transformType %s",), ) inputs = FiducialRegistration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FiducialRegistration_outputs(): - output_map = dict(saveTransform=dict(extensions=None, ), ) + output_map = dict(saveTransform=dict(extensions=None,),) outputs = FiducialRegistration.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/registration/tests/test_auto_VBRAINSDemonWarp.py b/nipype/interfaces/slicer/registration/tests/test_auto_VBRAINSDemonWarp.py index fb0a3dd88e..098360c1e3 100644 --- a/nipype/interfaces/slicer/registration/tests/test_auto_VBRAINSDemonWarp.py +++ b/nipype/interfaces/slicer/registration/tests/test_auto_VBRAINSDemonWarp.py @@ -4,111 +4,78 @@ def test_VBRAINSDemonWarp_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), arrayOfPyramidLevelIterations=dict( - argstr='--arrayOfPyramidLevelIterations %s', - sep=',', + argstr="--arrayOfPyramidLevelIterations %s", sep=",", ), - backgroundFillValue=dict(argstr='--backgroundFillValue %d', ), + backgroundFillValue=dict(argstr="--backgroundFillValue %d",), checkerboardPatternSubdivisions=dict( - argstr='--checkerboardPatternSubdivisions %s', - sep=',', - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fixedBinaryVolume=dict( - argstr='--fixedBinaryVolume %s', - extensions=None, - ), - fixedVolume=dict(argstr='--fixedVolume %s...', ), - gradient_type=dict(argstr='--gradient_type %s', ), - gui=dict(argstr='--gui ', ), - histogramMatch=dict(argstr='--histogramMatch ', ), + argstr="--checkerboardPatternSubdivisions %s", sep=",", + ), + environ=dict(nohash=True, usedefault=True,), + fixedBinaryVolume=dict(argstr="--fixedBinaryVolume %s", extensions=None,), + fixedVolume=dict(argstr="--fixedVolume %s...",), + gradient_type=dict(argstr="--gradient_type %s",), + gui=dict(argstr="--gui ",), + histogramMatch=dict(argstr="--histogramMatch ",), initializeWithDisplacementField=dict( - argstr='--initializeWithDisplacementField %s', - extensions=None, + argstr="--initializeWithDisplacementField %s", extensions=None, ), initializeWithTransform=dict( - argstr='--initializeWithTransform %s', - extensions=None, - ), - inputPixelType=dict(argstr='--inputPixelType %s', ), - interpolationMode=dict(argstr='--interpolationMode %s', ), - lowerThresholdForBOBF=dict(argstr='--lowerThresholdForBOBF %d', ), - makeBOBF=dict(argstr='--makeBOBF ', ), - max_step_length=dict(argstr='--max_step_length %f', ), - medianFilterSize=dict( - argstr='--medianFilterSize %s', - sep=',', - ), - minimumFixedPyramid=dict( - argstr='--minimumFixedPyramid %s', - sep=',', - ), - minimumMovingPyramid=dict( - argstr='--minimumMovingPyramid %s', - sep=',', - ), - movingBinaryVolume=dict( - argstr='--movingBinaryVolume %s', - extensions=None, - ), - movingVolume=dict(argstr='--movingVolume %s...', ), - neighborhoodForBOBF=dict( - 
argstr='--neighborhoodForBOBF %s', - sep=',', - ), + argstr="--initializeWithTransform %s", extensions=None, + ), + inputPixelType=dict(argstr="--inputPixelType %s",), + interpolationMode=dict(argstr="--interpolationMode %s",), + lowerThresholdForBOBF=dict(argstr="--lowerThresholdForBOBF %d",), + makeBOBF=dict(argstr="--makeBOBF ",), + max_step_length=dict(argstr="--max_step_length %f",), + medianFilterSize=dict(argstr="--medianFilterSize %s", sep=",",), + minimumFixedPyramid=dict(argstr="--minimumFixedPyramid %s", sep=",",), + minimumMovingPyramid=dict(argstr="--minimumMovingPyramid %s", sep=",",), + movingBinaryVolume=dict(argstr="--movingBinaryVolume %s", extensions=None,), + movingVolume=dict(argstr="--movingVolume %s...",), + neighborhoodForBOBF=dict(argstr="--neighborhoodForBOBF %s", sep=",",), numberOfBCHApproximationTerms=dict( - argstr='--numberOfBCHApproximationTerms %d', ), - numberOfHistogramBins=dict(argstr='--numberOfHistogramBins %d', ), - numberOfMatchPoints=dict(argstr='--numberOfMatchPoints %d', ), - numberOfPyramidLevels=dict(argstr='--numberOfPyramidLevels %d', ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), + argstr="--numberOfBCHApproximationTerms %d", + ), + numberOfHistogramBins=dict(argstr="--numberOfHistogramBins %d",), + numberOfMatchPoints=dict(argstr="--numberOfMatchPoints %d",), + numberOfPyramidLevels=dict(argstr="--numberOfPyramidLevels %d",), + numberOfThreads=dict(argstr="--numberOfThreads %d",), outputCheckerboardVolume=dict( - argstr='--outputCheckerboardVolume %s', - hash_files=False, + argstr="--outputCheckerboardVolume %s", hash_files=False, ), - outputDebug=dict(argstr='--outputDebug ', ), + outputDebug=dict(argstr="--outputDebug ",), outputDisplacementFieldPrefix=dict( - argstr='--outputDisplacementFieldPrefix %s', ), - outputDisplacementFieldVolume=dict( - argstr='--outputDisplacementFieldVolume %s', - hash_files=False, - ), - outputNormalized=dict(argstr='--outputNormalized ', ), - outputPixelType=dict(argstr='--outputPixelType %s', ), - outputVolume=dict( - argstr='--outputVolume %s', - hash_files=False, - ), - promptUser=dict(argstr='--promptUser ', ), - registrationFilterType=dict(argstr='--registrationFilterType %s', ), - seedForBOBF=dict( - argstr='--seedForBOBF %s', - sep=',', - ), - smoothDisplacementFieldSigma=dict( - argstr='--smoothDisplacementFieldSigma %f', ), - upFieldSmoothing=dict(argstr='--upFieldSmoothing %f', ), - upperThresholdForBOBF=dict(argstr='--upperThresholdForBOBF %d', ), - use_vanilla_dem=dict(argstr='--use_vanilla_dem ', ), - weightFactors=dict( - argstr='--weightFactors %s', - sep=',', + argstr="--outputDisplacementFieldPrefix %s", ), + outputDisplacementFieldVolume=dict( + argstr="--outputDisplacementFieldVolume %s", hash_files=False, + ), + outputNormalized=dict(argstr="--outputNormalized ",), + outputPixelType=dict(argstr="--outputPixelType %s",), + outputVolume=dict(argstr="--outputVolume %s", hash_files=False,), + promptUser=dict(argstr="--promptUser ",), + registrationFilterType=dict(argstr="--registrationFilterType %s",), + seedForBOBF=dict(argstr="--seedForBOBF %s", sep=",",), + smoothDisplacementFieldSigma=dict(argstr="--smoothDisplacementFieldSigma %f",), + upFieldSmoothing=dict(argstr="--upFieldSmoothing %f",), + upperThresholdForBOBF=dict(argstr="--upperThresholdForBOBF %d",), + use_vanilla_dem=dict(argstr="--use_vanilla_dem ",), + weightFactors=dict(argstr="--weightFactors %s", sep=",",), ) inputs = VBRAINSDemonWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value 
in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_VBRAINSDemonWarp_outputs(): output_map = dict( - outputCheckerboardVolume=dict(extensions=None, ), - outputDisplacementFieldVolume=dict(extensions=None, ), - outputVolume=dict(extensions=None, ), + outputCheckerboardVolume=dict(extensions=None,), + outputDisplacementFieldVolume=dict(extensions=None,), + outputVolume=dict(extensions=None,), ) outputs = VBRAINSDemonWarp.output_spec() diff --git a/nipype/interfaces/slicer/segmentation/__init__.py b/nipype/interfaces/slicer/segmentation/__init__.py index 5b3cf6d468..48fdc62f8c 100644 --- a/nipype/interfaces/slicer/segmentation/__init__.py +++ b/nipype/interfaces/slicer/segmentation/__init__.py @@ -1,4 +1,3 @@ # -*- coding: utf-8 -*- -from .specialized import (RobustStatisticsSegmenter, EMSegmentCommandLine, - BRAINSROIAuto) +from .specialized import RobustStatisticsSegmenter, EMSegmentCommandLine, BRAINSROIAuto from .simpleregiongrowingsegmentation import SimpleRegionGrowingSegmentation diff --git a/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py b/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py index d466ccc1ac..a32a8dde5c 100644 --- a/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py +++ b/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py @@ -3,46 +3,57 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class SimpleRegionGrowingSegmentationInputSpec(CommandLineInputSpec): smoothingIterations = traits.Int( - desc="Number of smoothing iterations", - argstr="--smoothingIterations %d") - timestep = traits.Float( - desc="Timestep for curvature flow", argstr="--timestep %f") + desc="Number of smoothing iterations", argstr="--smoothingIterations %d" + ) + timestep = traits.Float(desc="Timestep for curvature flow", argstr="--timestep %f") iterations = traits.Int( - desc="Number of iterations of region growing", - argstr="--iterations %d") + desc="Number of iterations of region growing", argstr="--iterations %d" + ) multiplier = traits.Float( desc="Number of standard deviations to include in intensity model", - argstr="--multiplier %f") + argstr="--multiplier %f", + ) neighborhood = traits.Int( - desc= - "The radius of the neighborhood over which to calculate intensity model", - argstr="--neighborhood %d") + desc="The radius of the neighborhood over which to calculate intensity model", + argstr="--neighborhood %d", + ) labelvalue = traits.Int( - desc= - "The integer value (0-255) to use for the segmentation results. This will determine the color of the segmentation that will be generated by the Region growing algorithm", - argstr="--labelvalue %d") + desc="The integer value (0-255) to use for the segmentation results. 
This will determine the color of the segmentation that will be generated by the Region growing algorithm", + argstr="--labelvalue %d", + ) seed = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), desc="Seed point(s) for region growing", - argstr="--seed %s...") + argstr="--seed %s...", + ) inputVolume = File( - position=-2, - desc="Input volume to be filtered", - exists=True, - argstr="%s") + position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", - argstr="%s") + argstr="%s", + ) class SimpleRegionGrowingSegmentationOutputSpec(TraitedSpec): @@ -69,4 +80,4 @@ class SimpleRegionGrowingSegmentation(SEMLikeCommandLine): input_spec = SimpleRegionGrowingSegmentationInputSpec output_spec = SimpleRegionGrowingSegmentationOutputSpec _cmd = "SimpleRegionGrowingSegmentation " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} diff --git a/nipype/interfaces/slicer/segmentation/specialized.py b/nipype/interfaces/slicer/segmentation/specialized.py index fdfeb74e37..0ae90d0334 100644 --- a/nipype/interfaces/slicer/segmentation/specialized.py +++ b/nipype/interfaces/slicer/segmentation/specialized.py @@ -3,49 +3,59 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class RobustStatisticsSegmenterInputSpec(CommandLineInputSpec): expectedVolume = traits.Float( desc="The approximate volume of the object, in mL.", - argstr="--expectedVolume %f") + argstr="--expectedVolume %f", + ) intensityHomogeneity = traits.Float( - desc= - "What is the homogeneity of intensity within the object? Given constant intensity at 1.0 score and extreme fluctuating intensity at 0.", - argstr="--intensityHomogeneity %f") + desc="What is the homogeneity of intensity within the object? 
Given constant intensity at 1.0 score and extreme fluctuating intensity at 0.", argstr="--intensityHomogeneity %f", + ) curvatureWeight = traits.Float( - desc= - "Given sphere 1.0 score and extreme rough bounday/surface 0 score, what is the expected smoothness of the object?", - argstr="--curvatureWeight %f") + desc="Given sphere 1.0 score and extreme rough boundary/surface 0 score, what is the expected smoothness of the object?", + argstr="--curvatureWeight %f", + ) labelValue = traits.Int( - desc="Label value of the output image", argstr="--labelValue %d") + desc="Label value of the output image", argstr="--labelValue %d" + ) maxRunningTime = traits.Float( desc="The program will stop if this time is reached.", - argstr="--maxRunningTime %f") + argstr="--maxRunningTime %f", + ) originalImageFileName = File( - position=-3, - desc="Original image to be segmented", - exists=True, - argstr="%s") + position=-3, desc="Original image to be segmented", exists=True, argstr="%s" + ) labelImageFileName = File( - position=-2, - desc="Label image for initialization", - exists=True, - argstr="%s") + position=-2, desc="Label image for initialization", exists=True, argstr="%s" + ) segmentedImageFileName = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Segmented image", - argstr="%s") + argstr="%s", + ) class RobustStatisticsSegmenterOutputSpec(TraitedSpec): - segmentedImageFileName = File( - position=-1, desc="Segmented image", exists=True) + segmentedImageFileName = File(position=-1, desc="Segmented image", exists=True) class RobustStatisticsSegmenter(SEMLikeCommandLine): @@ -68,116 +78,118 @@ class RobustStatisticsSegmenter(SEMLikeCommandLine): input_spec = RobustStatisticsSegmenterInputSpec output_spec = RobustStatisticsSegmenterOutputSpec _cmd = "RobustStatisticsSegmenter " - _outputs_filenames = { - 'segmentedImageFileName': 'segmentedImageFileName.nii' - } + _outputs_filenames = {"segmentedImageFileName": "segmentedImageFileName.nii"} class EMSegmentCommandLineInputSpec(CommandLineInputSpec): mrmlSceneFileName = File( desc="Active MRML scene that contains EMSegment algorithm parameters.", exists=True, - argstr="--mrmlSceneFileName %s") + argstr="--mrmlSceneFileName %s", + ) resultVolumeFileName = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "The file name that the segmentation result volume will be written to.", - argstr="--resultVolumeFileName %s") + desc="The file name that the segmentation result volume will be written to.", - argstr="--resultVolumeFileName %s") + argstr="--resultVolumeFileName %s", + ) targetVolumeFileNames = InputMultiPath( File(exists=True), - desc= - "File names of target volumes (to be segmented). 
The number of target images must be equal to the number of target images specified in the parameter set, and these images must be spatially aligned.", + argstr="--targetVolumeFileNames %s...", + ) intermediateResultsDirectory = Directory( - desc= - "Directory where EMSegmenter will write intermediate data (e.g., aligned atlas data).", + desc="Directory where EMSegmenter will write intermediate data (e.g., aligned atlas data).", exists=True, - argstr="--intermediateResultsDirectory %s") + argstr="--intermediateResultsDirectory %s", + ) parametersMRMLNodeName = traits.Str( - desc= - "The name of the EMSegment parameters node within the active MRML scene. Leave blank for default.", - argstr="--parametersMRMLNodeName %s") + desc="The name of the EMSegment parameters node within the active MRML scene. Leave blank for default.", + argstr="--parametersMRMLNodeName %s", + ) disableMultithreading = traits.Int( - desc= - "Disable multithreading for the EMSegmenter algorithm only! Preprocessing might still run in multi-threaded mode. -1: Do not overwrite default value. 0: Disable. 1: Enable.", - argstr="--disableMultithreading %d") + desc="Disable multithreading for the EMSegmenter algorithm only! Preprocessing might still run in multi-threaded mode. -1: Do not overwrite default value. 0: Disable. 1: Enable.", + argstr="--disableMultithreading %d", + ) dontUpdateIntermediateData = traits.Int( - desc= - "Disable update of intermediate results. -1: Do not overwrite default value. 0: Disable. 1: Enable.", - argstr="--dontUpdateIntermediateData %d") + desc="Disable update of intermediate results. -1: Do not overwrite default value. 0: Disable. 1: Enable.", + argstr="--dontUpdateIntermediateData %d", + ) verbose = traits.Bool(desc="Enable verbose output.", argstr="--verbose ") loadTargetCentered = traits.Bool( - desc="Read target files centered.", argstr="--loadTargetCentered ") + desc="Read target files centered.", argstr="--loadTargetCentered " + ) loadAtlasNonCentered = traits.Bool( - desc="Read atlas files non-centered.", - argstr="--loadAtlasNonCentered ") + desc="Read atlas files non-centered.", argstr="--loadAtlasNonCentered " + ) taskPreProcessingSetting = traits.Str( desc="Specifies the different task parameter. Leave blank for default.", - argstr="--taskPreProcessingSetting %s") + argstr="--taskPreProcessingSetting %s", + ) keepTempFiles = traits.Bool( - desc= - "If flag is set then at the end of command the temporary files are not removed", - argstr="--keepTempFiles ") + desc="If flag is set then at the end of command the temporary files are not removed", + argstr="--keepTempFiles ", + ) resultStandardVolumeFileName = File( - desc= - "Used for testing. Compare segmentation results to this image and return EXIT_FAILURE if they do not match.", + desc="Used for testing. Compare segmentation results to this image and return EXIT_FAILURE if they do not match.", exists=True, - argstr="--resultStandardVolumeFileName %s") + argstr="--resultStandardVolumeFileName %s", + ) dontWriteResults = traits.Bool( - desc= - "Used for testing. Don't actually write the resulting labelmap to disk.", - argstr="--dontWriteResults ") + desc="Used for testing. Don't actually write the resulting labelmap to disk.", + argstr="--dontWriteResults ", + ) generateEmptyMRMLSceneAndQuit = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Used for testing. Only write a scene with default mrml parameters.", - argstr="--generateEmptyMRMLSceneAndQuit %s") + desc="Used for testing. 
Only write a scene with default mrml parameters.", + argstr="--generateEmptyMRMLSceneAndQuit %s", + ) resultMRMLSceneFileName = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Write out the MRML scene after command line substitutions have been made.", - argstr="--resultMRMLSceneFileName %s") + desc="Write out the MRML scene after command line substitutions have been made.", + argstr="--resultMRMLSceneFileName %s", + ) disableCompression = traits.Bool( desc="Don't use compression when writing result image to disk.", - argstr="--disableCompression ") + argstr="--disableCompression ", + ) atlasVolumeFileNames = InputMultiPath( File(exists=True), - desc= - "Use an alternative atlas to the one that is specified by the mrml file - note the order matters ! ", - argstr="--atlasVolumeFileNames %s...") + desc="Use an alternative atlas to the one that is specified by the mrml file - note the order matters ! ", + argstr="--atlasVolumeFileNames %s...", + ) registrationPackage = traits.Str( - desc= - "specify the registration package for preprocessing (CMTK or BRAINS or PLASTIMATCH or DEMONS)", - argstr="--registrationPackage %s") + desc="specify the registration package for preprocessing (CMTK or BRAINS or PLASTIMATCH or DEMONS)", + argstr="--registrationPackage %s", + ) registrationAffineType = traits.Int( - desc= - "specify the accuracy of the affine registration. -2: Do not overwrite default, -1: Test, 0: Disable, 1: Fast, 2: Accurate", - argstr="--registrationAffineType %d") + desc="specify the accuracy of the affine registration. -2: Do not overwrite default, -1: Test, 0: Disable, 1: Fast, 2: Accurate", + argstr="--registrationAffineType %d", + ) registrationDeformableType = traits.Int( - desc= - "specify the accuracy of the deformable registration. -2: Do not overwrite default, -1: Test, 0: Disable, 1: Fast, 2: Accurate", - argstr="--registrationDeformableType %d") + desc="specify the accuracy of the deformable registration. -2: Do not overwrite default, -1: Test, 0: Disable, 1: Fast, 2: Accurate", + argstr="--registrationDeformableType %d", + ) class EMSegmentCommandLineOutputSpec(TraitedSpec): resultVolumeFileName = File( - desc= - "The file name that the segmentation result volume will be written to.", - exists=True) + desc="The file name that the segmentation result volume will be written to.", + exists=True, + ) generateEmptyMRMLSceneAndQuit = File( - desc= - "Used for testing. Only write a scene with default mrml parameters.", - exists=True) + desc="Used for testing. 
Only write a scene with default mrml parameters.", + exists=True, + ) resultMRMLSceneFileName = File( - desc= - "Write out the MRML scene after command line substitutions have been made.", - exists=True) + desc="Write out the MRML scene after command line substitutions have been made.", + exists=True, + ) class EMSegmentCommandLine(SEMLikeCommandLine): @@ -206,9 +218,9 @@ class EMSegmentCommandLine(SEMLikeCommandLine): output_spec = EMSegmentCommandLineOutputSpec _cmd = "EMSegmentCommandLine " _outputs_filenames = { - 'generateEmptyMRMLSceneAndQuit': 'generateEmptyMRMLSceneAndQuit', - 'resultMRMLSceneFileName': 'resultMRMLSceneFileName', - 'resultVolumeFileName': 'resultVolumeFileName.mhd' + "generateEmptyMRMLSceneAndQuit": "generateEmptyMRMLSceneAndQuit", + "resultMRMLSceneFileName": "resultMRMLSceneFileName", + "resultVolumeFileName": "resultVolumeFileName.mhd", } @@ -216,34 +228,38 @@ class BRAINSROIAutoInputSpec(CommandLineInputSpec): inputVolume = File( desc="The input image for finding the largest region filled mask.", exists=True, - argstr="--inputVolume %s") + argstr="--inputVolume %s", + ) outputROIMaskVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="The ROI automatically found from the input image.", - argstr="--outputROIMaskVolume %s") + argstr="--outputROIMaskVolume %s", + ) outputClippedVolumeROI = traits.Either( traits.Bool, File(), hash_files=False, desc="The inputVolume clipped to the region of the brain mask.", - argstr="--outputClippedVolumeROI %s") + argstr="--outputClippedVolumeROI %s", + ) otsuPercentileThreshold = traits.Float( desc="Parameter to the Otsu threshold algorithm.", - argstr="--otsuPercentileThreshold %f") + argstr="--otsuPercentileThreshold %f", + ) thresholdCorrectionFactor = traits.Float( - desc= - "A factor to scale the Otsu algorithm's result threshold, in case clipping mangles the image.", - argstr="--thresholdCorrectionFactor %f") + desc="A factor to scale the Otsu algorithm's result threshold, in case clipping mangles the image.", + argstr="--thresholdCorrectionFactor %f", + ) closingSize = traits.Float( - desc= - "The Closing Size (in millimeters) for largest connected filled mask. This value is divided by image spacing and rounded to the next largest voxel number.", - argstr="--closingSize %f") + desc="The Closing Size (in millimeters) for largest connected filled mask. This value is divided by image spacing and rounded to the next largest voxel number.", + argstr="--closingSize %f", + ) ROIAutoDilateSize = traits.Float( - desc= - "This flag is only relavent when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. At setting of 10mm has been shown to help regularize a BSpline registration type so that there is some background constraints to match the edges of the head better.", - argstr="--ROIAutoDilateSize %f") + desc="This flag is only relevant when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. 
A setting of 10mm has been shown to help regularize a BSpline registration type so that there are some background constraints to match the edges of the head better.", + argstr="--ROIAutoDilateSize %f", + ) outputVolumePixelType = traits.Enum( "float", "short", "ushort", "int", "uint", "uchar", - desc= - "The output image Pixel Type is the scalar datatype for representation of the Output Volume.", - argstr="--outputVolumePixelType %s") + desc="The output image Pixel Type is the scalar datatype for representation of the Output Volume.", + argstr="--outputVolumePixelType %s", + ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", - argstr="--numberOfThreads %d") + argstr="--numberOfThreads %d", + ) class BRAINSROIAutoOutputSpec(TraitedSpec): outputROIMaskVolume = File( - desc="The ROI automatically found from the input image.", exists=True) + desc="The ROI automatically found from the input image.", exists=True + ) outputClippedVolumeROI = File( - desc="The inputVolume clipped to the region of the brain mask.", - exists=True) + desc="The inputVolume clipped to the region of the brain mask.", exists=True + ) class BRAINSROIAuto(SEMLikeCommandLine): @@ -289,6 +307,6 @@ class BRAINSROIAuto(SEMLikeCommandLine): output_spec = BRAINSROIAutoOutputSpec _cmd = "BRAINSROIAuto " _outputs_filenames = { - 'outputROIMaskVolume': 'outputROIMaskVolume.nii', - 'outputClippedVolumeROI': 'outputClippedVolumeROI.nii' + "outputROIMaskVolume": "outputROIMaskVolume.nii", + "outputClippedVolumeROI": "outputClippedVolumeROI.nii", } diff --git a/nipype/interfaces/slicer/segmentation/tests/test_auto_BRAINSROIAuto.py b/nipype/interfaces/slicer/segmentation/tests/test_auto_BRAINSROIAuto.py index 57959c7102..c88798ec2d 100644 --- a/nipype/interfaces/slicer/segmentation/tests/test_auto_BRAINSROIAuto.py +++ b/nipype/interfaces/slicer/segmentation/tests/test_auto_BRAINSROIAuto.py @@ -4,40 +4,31 @@ def test_BRAINSROIAuto_inputs(): input_map = dict( - ROIAutoDilateSize=dict(argstr='--ROIAutoDilateSize %f', ), - args=dict(argstr='%s', ), - closingSize=dict(argstr='--closingSize %f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='--inputVolume %s', - extensions=None, - ), - numberOfThreads=dict(argstr='--numberOfThreads %d', ), - otsuPercentileThreshold=dict(argstr='--otsuPercentileThreshold %f', ), + ROIAutoDilateSize=dict(argstr="--ROIAutoDilateSize %f",), + args=dict(argstr="%s",), + closingSize=dict(argstr="--closingSize %f",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="--inputVolume %s", extensions=None,), + numberOfThreads=dict(argstr="--numberOfThreads %d",), + otsuPercentileThreshold=dict(argstr="--otsuPercentileThreshold %f",), outputClippedVolumeROI=dict( - argstr='--outputClippedVolumeROI %s', - hash_files=False, - ), - outputROIMaskVolume=dict( - argstr='--outputROIMaskVolume %s', - hash_files=False, + argstr="--outputClippedVolumeROI %s", hash_files=False, ), - outputVolumePixelType=dict(argstr='--outputVolumePixelType %s', ), - thresholdCorrectionFactor=dict( - argstr='--thresholdCorrectionFactor %f', ), + outputROIMaskVolume=dict(argstr="--outputROIMaskVolume %s", hash_files=False,), + outputVolumePixelType=dict(argstr="--outputVolumePixelType %s",), + thresholdCorrectionFactor=dict(argstr="--thresholdCorrectionFactor %f",), ) inputs = BRAINSROIAuto.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BRAINSROIAuto_outputs(): output_map = dict( - outputClippedVolumeROI=dict(extensions=None, ), - outputROIMaskVolume=dict(extensions=None, ), + outputClippedVolumeROI=dict(extensions=None,), + outputROIMaskVolume=dict(extensions=None,), ) outputs = BRAINSROIAuto.output_spec() diff --git a/nipype/interfaces/slicer/segmentation/tests/test_auto_EMSegmentCommandLine.py b/nipype/interfaces/slicer/segmentation/tests/test_auto_EMSegmentCommandLine.py index 30a0ab2ade..241f58c6c0 100644 --- a/nipype/interfaces/slicer/segmentation/tests/test_auto_EMSegmentCommandLine.py +++ b/nipype/interfaces/slicer/segmentation/tests/test_auto_EMSegmentCommandLine.py @@ -4,62 +4,50 @@ def test_EMSegmentCommandLine_inputs(): input_map = dict( - args=dict(argstr='%s', ), - atlasVolumeFileNames=dict(argstr='--atlasVolumeFileNames %s...', ), - disableCompression=dict(argstr='--disableCompression ', ), - disableMultithreading=dict(argstr='--disableMultithreading %d', ), - dontUpdateIntermediateData=dict( - argstr='--dontUpdateIntermediateData %d', ), - dontWriteResults=dict(argstr='--dontWriteResults ', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), + atlasVolumeFileNames=dict(argstr="--atlasVolumeFileNames %s...",), + disableCompression=dict(argstr="--disableCompression ",), + disableMultithreading=dict(argstr="--disableMultithreading %d",), + dontUpdateIntermediateData=dict(argstr="--dontUpdateIntermediateData %d",), + dontWriteResults=dict(argstr="--dontWriteResults ",), + environ=dict(nohash=True, usedefault=True,), generateEmptyMRMLSceneAndQuit=dict( - argstr='--generateEmptyMRMLSceneAndQuit %s', - hash_files=False, - ), - intermediateResultsDirectory=dict( - argstr='--intermediateResultsDirectory %s', ), - keepTempFiles=dict(argstr='--keepTempFiles ', ), - loadAtlasNonCentered=dict(argstr='--loadAtlasNonCentered ', ), - loadTargetCentered=dict(argstr='--loadTargetCentered ', ), - mrmlSceneFileName=dict( - argstr='--mrmlSceneFileName %s', - extensions=None, + argstr="--generateEmptyMRMLSceneAndQuit %s", hash_files=False, ), - parametersMRMLNodeName=dict(argstr='--parametersMRMLNodeName %s', ), - registrationAffineType=dict(argstr='--registrationAffineType %d', ), - registrationDeformableType=dict( - argstr='--registrationDeformableType %d', ), - registrationPackage=dict(argstr='--registrationPackage %s', ), + intermediateResultsDirectory=dict(argstr="--intermediateResultsDirectory %s",), + keepTempFiles=dict(argstr="--keepTempFiles ",), + loadAtlasNonCentered=dict(argstr="--loadAtlasNonCentered ",), + loadTargetCentered=dict(argstr="--loadTargetCentered ",), + mrmlSceneFileName=dict(argstr="--mrmlSceneFileName %s", extensions=None,), + parametersMRMLNodeName=dict(argstr="--parametersMRMLNodeName %s",), + registrationAffineType=dict(argstr="--registrationAffineType %d",), + registrationDeformableType=dict(argstr="--registrationDeformableType %d",), + registrationPackage=dict(argstr="--registrationPackage %s",), resultMRMLSceneFileName=dict( - argstr='--resultMRMLSceneFileName %s', - hash_files=False, + argstr="--resultMRMLSceneFileName %s", hash_files=False, ), resultStandardVolumeFileName=dict( - argstr='--resultStandardVolumeFileName %s', - extensions=None, + argstr="--resultStandardVolumeFileName %s", extensions=None, ), resultVolumeFileName=dict( - argstr='--resultVolumeFileName %s', - hash_files=False, + argstr="--resultVolumeFileName %s", hash_files=False, ), - 
targetVolumeFileNames=dict(argstr='--targetVolumeFileNames %s...', ), - taskPreProcessingSetting=dict( - argstr='--taskPreProcessingSetting %s', ), - verbose=dict(argstr='--verbose ', ), + targetVolumeFileNames=dict(argstr="--targetVolumeFileNames %s...",), + taskPreProcessingSetting=dict(argstr="--taskPreProcessingSetting %s",), + verbose=dict(argstr="--verbose ",), ) inputs = EMSegmentCommandLine.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EMSegmentCommandLine_outputs(): output_map = dict( - generateEmptyMRMLSceneAndQuit=dict(extensions=None, ), - resultMRMLSceneFileName=dict(extensions=None, ), - resultVolumeFileName=dict(extensions=None, ), + generateEmptyMRMLSceneAndQuit=dict(extensions=None,), + resultMRMLSceneFileName=dict(extensions=None,), + resultVolumeFileName=dict(extensions=None,), ) outputs = EMSegmentCommandLine.output_spec() diff --git a/nipype/interfaces/slicer/segmentation/tests/test_auto_RobustStatisticsSegmenter.py b/nipype/interfaces/slicer/segmentation/tests/test_auto_RobustStatisticsSegmenter.py index 4d9bbee882..deed609acb 100644 --- a/nipype/interfaces/slicer/segmentation/tests/test_auto_RobustStatisticsSegmenter.py +++ b/nipype/interfaces/slicer/segmentation/tests/test_auto_RobustStatisticsSegmenter.py @@ -4,42 +4,26 @@ def test_RobustStatisticsSegmenter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - curvatureWeight=dict(argstr='--curvatureWeight %f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - expectedVolume=dict(argstr='--expectedVolume %f', ), - intensityHomogeneity=dict(argstr='--intensityHomogeneity %f', ), - labelImageFileName=dict( - argstr='%s', - extensions=None, - position=-2, - ), - labelValue=dict(argstr='--labelValue %d', ), - maxRunningTime=dict(argstr='--maxRunningTime %f', ), - originalImageFileName=dict( - argstr='%s', - extensions=None, - position=-3, - ), - segmentedImageFileName=dict( - argstr='%s', - hash_files=False, - position=-1, - ), + args=dict(argstr="%s",), + curvatureWeight=dict(argstr="--curvatureWeight %f",), + environ=dict(nohash=True, usedefault=True,), + expectedVolume=dict(argstr="--expectedVolume %f",), + intensityHomogeneity=dict(argstr="--intensityHomogeneity %f",), + labelImageFileName=dict(argstr="%s", extensions=None, position=-2,), + labelValue=dict(argstr="--labelValue %d",), + maxRunningTime=dict(argstr="--maxRunningTime %f",), + originalImageFileName=dict(argstr="%s", extensions=None, position=-3,), + segmentedImageFileName=dict(argstr="%s", hash_files=False, position=-1,), ) inputs = RobustStatisticsSegmenter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RobustStatisticsSegmenter_outputs(): - output_map = dict(segmentedImageFileName=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(segmentedImageFileName=dict(extensions=None, position=-1,),) outputs = RobustStatisticsSegmenter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/segmentation/tests/test_auto_SimpleRegionGrowingSegmentation.py b/nipype/interfaces/slicer/segmentation/tests/test_auto_SimpleRegionGrowingSegmentation.py index 2d3fe30de5..9da82507cc 100644 --- a/nipype/interfaces/slicer/segmentation/tests/test_auto_SimpleRegionGrowingSegmentation.py +++ 
b/nipype/interfaces/slicer/segmentation/tests/test_auto_SimpleRegionGrowingSegmentation.py @@ -4,39 +4,27 @@ def test_SimpleRegionGrowingSegmentation_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - iterations=dict(argstr='--iterations %d', ), - labelvalue=dict(argstr='--labelvalue %d', ), - multiplier=dict(argstr='--multiplier %f', ), - neighborhood=dict(argstr='--neighborhood %d', ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - seed=dict(argstr='--seed %s...', ), - smoothingIterations=dict(argstr='--smoothingIterations %d', ), - timestep=dict(argstr='--timestep %f', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume=dict(argstr="%s", extensions=None, position=-2,), + iterations=dict(argstr="--iterations %d",), + labelvalue=dict(argstr="--labelvalue %d",), + multiplier=dict(argstr="--multiplier %f",), + neighborhood=dict(argstr="--neighborhood %d",), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), + seed=dict(argstr="--seed %s...",), + smoothingIterations=dict(argstr="--smoothingIterations %d",), + timestep=dict(argstr="--timestep %f",), ) inputs = SimpleRegionGrowingSegmentation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SimpleRegionGrowingSegmentation_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = SimpleRegionGrowingSegmentation.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/surface.py b/nipype/interfaces/slicer/surface.py index 6a1dfe2cc0..970d067157 100644 --- a/nipype/interfaces/slicer/surface.py +++ b/nipype/interfaces/slicer/surface.py @@ -3,7 +3,18 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os @@ -11,12 +22,8 @@ class MergeModelsInputSpec(CommandLineInputSpec): Model1 = File(position=-3, desc="Model", exists=True, argstr="%s") Model2 = File(position=-2, desc="Model", exists=True, argstr="%s") ModelOutput = traits.Either( - traits.Bool, - File(), - position=-1, - hash_files=False, - desc="Model", - argstr="%s") + traits.Bool, File(), position=-1, hash_files=False, desc="Model", argstr="%s" + ) class MergeModelsOutputSpec(TraitedSpec): @@ -43,13 +50,12 @@ class MergeModels(SEMLikeCommandLine): input_spec = MergeModelsInputSpec output_spec = MergeModelsOutputSpec _cmd = "MergeModels " - _outputs_filenames = {'ModelOutput': 'ModelOutput.vtk'} + _outputs_filenames = {"ModelOutput": "ModelOutput.vtk"} class ModelToLabelMapInputSpec(CommandLineInputSpec): distance = traits.Float(desc="Sample distance", argstr="--distance %f") - InputVolume = File( - position=-3, desc="Input volume", exists=True, argstr="%s") + InputVolume = File(position=-3, desc="Input volume", exists=True, argstr="%s") surface = 
File(position=-2, desc="Model", exists=True, argstr="%s") OutputVolume = traits.Either( traits.Bool, @@ -57,7 +63,8 @@ class ModelToLabelMapInputSpec(CommandLineInputSpec): position=-1, hash_files=False, desc="The label volume", - argstr="%s") + argstr="%s", + ) class ModelToLabelMapOutputSpec(TraitedSpec): @@ -84,7 +91,7 @@ class ModelToLabelMap(SEMLikeCommandLine): input_spec = ModelToLabelMapInputSpec output_spec = ModelToLabelMapOutputSpec _cmd = "ModelToLabelMap " - _outputs_filenames = {'OutputVolume': 'OutputVolume.nii'} + _outputs_filenames = {"OutputVolume": "OutputVolume.nii"} class GrayscaleModelMakerInputSpec(CommandLineInputSpec): @@ -92,39 +99,43 @@ class GrayscaleModelMakerInputSpec(CommandLineInputSpec): position=-2, desc="Volume containing the input grayscale data.", exists=True, - argstr="%s") + argstr="%s", + ) OutputGeometry = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output that contains geometry model.", - argstr="%s") + argstr="%s", + ) threshold = traits.Float( - desc= - "Grayscale threshold of isosurface. The resulting surface of triangles separates the volume into voxels that lie above (inside) and below (outside) the threshold.", - argstr="--threshold %f") + desc="Grayscale threshold of isosurface. The resulting surface of triangles separates the volume into voxels that lie above (inside) and below (outside) the threshold.", + argstr="--threshold %f", + ) name = traits.Str(desc="Name to use for this model.", argstr="--name %s") smooth = traits.Int( desc="Number of smoothing iterations. If 0, no smoothing will be done.", - argstr="--smooth %d") + argstr="--smooth %d", + ) decimate = traits.Float( - desc= - "Target reduction during decimation, as a decimal percentage reduction in the number of polygons. If 0, no decimation will be done.", - argstr="--decimate %f") + desc="Target reduction during decimation, as a decimal percentage reduction in the number of polygons. If 0, no decimation will be done.", + argstr="--decimate %f", + ) splitnormals = traits.Bool( - desc= - "Splitting normals is useful for visualizing sharp features. However it creates holes in surfaces which affect measurements", - argstr="--splitnormals ") + desc="Splitting normals is useful for visualizing sharp features. However it creates holes in surfaces which affect measurements", + argstr="--splitnormals ", + ) pointnormals = traits.Bool( - desc= - "Calculate the point normals? Calculated point normals make the surface appear smooth. Without point normals, the surface will appear faceted.", - argstr="--pointnormals ") + desc="Calculate the point normals? Calculated point normals make the surface appear smooth. 
Without point normals, the surface will appear faceted.", + argstr="--pointnormals ", + ) class GrayscaleModelMakerOutputSpec(TraitedSpec): OutputGeometry = File( - position=-1, desc="Output that contains geometry model.", exists=True) + position=-1, desc="Output that contains geometry model.", exists=True + ) class GrayscaleModelMaker(SEMLikeCommandLine): @@ -149,24 +160,22 @@ class GrayscaleModelMaker(SEMLikeCommandLine): input_spec = GrayscaleModelMakerInputSpec output_spec = GrayscaleModelMakerOutputSpec _cmd = "GrayscaleModelMaker " - _outputs_filenames = {'OutputGeometry': 'OutputGeometry.vtk'} + _outputs_filenames = {"OutputGeometry": "OutputGeometry.vtk"} class ProbeVolumeWithModelInputSpec(CommandLineInputSpec): InputVolume = File( - position=-3, - desc="Volume to use to 'paint' the model", - exists=True, - argstr="%s") - InputModel = File( - position=-2, desc="Input model", exists=True, argstr="%s") + position=-3, desc="Volume to use to 'paint' the model", exists=True, argstr="%s" + ) + InputModel = File(position=-2, desc="Input model", exists=True, argstr="%s") OutputModel = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output 'painted' model", - argstr="%s") + argstr="%s", + ) class ProbeVolumeWithModelOutputSpec(TraitedSpec): @@ -193,34 +202,34 @@ class ProbeVolumeWithModel(SEMLikeCommandLine): input_spec = ProbeVolumeWithModelInputSpec output_spec = ProbeVolumeWithModelOutputSpec _cmd = "ProbeVolumeWithModel " - _outputs_filenames = {'OutputModel': 'OutputModel.vtk'} + _outputs_filenames = {"OutputModel": "OutputModel.vtk"} class LabelMapSmoothingInputSpec(CommandLineInputSpec): labelToSmooth = traits.Int( - desc= - "The label to smooth. All others will be ignored. If no label is selected by the user, the maximum label in the image is chosen by default.", - argstr="--labelToSmooth %d") + desc="The label to smooth. All others will be ignored. If no label is selected by the user, the maximum label in the image is chosen by default.", + argstr="--labelToSmooth %d", + ) numberOfIterations = traits.Int( desc="The number of iterations of the level set AntiAliasing algorithm", - argstr="--numberOfIterations %d") - maxRMSError = traits.Float( - desc="The maximum RMS error.", argstr="--maxRMSError %f") + argstr="--numberOfIterations %d", + ) + maxRMSError = traits.Float(desc="The maximum RMS error.", argstr="--maxRMSError %f") gaussianSigma = traits.Float( desc="The standard deviation of the Gaussian kernel", - argstr="--gaussianSigma %f") + argstr="--gaussianSigma %f", + ) inputVolume = File( - position=-2, - desc="Input label map to smooth", - exists=True, - argstr="%s") + position=-2, desc="Input label map to smooth", exists=True, argstr="%s" + ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Smoothed label map", - argstr="%s") + argstr="%s", + ) class LabelMapSmoothingOutputSpec(TraitedSpec): @@ -247,98 +256,98 @@ class LabelMapSmoothing(SEMLikeCommandLine): input_spec = LabelMapSmoothingInputSpec output_spec = LabelMapSmoothingOutputSpec _cmd = "LabelMapSmoothing " - _outputs_filenames = {'outputVolume': 'outputVolume.nii'} + _outputs_filenames = {"outputVolume": "outputVolume.nii"} class ModelMakerInputSpec(CommandLineInputSpec): InputVolume = File( position=-1, - desc= - "Input label map. The Input Volume drop down menu is populated with the label map volumes that are present in the scene, select one from which to generate models.", + desc="Input label map. 
The Input Volume drop down menu is populated with the label map volumes that are present in the scene, select one from which to generate models.", exists=True, - argstr="%s") + argstr="%s", + ) color = File( desc="Color table to make labels to colors and objects", exists=True, - argstr="--color %s") + argstr="--color %s", + ) modelSceneFile = traits.Either( traits.Bool, - InputMultiPath(File(), ), + InputMultiPath(File(),), hash_files=False, - desc= - "Generated models, under a model hierarchy node. Models are imported into Slicer under a model hierarchy node, and their colors are set by the color table associated with the input label map volume. The model hierarchy node must be created before running the model maker, by selecting Create New ModelHierarchy from the Models drop down menu. If you're running from the command line, a model hierarchy node in a new mrml scene will be created for you.", - argstr="--modelSceneFile %s...") + desc="Generated models, under a model hierarchy node. Models are imported into Slicer under a model hierarchy node, and their colors are set by the color table associated with the input label map volume. The model hierarchy node must be created before running the model maker, by selecting Create New ModelHierarchy from the Models drop down menu. If you're running from the command line, a model hierarchy node in a new mrml scene will be created for you.", + argstr="--modelSceneFile %s...", + ) name = traits.Str( - desc= - "Name to use for this model. Any text entered in the entry box will be the starting string for the created model file names. The label number and the color name will also be part of the file name. If making multiple models, use this as a prefix to the label and color name.", - argstr="--name %s") + desc="Name to use for this model. Any text entered in the entry box will be the starting string for the created model file names. The label number and the color name will also be part of the file name. If making multiple models, use this as a prefix to the label and color name.", + argstr="--name %s", + ) generateAll = traits.Bool( - desc= - "Generate models for all labels in the input volume. select this option if you want to create all models that correspond to all values in a labelmap volume (using the Joint Smoothing option below is useful with this option). Ignores Labels, Start Label, End Label settings. Skips label 0.", - argstr="--generateAll ") + desc="Generate models for all labels in the input volume. Select this option if you want to create all models that correspond to all values in a labelmap volume (using the Joint Smoothing option below is useful with this option). Ignores Labels, Start Label, End Label settings. Skips label 0.", + argstr="--generateAll ", + ) labels = InputMultiPath( traits.Int, - desc= - "A comma separated list of label values from which to make models. f you specify a list of Labels, it will override any start/end label settings. If you click Generate All Models it will override the list of labels and any start/end label settings.", + desc="A comma separated list of label values from which to make models. If you specify a list of Labels, it will override any start/end label settings. If you click Generate All Models it will override the list of labels and any start/end label settings.", sep=",", - argstr="--labels %s") + argstr="--labels %s", + ) start = traits.Int( - desc= - "If you want to specify a continuous range of labels from which to generate models, enter the lower label here. 
Voxel value from which to start making models. Used instead of the label list to specify a range (make sure the label list is empty or it will over ride this).", - argstr="--start %d") + desc="If you want to specify a continuous range of labels from which to generate models, enter the lower label here. Voxel value from which to start making models. Used instead of the label list to specify a range (make sure the label list is empty or it will override this).", + argstr="--start %d", + ) end = traits.Int( - desc= - "If you want to specify a continuous range of labels from which to generate models, enter the higher label here. Voxel value up to which to continue making models. Skip any values with zero voxels.", - argstr="--end %d") + desc="If you want to specify a continuous range of labels from which to generate models, enter the higher label here. Voxel value up to which to continue making models. Skip any values with zero voxels.", + argstr="--end %d", + ) skipUnNamed = traits.Bool( - desc= - "Select this to not generate models from labels that do not have names defined in the color look up table associated with the input label map. If true, only models which have an entry in the color table will be generated. If false, generate all models that exist within the label range.", - argstr="--skipUnNamed ") + desc="Select this to not generate models from labels that do not have names defined in the color look up table associated with the input label map. If true, only models which have an entry in the color table will be generated. If false, generate all models that exist within the label range.", + argstr="--skipUnNamed ", + ) jointsmooth = traits.Bool( - desc= - "This will ensure that all resulting models fit together smoothly, like jigsaw puzzle pieces. Otherwise the models will be smoothed independently and may overlap.", - argstr="--jointsmooth ") + desc="This will ensure that all resulting models fit together smoothly, like jigsaw puzzle pieces. Otherwise the models will be smoothed independently and may overlap.", + argstr="--jointsmooth ", + ) smooth = traits.Int( - desc= - "Here you can set the number of smoothing iterations for Laplacian smoothing, or the degree of the polynomial approximating the windowed Sinc function. Use 0 if you wish no smoothing. ", - argstr="--smooth %d") + desc="Here you can set the number of smoothing iterations for Laplacian smoothing, or the degree of the polynomial approximating the windowed Sinc function. Use 0 if you wish no smoothing. ", + argstr="--smooth %d", + ) filtertype = traits.Enum( "Sinc", "Laplacian", - desc= - "You can control the type of smoothing done on the models by selecting a filter type of either Sinc or Laplacian.", - argstr="--filtertype %s") + desc="You can control the type of smoothing done on the models by selecting a filter type of either Sinc or Laplacian.", + argstr="--filtertype %s", + ) decimate = traits.Float( - desc= - "Chose the target reduction in number of polygons as a decimal percentage (between 0 and 1) of the number of polygons. Specifies the percentage of triangles to be removed. 
For example, 0.1 means 10% reduction and 0.9 means 90% reduction.", - argstr="--decimate %f") + desc="Choose the target reduction in number of polygons as a decimal percentage (between 0 and 1) of the number of polygons. Specifies the percentage of triangles to be removed. For example, 0.1 means 10% reduction and 0.9 means 90% reduction.", + argstr="--decimate %f", + ) splitnormals = traits.Bool( - desc= - "Splitting normals is useful for visualizing sharp features. However it creates holes in surfaces which affects measurements.", - argstr="--splitnormals ") + desc="Splitting normals is useful for visualizing sharp features. However it creates holes in surfaces which affect measurements.", + argstr="--splitnormals ", + ) pointnormals = traits.Bool( - desc= - "Turn this flag on if you wish to calculate the normal vectors for the points.", - argstr="--pointnormals ") + desc="Turn this flag on if you wish to calculate the normal vectors for the points.", + argstr="--pointnormals ", + ) pad = traits.Bool( - desc= - "Pad the input volume with zero value voxels on all 6 faces in order to ensure the production of closed surfaces. Sets the origin translation and extent translation so that the models still line up with the unpadded input volume.", - argstr="--pad ") + desc="Pad the input volume with zero value voxels on all 6 faces in order to ensure the production of closed surfaces. Sets the origin translation and extent translation so that the models still line up with the unpadded input volume.", + argstr="--pad ", + ) saveIntermediateModels = traits.Bool( - desc= - "You can save a copy of the models after each of the intermediate steps (marching cubes, smoothing, and decimation if not joint smoothing, otherwise just after decimation). These intermediate models are not saved in the mrml file, you have to load them manually after turning off deleting temporary files in they python console (View ->Python Interactor) using the following command slicer.modules.modelmaker.cliModuleLogic().DeleteTemporaryFilesOff().", - argstr="--saveIntermediateModels ") + desc="You can save a copy of the models after each of the intermediate steps (marching cubes, smoothing, and decimation if not joint smoothing, otherwise just after decimation). These intermediate models are not saved in the mrml file, you have to load them manually after turning off deleting temporary files in the python console (View ->Python Interactor) using the following command slicer.modules.modelmaker.cliModuleLogic().DeleteTemporaryFilesOff().", + argstr="--saveIntermediateModels ", + ) debug = traits.Bool( - desc= - "turn this flag on in order to see debugging output (look in the Error Log window that is accessed via the View menu)", - argstr="--debug ") + desc="turn this flag on in order to see debugging output (look in the Error Log window that is accessed via the View menu)", + argstr="--debug ", + ) class ModelMakerOutputSpec(TraitedSpec): modelSceneFile = OutputMultiPath( File(exists=True), - desc= - "Generated models, under a model hierarchy node. Models are imported into Slicer under a model hierarchy node, and their colors are set by the color table associated with the input label map volume. The model hierarchy node must be created before running the model maker, by selecting Create New ModelHierarchy from the Models drop down menu. 
If you're running from the command line, a model hierarchy node in a new mrml scene will be created for you.", ) @@ -364,4 +373,4 @@ class ModelMaker(SEMLikeCommandLine): input_spec = ModelMakerInputSpec output_spec = ModelMakerOutputSpec _cmd = "ModelMaker " - _outputs_filenames = {'modelSceneFile': 'modelSceneFile.mrml'} + _outputs_filenames = {"modelSceneFile": "modelSceneFile.mrml"} diff --git a/nipype/interfaces/slicer/tests/test_auto_DicomToNrrdConverter.py b/nipype/interfaces/slicer/tests/test_auto_DicomToNrrdConverter.py index 3e6b0d875d..6411e0ee54 100644 --- a/nipype/interfaces/slicer/tests/test_auto_DicomToNrrdConverter.py +++ b/nipype/interfaces/slicer/tests/test_auto_DicomToNrrdConverter.py @@ -4,32 +4,25 @@ def test_DicomToNrrdConverter_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputDicomDirectory=dict(argstr='--inputDicomDirectory %s', ), - outputDirectory=dict( - argstr='--outputDirectory %s', - hash_files=False, - ), - outputVolume=dict(argstr='--outputVolume %s', ), - smallGradientThreshold=dict(argstr='--smallGradientThreshold %f', ), - useBMatrixGradientDirections=dict( - argstr='--useBMatrixGradientDirections ', ), - useIdentityMeaseurementFrame=dict( - argstr='--useIdentityMeaseurementFrame ', ), - writeProtocolGradientsFile=dict( - argstr='--writeProtocolGradientsFile ', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputDicomDirectory=dict(argstr="--inputDicomDirectory %s",), + outputDirectory=dict(argstr="--outputDirectory %s", hash_files=False,), + outputVolume=dict(argstr="--outputVolume %s",), + smallGradientThreshold=dict(argstr="--smallGradientThreshold %f",), + useBMatrixGradientDirections=dict(argstr="--useBMatrixGradientDirections ",), + useIdentityMeaseurementFrame=dict(argstr="--useIdentityMeaseurementFrame ",), + writeProtocolGradientsFile=dict(argstr="--writeProtocolGradientsFile ",), ) inputs = DicomToNrrdConverter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DicomToNrrdConverter_outputs(): - output_map = dict(outputDirectory=dict(), ) + output_map = dict(outputDirectory=dict(),) outputs = DicomToNrrdConverter.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/tests/test_auto_EMSegmentTransformToNewFormat.py b/nipype/interfaces/slicer/tests/test_auto_EMSegmentTransformToNewFormat.py index 371d7cb1b3..d65723f4af 100644 --- a/nipype/interfaces/slicer/tests/test_auto_EMSegmentTransformToNewFormat.py +++ b/nipype/interfaces/slicer/tests/test_auto_EMSegmentTransformToNewFormat.py @@ -4,28 +4,21 @@ def test_EMSegmentTransformToNewFormat_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputMRMLFileName=dict( - argstr='--inputMRMLFileName %s', - extensions=None, - ), - outputMRMLFileName=dict( - argstr='--outputMRMLFileName %s', - hash_files=False, - ), - templateFlag=dict(argstr='--templateFlag ', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputMRMLFileName=dict(argstr="--inputMRMLFileName %s", extensions=None,), + outputMRMLFileName=dict(argstr="--outputMRMLFileName %s", hash_files=False,), + templateFlag=dict(argstr="--templateFlag ",), ) inputs = EMSegmentTransformToNewFormat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): 
assert getattr(inputs.traits()[key], metakey) == value + + def test_EMSegmentTransformToNewFormat_outputs(): - output_map = dict(outputMRMLFileName=dict(extensions=None, ), ) + output_map = dict(outputMRMLFileName=dict(extensions=None,),) outputs = EMSegmentTransformToNewFormat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/tests/test_auto_GrayscaleModelMaker.py b/nipype/interfaces/slicer/tests/test_auto_GrayscaleModelMaker.py index 9b9cb3b367..eaaa00d788 100644 --- a/nipype/interfaces/slicer/tests/test_auto_GrayscaleModelMaker.py +++ b/nipype/interfaces/slicer/tests/test_auto_GrayscaleModelMaker.py @@ -4,38 +4,26 @@ def test_GrayscaleModelMaker_inputs(): input_map = dict( - InputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - OutputGeometry=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - args=dict(argstr='%s', ), - decimate=dict(argstr='--decimate %f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - name=dict(argstr='--name %s', ), - pointnormals=dict(argstr='--pointnormals ', ), - smooth=dict(argstr='--smooth %d', ), - splitnormals=dict(argstr='--splitnormals ', ), - threshold=dict(argstr='--threshold %f', ), + InputVolume=dict(argstr="%s", extensions=None, position=-2,), + OutputGeometry=dict(argstr="%s", hash_files=False, position=-1,), + args=dict(argstr="%s",), + decimate=dict(argstr="--decimate %f",), + environ=dict(nohash=True, usedefault=True,), + name=dict(argstr="--name %s",), + pointnormals=dict(argstr="--pointnormals ",), + smooth=dict(argstr="--smooth %d",), + splitnormals=dict(argstr="--splitnormals ",), + threshold=dict(argstr="--threshold %f",), ) inputs = GrayscaleModelMaker.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GrayscaleModelMaker_outputs(): - output_map = dict(OutputGeometry=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(OutputGeometry=dict(extensions=None, position=-1,),) outputs = GrayscaleModelMaker.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/tests/test_auto_LabelMapSmoothing.py b/nipype/interfaces/slicer/tests/test_auto_LabelMapSmoothing.py index c33abad34c..5077a0f23c 100644 --- a/nipype/interfaces/slicer/tests/test_auto_LabelMapSmoothing.py +++ b/nipype/interfaces/slicer/tests/test_auto_LabelMapSmoothing.py @@ -4,36 +4,24 @@ def test_LabelMapSmoothing_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - gaussianSigma=dict(argstr='--gaussianSigma %f', ), - inputVolume=dict( - argstr='%s', - extensions=None, - position=-2, - ), - labelToSmooth=dict(argstr='--labelToSmooth %d', ), - maxRMSError=dict(argstr='--maxRMSError %f', ), - numberOfIterations=dict(argstr='--numberOfIterations %d', ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + gaussianSigma=dict(argstr="--gaussianSigma %f",), + inputVolume=dict(argstr="%s", extensions=None, position=-2,), + labelToSmooth=dict(argstr="--labelToSmooth %d",), + maxRMSError=dict(argstr="--maxRMSError %f",), + numberOfIterations=dict(argstr="--numberOfIterations %d",), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), ) inputs = LabelMapSmoothing.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_LabelMapSmoothing_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(outputVolume=dict(extensions=None, position=-1,),) outputs = LabelMapSmoothing.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/tests/test_auto_MergeModels.py b/nipype/interfaces/slicer/tests/test_auto_MergeModels.py index 86fdd83399..43398aa45f 100644 --- a/nipype/interfaces/slicer/tests/test_auto_MergeModels.py +++ b/nipype/interfaces/slicer/tests/test_auto_MergeModels.py @@ -4,37 +4,21 @@ def test_MergeModels_inputs(): input_map = dict( - Model1=dict( - argstr='%s', - extensions=None, - position=-3, - ), - Model2=dict( - argstr='%s', - extensions=None, - position=-2, - ), - ModelOutput=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + Model1=dict(argstr="%s", extensions=None, position=-3,), + Model2=dict(argstr="%s", extensions=None, position=-2,), + ModelOutput=dict(argstr="%s", hash_files=False, position=-1,), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), ) inputs = MergeModels.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MergeModels_outputs(): - output_map = dict(ModelOutput=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(ModelOutput=dict(extensions=None, position=-1,),) outputs = MergeModels.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/tests/test_auto_ModelMaker.py b/nipype/interfaces/slicer/tests/test_auto_ModelMaker.py index ef6a3f3c0d..888b7e0477 100644 --- a/nipype/interfaces/slicer/tests/test_auto_ModelMaker.py +++ b/nipype/interfaces/slicer/tests/test_auto_ModelMaker.py @@ -4,50 +4,36 @@ def test_ModelMaker_inputs(): input_map = dict( - InputVolume=dict( - argstr='%s', - extensions=None, - position=-1, - ), - args=dict(argstr='%s', ), - color=dict( - argstr='--color %s', - extensions=None, - ), - debug=dict(argstr='--debug ', ), - decimate=dict(argstr='--decimate %f', ), - end=dict(argstr='--end %d', ), - environ=dict( - nohash=True, - usedefault=True, - ), - filtertype=dict(argstr='--filtertype %s', ), - generateAll=dict(argstr='--generateAll ', ), - jointsmooth=dict(argstr='--jointsmooth ', ), - labels=dict( - argstr='--labels %s', - sep=',', - ), - modelSceneFile=dict( - argstr='--modelSceneFile %s...', - hash_files=False, - ), - name=dict(argstr='--name %s', ), - pad=dict(argstr='--pad ', ), - pointnormals=dict(argstr='--pointnormals ', ), - saveIntermediateModels=dict(argstr='--saveIntermediateModels ', ), - skipUnNamed=dict(argstr='--skipUnNamed ', ), - smooth=dict(argstr='--smooth %d', ), - splitnormals=dict(argstr='--splitnormals ', ), - start=dict(argstr='--start %d', ), + InputVolume=dict(argstr="%s", extensions=None, position=-1,), + args=dict(argstr="%s",), + color=dict(argstr="--color %s", extensions=None,), + debug=dict(argstr="--debug ",), + decimate=dict(argstr="--decimate %f",), + end=dict(argstr="--end %d",), + environ=dict(nohash=True, usedefault=True,), + filtertype=dict(argstr="--filtertype %s",), + generateAll=dict(argstr="--generateAll ",), + jointsmooth=dict(argstr="--jointsmooth ",), + labels=dict(argstr="--labels %s", sep=",",), + 
modelSceneFile=dict(argstr="--modelSceneFile %s...", hash_files=False,), + name=dict(argstr="--name %s",), + pad=dict(argstr="--pad ",), + pointnormals=dict(argstr="--pointnormals ",), + saveIntermediateModels=dict(argstr="--saveIntermediateModels ",), + skipUnNamed=dict(argstr="--skipUnNamed ",), + smooth=dict(argstr="--smooth %d",), + splitnormals=dict(argstr="--splitnormals ",), + start=dict(argstr="--start %d",), ) inputs = ModelMaker.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ModelMaker_outputs(): - output_map = dict(modelSceneFile=dict(), ) + output_map = dict(modelSceneFile=dict(),) outputs = ModelMaker.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/tests/test_auto_ModelToLabelMap.py b/nipype/interfaces/slicer/tests/test_auto_ModelToLabelMap.py index 8c53d9dabe..ad7c305824 100644 --- a/nipype/interfaces/slicer/tests/test_auto_ModelToLabelMap.py +++ b/nipype/interfaces/slicer/tests/test_auto_ModelToLabelMap.py @@ -4,38 +4,22 @@ def test_ModelToLabelMap_inputs(): input_map = dict( - InputVolume=dict( - argstr='%s', - extensions=None, - position=-3, - ), - OutputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - args=dict(argstr='%s', ), - distance=dict(argstr='--distance %f', ), - environ=dict( - nohash=True, - usedefault=True, - ), - surface=dict( - argstr='%s', - extensions=None, - position=-2, - ), + InputVolume=dict(argstr="%s", extensions=None, position=-3,), + OutputVolume=dict(argstr="%s", hash_files=False, position=-1,), + args=dict(argstr="%s",), + distance=dict(argstr="--distance %f",), + environ=dict(nohash=True, usedefault=True,), + surface=dict(argstr="%s", extensions=None, position=-2,), ) inputs = ModelToLabelMap.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ModelToLabelMap_outputs(): - output_map = dict(OutputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(OutputVolume=dict(extensions=None, position=-1,),) outputs = ModelToLabelMap.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/tests/test_auto_OrientScalarVolume.py b/nipype/interfaces/slicer/tests/test_auto_OrientScalarVolume.py index 4e61a5c819..97e7d4ae38 100644 --- a/nipype/interfaces/slicer/tests/test_auto_OrientScalarVolume.py +++ b/nipype/interfaces/slicer/tests/test_auto_OrientScalarVolume.py @@ -4,33 +4,21 @@ def test_OrientScalarVolume_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - inputVolume1=dict( - argstr='%s', - extensions=None, - position=-2, - ), - orientation=dict(argstr='--orientation %s', ), - outputVolume=dict( - argstr='%s', - hash_files=False, - position=-1, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + inputVolume1=dict(argstr="%s", extensions=None, position=-2,), + orientation=dict(argstr="--orientation %s",), + outputVolume=dict(argstr="%s", hash_files=False, position=-1,), ) inputs = OrientScalarVolume.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_OrientScalarVolume_outputs(): - output_map = dict(outputVolume=dict( - extensions=None, - position=-1, - ), ) + output_map = 
dict(outputVolume=dict(extensions=None, position=-1,),) outputs = OrientScalarVolume.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/tests/test_auto_ProbeVolumeWithModel.py b/nipype/interfaces/slicer/tests/test_auto_ProbeVolumeWithModel.py index 98b572e4c8..c4b12dc7a0 100644 --- a/nipype/interfaces/slicer/tests/test_auto_ProbeVolumeWithModel.py +++ b/nipype/interfaces/slicer/tests/test_auto_ProbeVolumeWithModel.py @@ -4,37 +4,21 @@ def test_ProbeVolumeWithModel_inputs(): input_map = dict( - InputModel=dict( - argstr='%s', - extensions=None, - position=-2, - ), - InputVolume=dict( - argstr='%s', - extensions=None, - position=-3, - ), - OutputModel=dict( - argstr='%s', - hash_files=False, - position=-1, - ), - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + InputModel=dict(argstr="%s", extensions=None, position=-2,), + InputVolume=dict(argstr="%s", extensions=None, position=-3,), + OutputModel=dict(argstr="%s", hash_files=False, position=-1,), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), ) inputs = ProbeVolumeWithModel.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ProbeVolumeWithModel_outputs(): - output_map = dict(OutputModel=dict( - extensions=None, - position=-1, - ), ) + output_map = dict(OutputModel=dict(extensions=None, position=-1,),) outputs = ProbeVolumeWithModel.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/slicer/tests/test_auto_SlicerCommandLine.py b/nipype/interfaces/slicer/tests/test_auto_SlicerCommandLine.py index b8eeb5bd56..dda2e3d8f8 100644 --- a/nipype/interfaces/slicer/tests/test_auto_SlicerCommandLine.py +++ b/nipype/interfaces/slicer/tests/test_auto_SlicerCommandLine.py @@ -4,11 +4,7 @@ def test_SlicerCommandLine_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), environ=dict(nohash=True, usedefault=True,), ) inputs = SlicerCommandLine.input_spec() diff --git a/nipype/interfaces/slicer/utilities.py b/nipype/interfaces/slicer/utilities.py index 5faf640570..ad998e58fe 100644 --- a/nipype/interfaces/slicer/utilities.py +++ b/nipype/interfaces/slicer/utilities.py @@ -3,34 +3,45 @@ """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" -from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath +from nipype.interfaces.base import ( + CommandLine, + CommandLineInputSpec, + SEMLikeCommandLine, + TraitedSpec, + File, + Directory, + traits, + isdefined, + InputMultiPath, + OutputMultiPath, +) import os class EMSegmentTransformToNewFormatInputSpec(CommandLineInputSpec): inputMRMLFileName = File( - desc= - "Active MRML scene that contains EMSegment algorithm parameters in the format before 3.6.3 - please include absolute file name in path.", + desc="Active MRML scene that contains EMSegment algorithm parameters in the format before 3.6.3 - please include absolute file name in path.", exists=True, - argstr="--inputMRMLFileName %s") + argstr="--inputMRMLFileName %s", + ) outputMRMLFileName = traits.Either( traits.Bool, File(), hash_files=False, - desc= - "Write out the MRML scene after transformation to format 3.6.3 has been made. 
- has to be in the same directory as the input MRML file due to Slicer Core bug - please include absolute file name in path ", - argstr="--outputMRMLFileName %s") + desc="Write out the MRML scene after transformation to format 3.6.3 has been made. - has to be in the same directory as the input MRML file due to Slicer Core bug - please include absolute file name in path ", + argstr="--outputMRMLFileName %s", + ) templateFlag = traits.Bool( - desc= - "Set to true if the transformed mrml file should be used as template file ", - argstr="--templateFlag ") + desc="Set to true if the transformed mrml file should be used as template file ", + argstr="--templateFlag ", + ) class EMSegmentTransformToNewFormatOutputSpec(TraitedSpec): outputMRMLFileName = File( - desc= - "Write out the MRML scene after transformation to format 3.6.3 has been made. - has to be in the same directory as the input MRML file due to Slicer Core bug - please include absolute file name in path ", - exists=True) + desc="Write out the MRML scene after transformation to format 3.6.3 has been made. - has to be in the same directory as the input MRML file due to Slicer Core bug - please include absolute file name in path ", + exists=True, + ) class EMSegmentTransformToNewFormat(SEMLikeCommandLine): @@ -51,4 +62,4 @@ class EMSegmentTransformToNewFormat(SEMLikeCommandLine): input_spec = EMSegmentTransformToNewFormatInputSpec output_spec = EMSegmentTransformToNewFormatOutputSpec _cmd = "EMSegmentTransformToNewFormat " - _outputs_filenames = {'outputMRMLFileName': 'outputMRMLFileName.mrml'} + _outputs_filenames = {"outputMRMLFileName": "outputMRMLFileName.mrml"} diff --git a/nipype/interfaces/spm/__init__.py b/nipype/interfaces/spm/__init__.py index b97c828450..0d5c91abfb 100644 --- a/nipype/interfaces/spm/__init__.py +++ b/nipype/interfaces/spm/__init__.py @@ -3,14 +3,39 @@ # vi: set ft=python sts=4 ts=4 sw=4 et: """Top-level namespace for spm.""" -from .base import (Info, SPMCommand, logger, no_spm, scans_for_fname, - scans_for_fnames) -from .preprocess import (FieldMap, SliceTiming, Realign, RealignUnwarp, - Coregister, Normalize, Normalize12, Segment, - Smooth, NewSegment, DARTEL, DARTELNorm2MNI, - CreateWarped, VBMSegment) -from .model import (Level1Design, EstimateModel, EstimateContrast, Threshold, - OneSampleTTestDesign, TwoSampleTTestDesign, - PairedTTestDesign, MultipleRegressionDesign) -from .utils import (Analyze2nii, CalcCoregAffine, ApplyTransform, Reslice, - ApplyInverseDeformation, ResliceToReference, DicomImport) +from .base import Info, SPMCommand, logger, no_spm, scans_for_fname, scans_for_fnames +from .preprocess import ( + FieldMap, + SliceTiming, + Realign, + RealignUnwarp, + Coregister, + Normalize, + Normalize12, + Segment, + Smooth, + NewSegment, + DARTEL, + DARTELNorm2MNI, + CreateWarped, + VBMSegment, +) +from .model import ( + Level1Design, + EstimateModel, + EstimateContrast, + Threshold, + OneSampleTTestDesign, + TwoSampleTTestDesign, + PairedTTestDesign, + MultipleRegressionDesign, +) +from .utils import ( + Analyze2nii, + CalcCoregAffine, + ApplyTransform, + Reslice, + ApplyInverseDeformation, + ResliceToReference, + DicomImport, +) diff --git a/nipype/interfaces/spm/base.py b/nipype/interfaces/spm/base.py index 785e069c04..a70e0ab166 100644 --- a/nipype/interfaces/spm/base.py +++ b/nipype/interfaces/spm/base.py @@ -25,15 +25,23 @@ # Local imports from ... 
import logging from ...utils import spm_docs as sd, NUMPY_MMAP -from ..base import (BaseInterface, traits, isdefined, InputMultiPath, - BaseInterfaceInputSpec, Directory, Undefined, ImageFile, - PackageInfo) +from ..base import ( + BaseInterface, + traits, + isdefined, + InputMultiPath, + BaseInterfaceInputSpec, + Directory, + Undefined, + ImageFile, + PackageInfo, +) from ..base.traits_extension import NoDefaultSpecified from ..matlab import MatlabCommand from ...external.due import due, Doi, BibTeX -__docformat__ = 'restructuredtext' -logger = logging.getLogger('nipype.interface') +__docformat__ = "restructuredtext" +logger = logging.getLogger("nipype.interface") def func_is_3d(in_file): @@ -66,18 +74,18 @@ def scans_for_fname(fname): """ if isinstance(fname, list): - scans = np.zeros((len(fname), ), dtype=object) + scans = np.zeros((len(fname),), dtype=object) for sno, f in enumerate(fname): - scans[sno] = '%s,1' % f + scans[sno] = "%s,1" % f return scans img = load(fname, mmap=NUMPY_MMAP) if len(img.shape) == 3: - return np.array(('%s,1' % fname, ), dtype=object) + return np.array(("%s,1" % fname,), dtype=object) else: n_scans = img.shape[3] - scans = np.zeros((n_scans, ), dtype=object) + scans = np.zeros((n_scans,), dtype=object) for sno in range(n_scans): - scans[sno] = '%s,%d' % (fname, sno + 1) + scans[sno] = "%s,%d" % (fname, sno + 1) return scans @@ -98,7 +106,7 @@ def scans_for_fnames(fnames, keep4d=False, separate_sessions=False): if func_is_3d(fnames[0]): fnames = [fnames] if separate_sessions or keep4d: - flist = np.zeros((len(fnames), ), dtype=object) + flist = np.zeros((len(fnames),), dtype=object) for i, f in enumerate(fnames): if separate_sessions: if keep4d: @@ -128,6 +136,7 @@ class Info(PackageInfo): to any call in the Info class to maintain memoization. Otherwise, it will default to the parameters in the `getinfo` function below. """ + _path = None _name = None _command = None @@ -179,19 +188,22 @@ def getinfo(klass, matlab_cmd=None, paths=None, use_mcr=None): returns None of path not found """ - use_mcr = use_mcr or 'FORCE_SPMMCR' in os.environ - matlab_cmd = matlab_cmd or ((use_mcr and os.getenv('SPMMCRCMD')) - or os.getenv('MATLABCMD', 'matlab -nodesktop -nosplash')) - - if klass._name and klass._path and klass._version and \ - klass._command == matlab_cmd and klass._paths == paths: - - return { - 'name': klass._name, - 'path': klass._path, - 'release': klass._version - } - logger.debug('matlab command or path has changed. recomputing version.') + use_mcr = use_mcr or "FORCE_SPMMCR" in os.environ + matlab_cmd = matlab_cmd or ( + (use_mcr and os.getenv("SPMMCRCMD")) + or os.getenv("MATLABCMD", "matlab -nodesktop -nosplash") + ) + + if ( + klass._name + and klass._path + and klass._version + and klass._command == matlab_cmd + and klass._paths == paths + ): + + return {"name": klass._name, "path": klass._path, "release": klass._version} + logger.debug("matlab command or path has changed. 
recomputing version.") mlab = MatlabCommand(matlab_cmd=matlab_cmd, resource_monitor=False) mlab.inputs.mfile = False if paths: @@ -216,7 +228,7 @@ def getinfo(klass, matlab_cmd=None, paths=None, use_mcr=None): except (IOError, RuntimeError) as e: # if no Matlab at all -- exception could be raised # No Matlab -- no spm - logger.debug('%s', e) + logger.debug("%s", e) klass._version = None klass._path = None klass._name = None @@ -226,13 +238,13 @@ def getinfo(klass, matlab_cmd=None, paths=None, use_mcr=None): out = sd._strip_header(out.runtime.stdout) out_dict = {} - for part in out.split('|'): - key, val = part.split(':') + for part in out.split("|"): + key, val = part.split(":") out_dict[key] = val - klass._version = out_dict['release'] - klass._path = out_dict['path'] - klass._name = out_dict['name'] + klass._version = out_dict["release"] + klass._path = out_dict["path"] + klass._name = out_dict["name"] klass._command = matlab_cmd klass._paths = paths return out_dict @@ -243,23 +255,23 @@ def no_spm(): used with pytest.mark.skipif decorator to skip tests that will fail if spm is not installed""" - if 'NIPYPE_NO_MATLAB' in os.environ or Info.version() is None: + if "NIPYPE_NO_MATLAB" in os.environ or Info.version() is None: return True else: return False class SPMCommandInputSpec(BaseInterfaceInputSpec): - matlab_cmd = traits.Str(desc='matlab command to use') - paths = InputMultiPath(Directory(), desc='Paths to add to matlabpath') - mfile = traits.Bool(True, desc='Run m-code using m-file', usedefault=True) - use_mcr = traits.Bool(desc='Run m-code using SPM MCR') + matlab_cmd = traits.Str(desc="matlab command to use") + paths = InputMultiPath(Directory(), desc="Paths to add to matlabpath") + mfile = traits.Bool(True, desc="Run m-code using m-file", usedefault=True) + use_mcr = traits.Bool(desc="Run m-code using SPM MCR") use_v8struct = traits.Bool( True, - min_ver='8', + min_ver="8", usedefault=True, - desc=('Generate SPM8 and higher ' - 'compatible jobs')) + desc=("Generate SPM8 and higher " "compatible jobs"), + ) class SPMCommand(BaseInterface): @@ -267,36 +279,38 @@ class SPMCommand(BaseInterface): WARNING: Pseudo prototype class, meant to be subclassed """ + input_spec = SPMCommandInputSpec - _additional_metadata = ['field'] + _additional_metadata = ["field"] - _jobtype = 'basetype' - _jobname = 'basename' + _jobtype = "basetype" + _jobname = "basename" _matlab_cmd = None _paths = None _use_mcr = None - references_ = [{ - 'entry': - BibTeX( - "@book{FrackowiakFristonFrithDolanMazziotta1997," - "author={R.S.J. Frackowiak, K.J. Friston, C.D. Frith, R.J. Dolan, and J.C. Mazziotta}," - "title={Human Brain Function}," - "publisher={Academic Press USA}," - "year={1997}," - "}"), - 'description': - 'The fundamental text on Statistical Parametric Mapping (SPM)', - # 'path': "nipype.interfaces.spm", - 'tags': ['implementation'], - }] + references_ = [ + { + "entry": BibTeX( + "@book{FrackowiakFristonFrithDolanMazziotta1997," + "author={R.S.J. Frackowiak, K.J. Friston, C.D. Frith, R.J. Dolan, and J.C. 
Mazziotta}," + "title={Human Brain Function}," + "publisher={Academic Press USA}," + "year={1997}," + "}" + ), + "description": "The fundamental text on Statistical Parametric Mapping (SPM)", + # 'path': "nipype.interfaces.spm", + "tags": ["implementation"], + } + ] def __init__(self, **inputs): super(SPMCommand, self).__init__(**inputs) self.inputs.on_trait_change( - self._matlab_cmd_update, - ['matlab_cmd', 'mfile', 'paths', 'use_mcr']) + self._matlab_cmd_update, ["matlab_cmd", "mfile", "paths", "use_mcr"] + ) self._find_mlab_cmd_defaults() self._check_mlab_inputs() self._matlab_cmd_update() @@ -306,19 +320,16 @@ def set_mlab_paths(cls, matlab_cmd=None, paths=None, use_mcr=None): cls._matlab_cmd = matlab_cmd cls._paths = paths cls._use_mcr = use_mcr - info_dict = Info.getinfo( - matlab_cmd=matlab_cmd, - paths=paths, - use_mcr=use_mcr) + info_dict = Info.getinfo(matlab_cmd=matlab_cmd, paths=paths, use_mcr=use_mcr) def _find_mlab_cmd_defaults(self): # check if the user has set environment variables to enforce # the standalone (MCR) version of SPM - if self._use_mcr or 'FORCE_SPMMCR' in os.environ: + if self._use_mcr or "FORCE_SPMMCR" in os.environ: self._use_mcr = True if self._matlab_cmd is None: try: - self._matlab_cmd = os.environ['SPMMCRCMD'] + self._matlab_cmd = os.environ["SPMMCRCMD"] except KeyError: pass @@ -330,9 +341,11 @@ def _matlab_cmd_update(self): matlab_cmd=self.inputs.matlab_cmd, mfile=self.inputs.mfile, paths=self.inputs.paths, - resource_monitor=False) - self.mlab.inputs.script_file = 'pyscript_%s.m' % \ - self.__class__.__name__.split('.')[-1].lower() + resource_monitor=False, + ) + self.mlab.inputs.script_file = ( + "pyscript_%s.m" % self.__class__.__name__.split(".")[-1].lower() + ) if isdefined(self.inputs.use_mcr) and self.inputs.use_mcr: self.mlab.inputs.nodesktop = Undefined self.mlab.inputs.nosplash = Undefined @@ -345,10 +358,10 @@ def version(self): info_dict = Info.getinfo( matlab_cmd=self.inputs.matlab_cmd, paths=self.inputs.paths, - use_mcr=self.inputs.use_mcr) + use_mcr=self.inputs.use_mcr, + ) if info_dict: - return '%s.%s' % (info_dict['name'].split('SPM')[-1], - info_dict['release']) + return "%s.%s" % (info_dict["name"].split("SPM")[-1], info_dict["release"]) @property def jobtype(self): @@ -369,11 +382,12 @@ def _check_mlab_inputs(self): def _run_interface(self, runtime): """Executes the SPM function using MATLAB.""" self.mlab.inputs.script = self._make_matlab_command( - deepcopy(self._parse_inputs())) + deepcopy(self._parse_inputs()) + ) results = self.mlab.run() runtime.returncode = results.runtime.returncode if self.mlab.inputs.uses_mcr: - if 'Skipped' in results.runtime.stdout: + if "Skipped" in results.runtime.stdout: self.raise_exception(runtime) runtime.stdout = results.runtime.stdout runtime.stderr = results.runtime.stderr @@ -404,8 +418,8 @@ def _parse_inputs(self, skip=()): if not isdefined(value): continue field = spec.field - if '.' in field: - fields = field.split('.') + if "." in field: + fields = field.split(".") dictref = spmdict for f in fields[:-1]: if f not in list(dictref.keys()): @@ -443,9 +457,9 @@ def _reformat_dict_for_savemat(self, contents): return [newdict] except TypeError: - print('Requires dict input') + print("Requires dict input") - def _generate_job(self, prefix='', contents=None): + def _generate_job(self, prefix="", contents=None): """Recursive function to generate spm job specification as a string Parameters @@ -458,7 +472,7 @@ def _generate_job(self, prefix='', contents=None): matlab commands. 
""" - jobstring = '' + jobstring = "" if contents is None: return jobstring if isinstance(contents, list): @@ -482,22 +496,20 @@ def _generate_job(self, prefix='', contents=None): jobstring += "{...\n" for i, val in enumerate(contents): if isinstance(val, np.ndarray): - jobstring += self._generate_job( - prefix=None, contents=val) + jobstring += self._generate_job(prefix=None, contents=val) elif isinstance(val, list): items_format = [] for el in val: items_format += [ - '{}' if not isinstance(el, (str, bytes)) else - '\'{}\'' + "{}" if not isinstance(el, (str, bytes)) else "'{}'" ] - val_format = ', '.join(items_format).format - jobstring += '[{}];...\n'.format(val_format(*val)) + val_format = ", ".join(items_format).format + jobstring += "[{}];...\n".format(val_format(*val)) elif isinstance(val, (str, bytes)): - jobstring += '\'{}\';...\n'.format(val) + jobstring += "'{}';...\n".format(val) else: - jobstring += '%s;...\n' % str(val) - jobstring += '};\n' + jobstring += "%s;...\n" % str(val) + jobstring += "};\n" else: for i, val in enumerate(contents): for field in val.dtype.fields: @@ -548,36 +560,45 @@ def _make_matlab_command(self, contents, postscript=None): end\n """ if self.mlab.inputs.mfile: - if (isdefined(self.inputs.use_v8struct) - and self.inputs.use_v8struct): - mscript += self._generate_job('jobs{1}.spm.%s.%s' % - (self.jobtype, - self.jobname), contents[0]) + if isdefined(self.inputs.use_v8struct) and self.inputs.use_v8struct: + mscript += self._generate_job( + "jobs{1}.spm.%s.%s" % (self.jobtype, self.jobname), contents[0] + ) else: if self.jobname in [ - 'st', 'smooth', 'preproc', 'preproc8', 'fmri_spec', - 'fmri_est', 'factorial_design', 'defs' + "st", + "smooth", + "preproc", + "preproc8", + "fmri_spec", + "fmri_est", + "factorial_design", + "defs", ]: # parentheses - mscript += self._generate_job('jobs{1}.%s{1}.%s(1)' % - (self.jobtype, - self.jobname), contents[0]) + mscript += self._generate_job( + "jobs{1}.%s{1}.%s(1)" % (self.jobtype, self.jobname), + contents[0], + ) else: # curly brackets - mscript += self._generate_job('jobs{1}.%s{1}.%s{1}' % - (self.jobtype, - self.jobname), contents[0]) + mscript += self._generate_job( + "jobs{1}.%s{1}.%s{1}" % (self.jobtype, self.jobname), + contents[0], + ) else: from scipy.io import savemat + jobdef = { - 'jobs': [{ - self.jobtype: [{ - self.jobname: - self.reformat_dict_for_savemat(contents[0]) - }] - }] + "jobs": [ + { + self.jobtype: [ + {self.jobname: self.reformat_dict_for_savemat(contents[0])} + ] + } + ] } - savemat(os.path.join(cwd, 'pyjobs_%s.mat' % self.jobname), jobdef) + savemat(os.path.join(cwd, "pyjobs_%s.mat" % self.jobname), jobdef) mscript += "load pyjobs_%s;\n\n" % self.jobname mscript += """ spm_jobman(\'run\', jobs);\n @@ -596,9 +617,15 @@ def _make_matlab_command(self, contents, postscript=None): class ImageFileSPM(ImageFile): """Defines a trait whose value must be a NIfTI file.""" - def __init__(self, value=NoDefaultSpecified, exists=False, resolve=False, **metadata): + def __init__( + self, value=NoDefaultSpecified, exists=False, resolve=False, **metadata + ): """Create an ImageFileSPM trait.""" super(ImageFileSPM, self).__init__( - value=value, exists=exists, types=['nifti1', 'nifti2'], - allow_compressed=False, resolve=resolve, **metadata) - + value=value, + exists=exists, + types=["nifti1", "nifti2"], + allow_compressed=False, + resolve=resolve, + **metadata + ) diff --git a/nipype/interfaces/spm/model.py b/nipype/interfaces/spm/model.py index 4a1d4c80a2..20b16e0870 100644 --- 
a/nipype/interfaces/spm/model.py +++ b/nipype/interfaces/spm/model.py @@ -13,49 +13,57 @@ # Local imports from ... import logging -from ...utils.filemanip import (ensure_list, simplify_list, - split_filename) -from ..base import (Bunch, traits, TraitedSpec, File, Directory, - OutputMultiPath, InputMultiPath, isdefined) -from .base import (SPMCommand, SPMCommandInputSpec, scans_for_fnames, - ImageFileSPM) - -__docformat__ = 'restructuredtext' -iflogger = logging.getLogger('nipype.interface') +from ...utils.filemanip import ensure_list, simplify_list, split_filename +from ..base import ( + Bunch, + traits, + TraitedSpec, + File, + Directory, + OutputMultiPath, + InputMultiPath, + isdefined, +) +from .base import SPMCommand, SPMCommandInputSpec, scans_for_fnames, ImageFileSPM + +__docformat__ = "restructuredtext" +iflogger = logging.getLogger("nipype.interface") class Level1DesignInputSpec(SPMCommandInputSpec): spm_mat_dir = Directory( - exists=True, field='dir', desc='directory to store SPM.mat file (opt)') + exists=True, field="dir", desc="directory to store SPM.mat file (opt)" + ) timing_units = traits.Enum( - 'secs', - 'scans', - field='timing.units', - desc='units for specification of onsets', - mandatory=True) + "secs", + "scans", + field="timing.units", + desc="units for specification of onsets", + mandatory=True, + ) interscan_interval = traits.Float( - field='timing.RT', desc='Interscan interval in secs', mandatory=True) + field="timing.RT", desc="Interscan interval in secs", mandatory=True + ) microtime_resolution = traits.Int( - field='timing.fmri_t', - desc=('Number of time-bins per scan ' - 'in secs (opt)')) + field="timing.fmri_t", desc=("Number of time-bins per scan " "in secs (opt)") + ) microtime_onset = traits.Float( - field='timing.fmri_t0', - desc=('The onset/time-bin in seconds for ' - 'alignment (opt)')) + field="timing.fmri_t0", + desc=("The onset/time-bin in seconds for " "alignment (opt)"), + ) session_info = traits.Any( - field='sess', - desc=('Session specific information generated ' - 'by ``modelgen.SpecifyModel``'), - mandatory=True) + field="sess", + desc=("Session specific information generated " "by ``modelgen.SpecifyModel``"), + mandatory=True, + ) factor_info = traits.List( - traits.Dict(traits.Enum('name', 'levels')), - field='fact', - desc=('Factor specific information ' - 'file (opt)')) + traits.Dict(traits.Enum("name", "levels")), + field="fact", + desc=("Factor specific information " "file (opt)"), + ) bases = traits.Dict( - traits.Enum('hrf', 'fourier', 'fourier_han', 'gamma', 'fir'), - field='bases', + traits.Enum("hrf", "fourier", "fourier_han", "gamma", "fir"), + field="bases", desc=""" dict {'name':{'basesparam1':val,...}} name : string @@ -73,42 +81,46 @@ class Level1DesignInputSpec(SPMCommandInputSpec): order : int Number of basis functions """, - mandatory=True) + mandatory=True, + ) volterra_expansion_order = traits.Enum( - 1, 2, field='volt', desc=('Model interactions - ' - 'yes:1, no:2')) + 1, 2, field="volt", desc=("Model interactions - " "yes:1, no:2") + ) global_intensity_normalization = traits.Enum( - 'none', - 'scaling', - field='global', - desc=('Global intensity ' - 'normalization - ' - 'scaling or none')) + "none", + "scaling", + field="global", + desc=("Global intensity " "normalization - " "scaling or none"), + ) mask_image = File( - exists=True, - field='mask', - desc='Image for explicitly masking the analysis') + exists=True, field="mask", desc="Image for explicitly masking the analysis" + ) mask_threshold = traits.Either( - 
traits.Enum('-Inf'), + traits.Enum("-Inf"), traits.Float(), desc="Thresholding for the mask", - default='-Inf', - usedefault=True) + default="-Inf", + usedefault=True, + ) model_serial_correlations = traits.Enum( - 'AR(1)', - 'FAST', - 'none', - field='cvi', - desc=('Model serial correlations ' - 'AR(1), FAST or none. FAST ' - 'is available in SPM12')) + "AR(1)", + "FAST", + "none", + field="cvi", + desc=( + "Model serial correlations " + "AR(1), FAST or none. FAST " + "is available in SPM12" + ), + ) flags = traits.Dict( - desc='Additional arguments to the job, e.g., a common SPM operation is to ' - 'modify the default masking threshold (mthresh)') + desc="Additional arguments to the job, e.g., a common SPM operation is to " + "modify the default masking threshold (mthresh)" + ) class Level1DesignOutputSpec(TraitedSpec): - spm_mat_file = File(exists=True, desc='SPM mat file') + spm_mat_file = File(exists=True, desc="SPM mat file") class Level1Design(SPMCommand): @@ -132,15 +144,15 @@ class Level1Design(SPMCommand): input_spec = Level1DesignInputSpec output_spec = Level1DesignOutputSpec - _jobtype = 'stats' - _jobname = 'fmri_spec' + _jobtype = "stats" + _jobname = "fmri_spec" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if opt in ['spm_mat_dir', 'mask_image']: + if opt in ["spm_mat_dir", "mask_image"]: return np.array([str(val)], dtype=object) - if opt in ['session_info']: # , 'factor_info']: + if opt in ["session_info"]: # , 'factor_info']: if isinstance(val, dict): return [val] else: @@ -150,17 +162,17 @@ def _format_arg(self, opt, spec, val): def _parse_inputs(self): """validate spm realign options if set to None ignore """ - einputs = super(Level1Design, - self)._parse_inputs(skip=('mask_threshold', 'flags')) + einputs = super(Level1Design, self)._parse_inputs( + skip=("mask_threshold", "flags") + ) if isdefined(self.inputs.flags): - einputs[0].update( - {flag: val - for (flag, val) in self.inputs.flags.items()}) - for sessinfo in einputs[0]['sess']: - sessinfo['scans'] = scans_for_fnames( - ensure_list(sessinfo['scans']), keep4d=False) + einputs[0].update({flag: val for (flag, val) in self.inputs.flags.items()}) + for sessinfo in einputs[0]["sess"]: + sessinfo["scans"] = scans_for_fnames( + ensure_list(sessinfo["scans"]), keep4d=False + ) if not isdefined(self.inputs.spm_mat_dir): - einputs[0]['dir'] = np.array([str(os.getcwd())], dtype=object) + einputs[0]["dir"] = np.array([str(os.getcwd())], dtype=object) return einputs def _make_matlab_command(self, content): @@ -172,68 +184,79 @@ def _make_matlab_command(self, content): # SPM doesn't handle explicit masking properly, especially # when you want to use the entire mask image postscript = "load SPM;\n" - postscript += ("SPM.xM.VM = spm_vol('%s');\n" % simplify_list( - self.inputs.mask_image)) + postscript += "SPM.xM.VM = spm_vol('%s');\n" % simplify_list( + self.inputs.mask_image + ) postscript += "SPM.xM.I = 0;\n" postscript += "SPM.xM.T = [];\n" - postscript += ("SPM.xM.TH = ones(size(SPM.xM.TH))*(%s);\n" % - self.inputs.mask_threshold) - postscript += ("SPM.xM.xs = struct('Masking', " - "'explicit masking only');\n") + postscript += ( + "SPM.xM.TH = ones(size(SPM.xM.TH))*(%s);\n" % self.inputs.mask_threshold + ) + postscript += "SPM.xM.xs = struct('Masking', " "'explicit masking only');\n" postscript += "save SPM SPM;\n" else: postscript = None return super(Level1Design, self)._make_matlab_command( - content, postscript=postscript) + content, postscript=postscript + ) def 
_list_outputs(self): outputs = self._outputs().get() - spm = os.path.join(os.getcwd(), 'SPM.mat') - outputs['spm_mat_file'] = spm + spm = os.path.join(os.getcwd(), "SPM.mat") + outputs["spm_mat_file"] = spm return outputs class EstimateModelInputSpec(SPMCommandInputSpec): spm_mat_file = File( exists=True, - field='spmmat', + field="spmmat", copyfile=True, mandatory=True, - desc='Absolute path to SPM.mat') + desc="Absolute path to SPM.mat", + ) estimation_method = traits.Dict( - traits.Enum('Classical', 'Bayesian2', 'Bayesian'), - field='method', + traits.Enum("Classical", "Bayesian2", "Bayesian"), + field="method", mandatory=True, - desc=('Dictionary of either Classical: 1, Bayesian: 1, ' - 'or Bayesian2: 1 (dict)')) + desc=( + "Dictionary of either Classical: 1, Bayesian: 1, " "or Bayesian2: 1 (dict)" + ), + ) write_residuals = traits.Bool( - field='write_residuals', desc="Write individual residual images") - flags = traits.Dict(desc='Additional arguments') + field="write_residuals", desc="Write individual residual images" + ) + flags = traits.Dict(desc="Additional arguments") class EstimateModelOutputSpec(TraitedSpec): - mask_image = ImageFileSPM( - exists=True, desc='binary mask to constrain estimation') + mask_image = ImageFileSPM(exists=True, desc="binary mask to constrain estimation") beta_images = OutputMultiPath( - ImageFileSPM(exists=True), desc='design parameter estimates') + ImageFileSPM(exists=True), desc="design parameter estimates" + ) residual_image = ImageFileSPM( - exists=True, desc='Mean-squared image of the residuals') + exists=True, desc="Mean-squared image of the residuals" + ) residual_images = OutputMultiPath( ImageFileSPM(exists=True), - desc="individual residual images (requires `write_residuals`") - RPVimage = ImageFileSPM(exists=True, desc='Resels per voxel image') - spm_mat_file = File(exists=True, desc='Updated SPM mat file') + desc="individual residual images (requires `write_residuals`)", + ) + RPVimage = ImageFileSPM(exists=True, desc="Resels per voxel image") + spm_mat_file = File(exists=True, desc="Updated SPM mat file") labels = ImageFileSPM(exists=True, desc="label file") SDerror = OutputMultiPath( - ImageFileSPM(exists=True), - desc="Images of the standard deviation of the error") + ImageFileSPM(exists=True), desc="Images of the standard deviation of the error" + ) ARcoef = OutputMultiPath( - ImageFileSPM(exists=True), desc="Images of the AR coefficient") + ImageFileSPM(exists=True), desc="Images of the AR coefficient" + ) Cbetas = OutputMultiPath( - ImageFileSPM(exists=True), desc="Images of the parameter posteriors") + ImageFileSPM(exists=True), desc="Images of the parameter posteriors" + ) SDbetas = OutputMultiPath( ImageFileSPM(exists=True), - desc="Images of the standard deviation of parameter posteriors") + desc="Images of the standard deviation of parameter posteriors", + ) class EstimateModel(SPMCommand): @@ -248,19 +271,20 @@ class EstimateModel(SPMCommand): >>> est.inputs.estimation_method = {'Classical': 1} >>> est.run() # doctest: +SKIP """ + input_spec = EstimateModelInputSpec output_spec = EstimateModelOutputSpec - _jobtype = 'stats' - _jobname = 'fmri_est' + _jobtype = "stats" + _jobname = "fmri_est" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if opt == 'spm_mat_file': + if opt == "spm_mat_file": return np.array([str(val)], dtype=object) - if opt == 'estimation_method': + if opt == "estimation_method": if isinstance(val, (str, bytes)): - return {'{}'.format(val): 1} + return 
{"{}".format(val): 1} else: return val return super(EstimateModel, self)._format_arg(opt, spec, val) @@ -268,106 +292,132 @@ def _format_arg(self, opt, spec, val): def _parse_inputs(self): """validate spm realign options if set to None ignore """ - einputs = super(EstimateModel, self)._parse_inputs(skip=('flags')) + einputs = super(EstimateModel, self)._parse_inputs(skip=("flags")) if isdefined(self.inputs.flags): - einputs[0].update( - {flag: val - for (flag, val) in self.inputs.flags.items()}) + einputs[0].update({flag: val for (flag, val) in self.inputs.flags.items()}) return einputs def _list_outputs(self): import scipy.io as sio + outputs = self._outputs().get() pth = os.path.dirname(self.inputs.spm_mat_file) - outtype = 'nii' if '12' in self.version.split('.')[0] else 'img' + outtype = "nii" if "12" in self.version.split(".")[0] else "img" spm = sio.loadmat(self.inputs.spm_mat_file, struct_as_record=False) - betas = [vbeta.fname[0] for vbeta in spm['SPM'][0, 0].Vbeta[0]] - if ('Bayesian' in self.inputs.estimation_method.keys() - or 'Bayesian2' in self.inputs.estimation_method.keys()): - outputs['labels'] = os.path.join(pth, 'labels.{}'.format(outtype)) - outputs['SDerror'] = glob(os.path.join(pth, 'Sess*_SDerror*')) - outputs['ARcoef'] = glob(os.path.join(pth, 'Sess*_AR_*')) + betas = [vbeta.fname[0] for vbeta in spm["SPM"][0, 0].Vbeta[0]] + if ( + "Bayesian" in self.inputs.estimation_method.keys() + or "Bayesian2" in self.inputs.estimation_method.keys() + ): + outputs["labels"] = os.path.join(pth, "labels.{}".format(outtype)) + outputs["SDerror"] = glob(os.path.join(pth, "Sess*_SDerror*")) + outputs["ARcoef"] = glob(os.path.join(pth, "Sess*_AR_*")) if betas: - outputs['Cbetas'] = [ - os.path.join(pth, 'C{}'.format(beta)) for beta in betas + outputs["Cbetas"] = [ + os.path.join(pth, "C{}".format(beta)) for beta in betas ] - outputs['SDbetas'] = [ - os.path.join(pth, 'SD{}'.format(beta)) for beta in betas + outputs["SDbetas"] = [ + os.path.join(pth, "SD{}".format(beta)) for beta in betas ] - if 'Classical' in self.inputs.estimation_method.keys(): - outputs['residual_image'] = os.path.join( - pth, 'ResMS.{}'.format(outtype)) - outputs['RPVimage'] = os.path.join(pth, 'RPV.{}'.format(outtype)) + if "Classical" in self.inputs.estimation_method.keys(): + outputs["residual_image"] = os.path.join(pth, "ResMS.{}".format(outtype)) + outputs["RPVimage"] = os.path.join(pth, "RPV.{}".format(outtype)) if self.inputs.write_residuals: - outputs['residual_images'] = glob(os.path.join(pth, 'Res_*')) + outputs["residual_images"] = glob(os.path.join(pth, "Res_*")) if betas: - outputs['beta_images'] = [ - os.path.join(pth, beta) for beta in betas - ] + outputs["beta_images"] = [os.path.join(pth, beta) for beta in betas] - outputs['mask_image'] = os.path.join(pth, 'mask.{}'.format(outtype)) - outputs['spm_mat_file'] = os.path.join(pth, 'SPM.mat') + outputs["mask_image"] = os.path.join(pth, "mask.{}".format(outtype)) + outputs["spm_mat_file"] = os.path.join(pth, "SPM.mat") return outputs class EstimateContrastInputSpec(SPMCommandInputSpec): spm_mat_file = File( exists=True, - field='spmmat', - desc='Absolute path to SPM.mat', + field="spmmat", + desc="Absolute path to SPM.mat", copyfile=True, - mandatory=True) + mandatory=True, + ) contrasts = traits.List( traits.Either( - traits.Tuple(traits.Str, traits.Enum('T'), traits.List(traits.Str), - traits.List(traits.Float)), - traits.Tuple(traits.Str, traits.Enum('T'), traits.List(traits.Str), - traits.List(traits.Float), traits.List(traits.Float)), - 
traits.Tuple(traits.Str, traits.Enum('F'), - traits.List( - traits.Either( - traits.Tuple(traits.Str, traits.Enum('T'), - traits.List(traits.Str), - traits.List(traits.Float)), - traits.Tuple(traits.Str, traits.Enum('T'), - traits.List(traits.Str), - traits.List(traits.Float), - traits.List(traits.Float)))))), + traits.Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + ), + traits.Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + traits.List(traits.Float), + ), + traits.Tuple( + traits.Str, + traits.Enum("F"), + traits.List( + traits.Either( + traits.Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + ), + traits.Tuple( + traits.Str, + traits.Enum("T"), + traits.List(traits.Str), + traits.List(traits.Float), + traits.List(traits.Float), + ), + ) + ), + ), + ), desc="""List of contrasts with each contrast being a list of the form: [('name', 'stat', [condition list], [weight list], [session list])] If session list is None or not provided, all sessions are used. For F contrasts, the condition list should contain previously defined T-contrasts.""", - mandatory=True) + mandatory=True, + ) beta_images = InputMultiPath( File(exists=True), - desc=('Parameter estimates of the ' - 'design matrix'), + desc=("Parameter estimates of the " "design matrix"), copyfile=False, - mandatory=True) + mandatory=True, + ) residual_image = File( exists=True, - desc='Mean-squared image of the residuals', + desc="Mean-squared image of the residuals", copyfile=False, - mandatory=True) + mandatory=True, + ) use_derivs = traits.Bool( - desc='use derivatives for estimation', xor=['group_contrast']) - group_contrast = traits.Bool( - desc='higher level contrast', xor=['use_derivs']) + desc="use derivatives for estimation", xor=["group_contrast"] + ) + group_contrast = traits.Bool(desc="higher level contrast", xor=["use_derivs"]) class EstimateContrastOutputSpec(TraitedSpec): con_images = OutputMultiPath( - File(exists=True), desc='contrast images from a t-contrast') + File(exists=True), desc="contrast images from a t-contrast" + ) spmT_images = OutputMultiPath( - File(exists=True), desc='stat images from a t-contrast') + File(exists=True), desc="stat images from a t-contrast" + ) ess_images = OutputMultiPath( - File(exists=True), desc='contrast images from an F-contrast') + File(exists=True), desc="contrast images from an F-contrast" + ) spmF_images = OutputMultiPath( - File(exists=True), desc='stat images from an F-contrast') - spm_mat_file = File(exists=True, desc='Updated SPM mat file') + File(exists=True), desc="stat images from an F-contrast" + ) + spm_mat_file = File(exists=True, desc="Updated SPM mat file") class EstimateContrast(SPMCommand): @@ -388,8 +438,8 @@ class EstimateContrast(SPMCommand): input_spec = EstimateContrastInputSpec output_spec = EstimateContrastOutputSpec - _jobtype = 'stats' - _jobname = 'con' + _jobtype = "stats" + _jobname = "con" def _make_matlab_command(self, _): """validates spm options and generates job structure @@ -398,164 +448,176 @@ def _make_matlab_command(self, _): cname = [] for i, cont in enumerate(self.inputs.contrasts): cname.insert(i, cont[0]) - contrasts.insert(i, - Bunch( - name=cont[0], - stat=cont[1], - conditions=cont[2], - weights=None, - sessions=None)) + contrasts.insert( + i, + Bunch( + name=cont[0], + stat=cont[1], + conditions=cont[2], + weights=None, + sessions=None, + ), + ) if len(cont) >= 4: contrasts[i].weights = 
cont[3] if len(cont) >= 5: contrasts[i].sessions = cont[4] script = "% generated by nipype.interfaces.spm\n" script += "spm_defaults;\n" - script += ("jobs{1}.stats{1}.con.spmmat = {'%s'};\n" % - self.inputs.spm_mat_file) + script += "jobs{1}.stats{1}.con.spmmat = {'%s'};\n" % self.inputs.spm_mat_file script += "load(jobs{1}.stats{1}.con.spmmat{:});\n" script += "SPM.swd = '%s';\n" % os.getcwd() script += "save(jobs{1}.stats{1}.con.spmmat{:},'SPM');\n" script += "names = SPM.xX.name;\n" # get names for columns - if (isdefined(self.inputs.group_contrast) - and self.inputs.group_contrast): + if isdefined(self.inputs.group_contrast) and self.inputs.group_contrast: script += "condnames=names;\n" else: if self.inputs.use_derivs: script += "pat = 'Sn\([0-9]*\) (.*)';\n" else: - script += ("pat = 'Sn\([0-9]*\) (.*)\*bf\(1\)|Sn\([0-9]*\) " - ".*\*bf\([2-9]\)|Sn\([0-9]*\) (.*)';\n") + script += ( + "pat = 'Sn\([0-9]*\) (.*)\*bf\(1\)|Sn\([0-9]*\) " + ".*\*bf\([2-9]\)|Sn\([0-9]*\) (.*)';\n" + ) script += "t = regexp(names,pat,'tokens');\n" # get sessidx for columns script += "pat1 = 'Sn\(([0-9].*)\)\s.*';\n" script += "t1 = regexp(names,pat1,'tokens');\n" - script += ("for i0=1:numel(t),condnames{i0}='';condsess(i0)=0;if " - "~isempty(t{i0}{1}),condnames{i0} = t{i0}{1}{1};" - "condsess(i0)=str2num(t1{i0}{1}{1});end;end;\n") + script += ( + "for i0=1:numel(t),condnames{i0}='';condsess(i0)=0;if " + "~isempty(t{i0}{1}),condnames{i0} = t{i0}{1}{1};" + "condsess(i0)=str2num(t1{i0}{1}{1});end;end;\n" + ) # BUILD CONTRAST SESSION STRUCTURE for i, contrast in enumerate(contrasts): - if contrast.stat == 'T': - script += ("consess{%d}.tcon.name = '%s';\n" % - (i + 1, contrast.name)) - script += ( - "consess{%d}.tcon.convec = zeros(1,numel(names));\n" % - (i + 1)) + if contrast.stat == "T": + script += "consess{%d}.tcon.name = '%s';\n" % (i + 1, contrast.name) + script += "consess{%d}.tcon.convec = zeros(1,numel(names));\n" % (i + 1) for c0, cond in enumerate(contrast.conditions): - script += ("idx = strmatch('%s',condnames,'exact');\n" % - (cond)) - script += (("if isempty(idx), throw(MException(" - "'CondName:Chk', sprintf('Condition %%s not " - "found in design','%s'))); end;\n") % cond) + script += "idx = strmatch('%s',condnames,'exact');\n" % (cond) + script += ( + "if isempty(idx), throw(MException(" + "'CondName:Chk', sprintf('Condition %%s not " + "found in design','%s'))); end;\n" + ) % cond if contrast.sessions: for sno, sw in enumerate(contrast.sessions): - script += ("sidx = find(condsess(idx)==%d);\n" % - (sno + 1)) - script += (("consess{%d}.tcon.convec(idx(sidx)) " - "= %f;\n") % - (i + 1, sw * contrast.weights[c0])) + script += "sidx = find(condsess(idx)==%d);\n" % (sno + 1) + script += ( + "consess{%d}.tcon.convec(idx(sidx)) " "= %f;\n" + ) % (i + 1, sw * contrast.weights[c0]) else: - script += ("consess{%d}.tcon.convec(idx) = %f;\n" % - (i + 1, contrast.weights[c0])) + script += "consess{%d}.tcon.convec(idx) = %f;\n" % ( + i + 1, + contrast.weights[c0], + ) for i, contrast in enumerate(contrasts): - if contrast.stat == 'F': - script += ("consess{%d}.fcon.name = '%s';\n" % - (i + 1, contrast.name)) + if contrast.stat == "F": + script += "consess{%d}.fcon.name = '%s';\n" % (i + 1, contrast.name) for cl0, fcont in enumerate(contrast.conditions): try: tidx = cname.index(fcont[0]) except: - Exception("Contrast Estimate: could not get index of" - " T contrast. 
probably not defined prior " - "to the F contrasts") - script += (("consess{%d}.fcon.convec{%d} = " - "consess{%d}.tcon.convec;\n") % - (i + 1, cl0 + 1, tidx + 1)) + Exception( + "Contrast Estimate: could not get index of" + " T contrast. probably not defined prior " + "to the F contrasts" + ) + script += ( + "consess{%d}.fcon.convec{%d} = " "consess{%d}.tcon.convec;\n" + ) % (i + 1, cl0 + 1, tidx + 1) script += "jobs{1}.stats{1}.con.consess = consess;\n" - script += ("if strcmp(spm('ver'),'SPM8'), spm_jobman('initcfg');" - "jobs=spm_jobman('spm5tospm8',{jobs});end\n") + script += ( + "if strcmp(spm('ver'),'SPM8'), spm_jobman('initcfg');" + "jobs=spm_jobman('spm5tospm8',{jobs});end\n" + ) script += "spm_jobman('run',jobs);" return script def _list_outputs(self): import scipy.io as sio + outputs = self._outputs().get() pth, _ = os.path.split(self.inputs.spm_mat_file) spm = sio.loadmat(self.inputs.spm_mat_file, struct_as_record=False) con_images = [] spmT_images = [] - for con in spm['SPM'][0, 0].xCon[0]: + for con in spm["SPM"][0, 0].xCon[0]: con_images.append(str(os.path.join(pth, con.Vcon[0, 0].fname[0]))) spmT_images.append(str(os.path.join(pth, con.Vspm[0, 0].fname[0]))) if con_images: - outputs['con_images'] = con_images - outputs['spmT_images'] = spmT_images - spm12 = '12' in self.version.split('.')[0] + outputs["con_images"] = con_images + outputs["spmT_images"] = spmT_images + spm12 = "12" in self.version.split(".")[0] if spm12: - ess = glob(os.path.join(pth, 'ess*.nii')) + ess = glob(os.path.join(pth, "ess*.nii")) else: - ess = glob(os.path.join(pth, 'ess*.img')) + ess = glob(os.path.join(pth, "ess*.img")) if len(ess) > 0: - outputs['ess_images'] = sorted(ess) + outputs["ess_images"] = sorted(ess) if spm12: - spmf = glob(os.path.join(pth, 'spmF*.nii')) + spmf = glob(os.path.join(pth, "spmF*.nii")) else: - spmf = glob(os.path.join(pth, 'spmF*.img')) + spmf = glob(os.path.join(pth, "spmF*.img")) if len(spmf) > 0: - outputs['spmF_images'] = sorted(spmf) - outputs['spm_mat_file'] = self.inputs.spm_mat_file + outputs["spmF_images"] = sorted(spmf) + outputs["spm_mat_file"] = self.inputs.spm_mat_file return outputs class ThresholdInputSpec(SPMCommandInputSpec): spm_mat_file = File( - exists=True, - desc='absolute path to SPM.mat', - copyfile=True, - mandatory=True) - stat_image = File( - exists=True, desc='stat image', copyfile=False, mandatory=True) + exists=True, desc="absolute path to SPM.mat", copyfile=True, mandatory=True + ) + stat_image = File(exists=True, desc="stat image", copyfile=False, mandatory=True) contrast_index = traits.Int( - mandatory=True, desc='which contrast in the SPM.mat to use') + mandatory=True, desc="which contrast in the SPM.mat to use" + ) use_fwe_correction = traits.Bool( True, usedefault=True, - desc=('whether to use FWE (Bonferroni) ' - 'correction for initial threshold ' - '(height_threshold_type has to be ' - 'set to p-value)')) + desc=( + "whether to use FWE (Bonferroni) " + "correction for initial threshold " + "(height_threshold_type has to be " + "set to p-value)" + ), + ) use_topo_fdr = traits.Bool( True, usedefault=True, - desc=('whether to use FDR over cluster extent ' - 'probabilities')) + desc=("whether to use FDR over cluster extent " "probabilities"), + ) height_threshold = traits.Float( 0.05, usedefault=True, - desc=('value for initial thresholding ' - '(defining clusters)')) + desc=("value for initial thresholding " "(defining clusters)"), + ) height_threshold_type = traits.Enum( - 'p-value', - 'stat', + "p-value", + "stat", usedefault=True, 
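For orientation while reading the ThresholdInputSpec hunk here: a minimal usage sketch of the Threshold interface these traits configure, mirroring the class's own doctest a few lines below. The SPM.mat path, stat image, and contrast index are placeholders, and the final call requires a working MATLAB + SPM installation.

    # Illustrative sketch only -- not part of the patch; input values are placeholders.
    from nipype.interfaces.spm import Threshold

    thresh = Threshold()
    thresh.inputs.spm_mat_file = "SPM.mat"  # estimated model (placeholder path)
    thresh.inputs.stat_image = "spmT_0001.img"  # t-map to threshold (placeholder)
    thresh.inputs.contrast_index = 1  # which contrast in the SPM.mat to use
    thresh.inputs.extent_fdr_p_threshold = 0.05  # topological FDR on cluster extent
    thresh.run()  # needs MATLAB with SPM on the path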
- desc=('Is the cluster forming ' 'threshold a stat value or ' 'p-value?')) + desc=("Is the cluster forming " "threshold a stat value or " "p-value?"), + ) extent_fdr_p_threshold = traits.Float( 0.05, usedefault=True, - desc=('p threshold on FDR corrected ' 'cluster size probabilities')) + desc=("p threshold on FDR corrected " "cluster size probabilities"), + ) extent_threshold = traits.Int( - 0, usedefault=True, desc='Minimum cluster size in voxels') + 0, usedefault=True, desc="Minimum cluster size in voxels" + ) force_activation = traits.Bool( False, usedefault=True, - desc=('In case no clusters survive the ' 'topological inference step this ' 'will pick a culster with the highes ' 'sum of t-values. Use with care.')) + desc=( + "In case no clusters survive the " + "topological inference step this " + "will pick a cluster with the highest " + "sum of t-values. Use with care." + ), + ) class ThresholdOutputSpec(TraitedSpec): @@ -582,6 +644,7 @@ class Threshold(SPMCommand): >>> thresh.inputs.extent_fdr_p_threshold = 0.05 >>> thresh.run() # doctest: +SKIP """ + input_spec = ThresholdInputSpec output_spec = ThresholdOutputSpec @@ -610,11 +673,11 @@ def _make_matlab_command(self, _): script += "force_activation = 1;\n" else: script += "force_activation = 0;\n" - script += ("cluster_extent_p_fdr_thr = %f;\n" % - self.inputs.extent_fdr_p_threshold) + script += ( + "cluster_extent_p_fdr_thr = %f;\n" % self.inputs.extent_fdr_p_threshold + ) script += "stat_filename = '%s';\n" % self.inputs.stat_image - script += ("height_threshold_type = '%s';\n" % - self.inputs.height_threshold_type) + script += "height_threshold_type = '%s';\n" % self.inputs.height_threshold_type script += "extent_threshold = %d;\n" % self.inputs.extent_threshold script += "load %s;\n" % self.inputs.spm_mat_file @@ -647,9 +710,10 @@ def _make_matlab_command(self, _): Zth = Z(Z >= cluster_forming_thr); """ - script += (("spm_write_filtered(Zth,XYZth,stat_map_vol.dim'," - "stat_map_vol.mat,'thresholded map', '%s');\n") % - self._gen_pre_topo_map_filename()) + script += ( + "spm_write_filtered(Zth,XYZth,stat_map_vol.dim'," + "stat_map_vol.mat,'thresholded map', '%s');\n" + ) % self._gen_pre_topo_map_filename() script += """ max_size = 0; max_size_index = 0; @@ -707,55 +771,65 @@ def _make_matlab_command(self, _): fprintf('cluster_forming_thr = %f\\n',cluster_forming_thr); """ - script += (("spm_write_filtered(thresholded_Z,thresholded_XYZ," - "stat_map_vol.dim',stat_map_vol.mat,'thresholded map'," - " '%s');\n") % self._gen_thresholded_map_filename()) + script += ( + "spm_write_filtered(thresholded_Z,thresholded_XYZ," + "stat_map_vol.dim',stat_map_vol.mat,'thresholded map'," + " '%s');\n" + ) % self._gen_thresholded_map_filename() return script def aggregate_outputs(self, runtime=None): outputs = self._outputs() - setattr(outputs, 'thresholded_map', - self._gen_thresholded_map_filename()) - setattr(outputs, 'pre_topo_fdr_map', self._gen_pre_topo_map_filename()) - for line in runtime.stdout.split('\n'): + setattr(outputs, "thresholded_map", self._gen_thresholded_map_filename()) + setattr(outputs, "pre_topo_fdr_map", self._gen_pre_topo_map_filename()) + for line in runtime.stdout.split("\n"): if line.startswith("activation_forced = "): - setattr(outputs, 'activation_forced', - line[len("activation_forced = "):].strip() == "1") + setattr( + outputs, + "activation_forced", + line[len("activation_forced = ") :].strip() == "1", + ) elif line.startswith("n_clusters = "): - setattr(outputs, 'n_clusters', - 
int(line[len("n_clusters = "):].strip())) + setattr( + outputs, "n_clusters", int(line[len("n_clusters = ") :].strip()) + ) elif line.startswith("pre_topo_n_clusters = "): - setattr(outputs, 'pre_topo_n_clusters', - int(line[len("pre_topo_n_clusters = "):].strip())) + setattr( + outputs, + "pre_topo_n_clusters", + int(line[len("pre_topo_n_clusters = ") :].strip()), + ) elif line.startswith("cluster_forming_thr = "): - setattr(outputs, 'cluster_forming_thr', - float(line[len("cluster_forming_thr = "):].strip())) + setattr( + outputs, + "cluster_forming_thr", + float(line[len("cluster_forming_thr = ") :].strip()), + ) return outputs def _list_outputs(self): outputs = self._outputs().get() - outputs['thresholded_map'] = self._gen_thresholded_map_filename() - outputs['pre_topo_fdr_map'] = self._gen_pre_topo_map_filename() + outputs["thresholded_map"] = self._gen_thresholded_map_filename() + outputs["pre_topo_fdr_map"] = self._gen_pre_topo_map_filename() return outputs class ThresholdStatisticsInputSpec(SPMCommandInputSpec): spm_mat_file = File( - exists=True, - desc='absolute path to SPM.mat', - copyfile=True, - mandatory=True) - stat_image = File( - exists=True, desc='stat image', copyfile=False, mandatory=True) + exists=True, desc="absolute path to SPM.mat", copyfile=True, mandatory=True + ) + stat_image = File(exists=True, desc="stat image", copyfile=False, mandatory=True) contrast_index = traits.Int( - mandatory=True, desc='which contrast in the SPM.mat to use') + mandatory=True, desc="which contrast in the SPM.mat to use" + ) height_threshold = traits.Float( - desc=('stat value for initial ' - 'thresholding (defining clusters)'), - mandatory=True) + desc=("stat value for initial " "thresholding (defining clusters)"), + mandatory=True, + ) extent_threshold = traits.Int( - 0, usedefault=True, desc="Minimum cluster size in voxels") + 0, usedefault=True, desc="Minimum cluster size in voxels" + ) class ThresholdStatisticsOutputSpec(TraitedSpec): @@ -781,6 +855,7 @@ class ThresholdStatistics(SPMCommand): >>> thresh.inputs.height_threshold = 4.56 >>> thresh.run() # doctest: +SKIP """ + input_spec = ThresholdStatisticsInputSpec output_spec = ThresholdStatisticsOutputSpec @@ -841,16 +916,19 @@ def _make_matlab_command(self, _): def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() cur_output = "" - for line in runtime.stdout.split('\n'): + for line in runtime.stdout.split("\n"): if cur_output != "" and len(line.split()) != 0: setattr(outputs, cur_output, float(line)) cur_output = "" continue - if (len(line.split()) != 0 and line.split()[0] in [ - "clusterwise_P_FDR", "clusterwise_P_RF", - "voxelwise_P_Bonf", "voxelwise_P_FDR", "voxelwise_P_RF", - "voxelwise_P_uncor" - ]): + if len(line.split()) != 0 and line.split()[0] in [ + "clusterwise_P_FDR", + "clusterwise_P_RF", + "voxelwise_P_Bonf", + "voxelwise_P_FDR", + "voxelwise_P_RF", + "voxelwise_P_uncor", + ]: cur_output = line.split()[0] continue @@ -859,64 +937,68 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): class FactorialDesignInputSpec(SPMCommandInputSpec): spm_mat_dir = Directory( - exists=True, field='dir', desc='directory to store SPM.mat file (opt)') + exists=True, field="dir", desc="directory to store SPM.mat file (opt)" + ) # Need to make an alias of InputMultiPath; the inputs below are not Path covariates = InputMultiPath( traits.Dict( - key_trait=traits.Enum('vector', 'name', 'interaction', - 'centering')), - field='cov', - desc=('covariate dictionary {vector, name, ' - 
'interaction, centering}')) + key_trait=traits.Enum("vector", "name", "interaction", "centering") + ), + field="cov", + desc=("covariate dictionary {vector, name, " "interaction, centering}"), + ) threshold_mask_none = traits.Bool( - field='masking.tm.tm_none', - xor=['threshold_mask_absolute', 'threshold_mask_relative'], - desc='do not use threshold masking') + field="masking.tm.tm_none", + xor=["threshold_mask_absolute", "threshold_mask_relative"], + desc="do not use threshold masking", + ) threshold_mask_absolute = traits.Float( - field='masking.tm.tma.athresh', - xor=['threshold_mask_none', 'threshold_mask_relative'], - desc='use an absolute threshold') + field="masking.tm.tma.athresh", + xor=["threshold_mask_none", "threshold_mask_relative"], + desc="use an absolute threshold", + ) threshold_mask_relative = traits.Float( - field='masking.tm.tmr.rthresh', - xor=['threshold_mask_absolute', 'threshold_mask_none'], - desc=('threshold using a ' 'proportion of the global ' 'value')) + field="masking.tm.tmr.rthresh", + xor=["threshold_mask_absolute", "threshold_mask_none"], + desc=("threshold using a " "proportion of the global " "value"), + ) use_implicit_threshold = traits.Bool( - field='masking.im', - desc=('use implicit mask NaNs or ' 'zeros to threshold')) + field="masking.im", desc=("use implicit mask NaNs or " "zeros to threshold") + ) explicit_mask_file = File( - field='masking.em', # requires cell - desc='use an implicit mask file to threshold') + field="masking.em", # requires cell + desc="use an explicit mask file to threshold", + ) global_calc_omit = traits.Bool( - field='globalc.g_omit', - xor=['global_calc_mean', 'global_calc_values'], - desc='omit global calculation') + field="globalc.g_omit", + xor=["global_calc_mean", "global_calc_values"], + desc="omit global calculation", + ) global_calc_mean = traits.Bool( - field='globalc.g_mean', - xor=['global_calc_omit', 'global_calc_values'], - desc='use mean for global calculation') + field="globalc.g_mean", + xor=["global_calc_omit", "global_calc_values"], + desc="use mean for global calculation", + ) global_calc_values = traits.List( traits.Float, - field='globalc.g_user.global_uval', - xor=['global_calc_mean', 'global_calc_omit'], - desc='omit global calculation') + field="globalc.g_user.global_uval", + xor=["global_calc_mean", "global_calc_omit"], + desc="values for global calculation", + ) no_grand_mean_scaling = traits.Bool( - field='globalm.gmsca.gmsca_no', - desc=('do not perform grand mean ' 'scaling')) + field="globalm.gmsca.gmsca_no", desc=("do not perform grand mean " "scaling") + ) global_normalization = traits.Enum( 1, 2, 3, - field='globalm.glonorm', - desc=('global normalization None-1, ' 'Proportional-2, ANCOVA-3')) + field="globalm.glonorm", + desc=("global normalization None-1, " "Proportional-2, ANCOVA-3"), + ) class FactorialDesignOutputSpec(TraitedSpec): - spm_mat_file = File(exists=True, desc='SPM mat file') + spm_mat_file = File(exists=True, desc="SPM mat file") class FactorialDesign(SPMCommand): @@ -928,21 +1010,21 @@ class FactorialDesign(SPMCommand): input_spec = FactorialDesignInputSpec output_spec = FactorialDesignOutputSpec - _jobtype = 'stats' - _jobname = 'factorial_design' + _jobtype = "stats" + _jobname = "factorial_design" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if opt in ['spm_mat_dir', 'explicit_mask_file']: + if opt in ["spm_mat_dir", "explicit_mask_file"]: return np.array([str(val)], dtype=object) - if opt in ['covariates']: + if opt in 
["covariates"]: outlist = [] mapping = { - 'name': 'cname', - 'vector': 'c', - 'interaction': 'iCFI', - 'centering': 'iCC' + "name": "cname", + "vector": "c", + "interaction": "iCFI", + "centering": "iCC", } for dictitem in val: outdict = {} @@ -957,23 +1039,24 @@ def _parse_inputs(self): """ einputs = super(FactorialDesign, self)._parse_inputs() if not isdefined(self.inputs.spm_mat_dir): - einputs[0]['dir'] = np.array([str(os.getcwd())], dtype=object) + einputs[0]["dir"] = np.array([str(os.getcwd())], dtype=object) return einputs def _list_outputs(self): outputs = self._outputs().get() - spm = os.path.join(os.getcwd(), 'SPM.mat') - outputs['spm_mat_file'] = spm + spm = os.path.join(os.getcwd(), "SPM.mat") + outputs["spm_mat_file"] = spm return outputs class OneSampleTTestDesignInputSpec(FactorialDesignInputSpec): in_files = traits.List( File(exists=True), - field='des.t1.scans', + field="des.t1.scans", mandatory=True, minlen=2, - desc='input files') + desc="input files", + ) class OneSampleTTestDesign(FactorialDesign): @@ -992,7 +1075,7 @@ class OneSampleTTestDesign(FactorialDesign): def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if opt in ['in_files']: + if opt in ["in_files"]: return np.array(val, dtype=object) return super(OneSampleTTestDesign, self)._format_arg(opt, spec, val) @@ -1002,24 +1085,25 @@ class TwoSampleTTestDesignInputSpec(FactorialDesignInputSpec): # parameters to require at least two files in each group [SG] group1_files = traits.List( File(exists=True), - field='des.t2.scans1', + field="des.t2.scans1", mandatory=True, minlen=2, - desc='Group 1 input files') + desc="Group 1 input files", + ) group2_files = traits.List( File(exists=True), - field='des.t2.scans2', + field="des.t2.scans2", mandatory=True, minlen=2, - desc='Group 2 input files') + desc="Group 2 input files", + ) dependent = traits.Bool( - field='des.t2.dept', - desc=('Are the measurements dependent between ' - 'levels')) + field="des.t2.dept", desc=("Are the measurements dependent between " "levels") + ) unequal_variance = traits.Bool( - field='des.t2.variance', - desc=('Are the variances equal or unequal ' - 'between groups')) + field="des.t2.variance", + desc=("Are the variances equal or unequal " "between groups"), + ) class TwoSampleTTestDesign(FactorialDesign): @@ -1039,7 +1123,7 @@ class TwoSampleTTestDesign(FactorialDesign): def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if opt in ['group1_files', 'group2_files']: + if opt in ["group1_files", "group2_files"]: return np.array(val, dtype=object) return super(TwoSampleTTestDesign, self)._format_arg(opt, spec, val) @@ -1047,14 +1131,17 @@ def _format_arg(self, opt, spec, val): class PairedTTestDesignInputSpec(FactorialDesignInputSpec): paired_files = traits.List( traits.List(File(exists=True), minlen=2, maxlen=2), - field='des.pt.pair', + field="des.pt.pair", mandatory=True, minlen=2, - desc='List of paired files') + desc="List of paired files", + ) grand_mean_scaling = traits.Bool( - field='des.pt.gmsca', desc='Perform grand mean scaling') + field="des.pt.gmsca", desc="Perform grand mean scaling" + ) ancova = traits.Bool( - field='des.pt.ancova', desc='Specify ancova-by-factor regressors') + field="des.pt.ancova", desc="Specify ancova-by-factor regressors" + ) class PairedTTestDesign(FactorialDesign): @@ -1073,7 +1160,7 @@ class PairedTTestDesign(FactorialDesign): def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if opt in 
['paired_files']: + if opt in ["paired_files"]: return [dict(scans=np.array(files, dtype=object)) for files in val] return super(PairedTTestDesign, self)._format_arg(opt, spec, val) @@ -1081,20 +1168,22 @@ def _format_arg(self, opt, spec, val): class MultipleRegressionDesignInputSpec(FactorialDesignInputSpec): in_files = traits.List( File(exists=True), - field='des.mreg.scans', + field="des.mreg.scans", mandatory=True, minlen=2, - desc='List of files') + desc="List of files", + ) include_intercept = traits.Bool( True, - field='des.mreg.incint', + field="des.mreg.incint", usedefault=True, - desc='Include intercept in design') + desc="Include intercept in design", + ) user_covariates = InputMultiPath( - traits.Dict(key_trait=traits.Enum('vector', 'name', 'centering')), - field='des.mreg.mcov', - desc=('covariate dictionary {vector, ' - 'name, centering}')) + traits.Dict(key_trait=traits.Enum("vector", "name", "centering")), + field="des.mreg.mcov", + desc=("covariate dictionary {vector, " "name, centering}"), + ) class MultipleRegressionDesign(FactorialDesign): @@ -1113,16 +1202,15 @@ class MultipleRegressionDesign(FactorialDesign): def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if opt in ['in_files']: + if opt in ["in_files"]: return np.array(val, dtype=object) - if opt in ['user_covariates']: + if opt in ["user_covariates"]: outlist = [] - mapping = {'name': 'cname', 'vector': 'c', 'centering': 'iCC'} + mapping = {"name": "cname", "vector": "c", "centering": "iCC"} for dictitem in val: outdict = {} for key, keyval in list(dictitem.items()): outdict[mapping[key]] = keyval outlist.append(outdict) return outlist - return (super(MultipleRegressionDesign, self)._format_arg( - opt, spec, val)) + return super(MultipleRegressionDesign, self)._format_arg(opt, spec, val) diff --git a/nipype/interfaces/spm/preprocess.py b/nipype/interfaces/spm/preprocess.py index 5c63936b09..b6a705fa8e 100644 --- a/nipype/interfaces/spm/preprocess.py +++ b/nipype/interfaces/spm/preprocess.py @@ -10,98 +10,194 @@ import numpy as np # Local imports -from ...utils.filemanip import (fname_presuffix, ensure_list, - simplify_list, split_filename) -from ..base import (OutputMultiPath, TraitedSpec, isdefined, - traits, InputMultiPath, InputMultiObject, File, Str) -from .base import (SPMCommand, scans_for_fname, func_is_3d, - scans_for_fnames, SPMCommandInputSpec, ImageFileSPM) - -__docformat__ = 'restructuredtext' +from ...utils.filemanip import ( + fname_presuffix, + ensure_list, + simplify_list, + split_filename, +) +from ..base import ( + OutputMultiPath, + TraitedSpec, + isdefined, + traits, + InputMultiPath, + InputMultiObject, + File, + Str, +) +from .base import ( + SPMCommand, + scans_for_fname, + func_is_3d, + scans_for_fnames, + SPMCommandInputSpec, + ImageFileSPM, +) + +__docformat__ = "restructuredtext" class FieldMapInputSpec(SPMCommandInputSpec): - jobtype = traits.Enum('calculatevdm', 'applyvdm', usedefault=True, - desc='one of: calculatevdm, applyvdm') - phase_file = File(mandatory=True, exists=True, copyfile=False, - field='subj.data.presubphasemag.phase', - desc='presubstracted phase file') - magnitude_file = File(mandatory=True, exists=True, copyfile=False, - field='subj.data.presubphasemag.magnitude', - desc='presubstracted magnitude file') - echo_times = traits.Tuple(traits.Float, traits.Float, mandatory=True, - field='subj.defaults.defaultsval.et', - desc='short and long echo times') - maskbrain = traits.Bool(True, usedefault=True, - 
field='subj.defaults.defaultsval.maskbrain',
-                        desc='masking or no masking of the brain')
-    blip_direction = traits.Enum(1, -1, mandatory=True,
-                                 field='subj.defaults.defaultsval.blipdir',
-                                 desc='polarity of the phase-encode blips')
-    total_readout_time = traits.Float(mandatory=True,
-                                      field='subj.defaults.defaultsval.tert',
-                                      desc='total EPI readout time')
-    epifm = traits.Bool(False, usedefault=True,
-                        field='subj.defaults.defaultsval.epifm',
-                        desc='epi-based field map');
-    jacobian_modulation = traits.Bool(False, usedefault=True,
-                                      field='subj.defaults.defaultsval.ajm',
-                                      desc='jacobian modulation');
+    jobtype = traits.Enum(
+        "calculatevdm",
+        "applyvdm",
+        usedefault=True,
+        desc="one of: calculatevdm, applyvdm",
+    )
+    phase_file = File(
+        mandatory=True,
+        exists=True,
+        copyfile=False,
+        field="subj.data.presubphasemag.phase",
+        desc="presubtracted phase file",
+    )
+    magnitude_file = File(
+        mandatory=True,
+        exists=True,
+        copyfile=False,
+        field="subj.data.presubphasemag.magnitude",
+        desc="presubtracted magnitude file",
+    )
+    echo_times = traits.Tuple(
+        traits.Float,
+        traits.Float,
+        mandatory=True,
+        field="subj.defaults.defaultsval.et",
+        desc="short and long echo times",
+    )
+    maskbrain = traits.Bool(
+        True,
+        usedefault=True,
+        field="subj.defaults.defaultsval.maskbrain",
+        desc="masking or no masking of the brain",
+    )
+    blip_direction = traits.Enum(
+        1,
+        -1,
+        mandatory=True,
+        field="subj.defaults.defaultsval.blipdir",
+        desc="polarity of the phase-encode blips",
+    )
+    total_readout_time = traits.Float(
+        mandatory=True,
+        field="subj.defaults.defaultsval.tert",
+        desc="total EPI readout time",
+    )
+    epifm = traits.Bool(
+        False,
+        usedefault=True,
+        field="subj.defaults.defaultsval.epifm",
+        desc="epi-based field map",
+    )
+    jacobian_modulation = traits.Bool(
+        False,
+        usedefault=True,
+        field="subj.defaults.defaultsval.ajm",
+        desc="jacobian modulation",
+    )
     # Unwarping defaults parameters
-    method = traits.Enum('Mark3D', 'Mark2D', 'Huttonish', usedefault=True,
-                         desc='One of: Mark3D, Mark2D, Huttonish',
-                         field='subj.defaults.defaultsval.uflags.method');
-    unwarp_fwhm = traits.Range(low=0, value=10, usedefault=True,
-                               field='subj.defaults.defaultsval.uflags.fwhm',
-                               desc='gaussian smoothing kernel width');
-    pad = traits.Range(low=0, value=0, usedefault=True,
-                       field='subj.defaults.defaultsval.uflags.pad',
-                       desc='padding kernel width');
-    ws = traits.Bool(True, usedefault=True,
-                     field='subj.defaults.defaultsval.uflags.ws',
-                     desc='weighted smoothing');
+    method = traits.Enum(
+        "Mark3D",
+        "Mark2D",
+        "Huttonish",
+        usedefault=True,
+        desc="One of: Mark3D, Mark2D, Huttonish",
+        field="subj.defaults.defaultsval.uflags.method",
+    )
+    unwarp_fwhm = traits.Range(
+        low=0,
+        value=10,
+        usedefault=True,
+        field="subj.defaults.defaultsval.uflags.fwhm",
+        desc="gaussian smoothing kernel width",
+    )
+    pad = traits.Range(
+        low=0,
+        value=0,
+        usedefault=True,
+        field="subj.defaults.defaultsval.uflags.pad",
+        desc="padding kernel width",
+    )
+    ws = traits.Bool(
+        True,
+        usedefault=True,
+        field="subj.defaults.defaultsval.uflags.ws",
+        desc="weighted smoothing",
+    )
     # Brain mask defaults parameters
-    template = File(copyfile=False, exists=True,
-                    field='subj.defaults.defaultsval.mflags.template',
-                    desc='template image for brain masking');
-    mask_fwhm = traits.Range(low=0, value=5, usedefault=True,
-                             field='subj.defaults.defaultsval.mflags.fwhm',
-                             desc='gaussian smoothing kernel width');
-    nerode = traits.Range(low=0, value=2, usedefault=True,
-                          field='subj.defaults.defaultsval.mflags.nerode',
-                          desc='number of erosions');
-    ndilate = traits.Range(low=0, value=4, usedefault=True,
-                           field='subj.defaults.defaultsval.mflags.ndilate',
-                           desc='number of erosions');
-    thresh = traits.Float(0.5, usedefault=True,
-                          field='subj.defaults.defaultsval.mflags.thresh',
-                          desc='threshold used to create brain mask from segmented data');
-    reg = traits.Float(0.02, usedefault=True,
-                       field='subj.defaults.defaultsval.mflags.reg',
-                       desc='regularization value used in the segmentation');
+    template = File(
+        copyfile=False,
+        exists=True,
+        field="subj.defaults.defaultsval.mflags.template",
+        desc="template image for brain masking",
+    )
+    mask_fwhm = traits.Range(
+        low=0,
+        value=5,
+        usedefault=True,
+        field="subj.defaults.defaultsval.mflags.fwhm",
+        desc="gaussian smoothing kernel width",
+    )
+    nerode = traits.Range(
+        low=0,
+        value=2,
+        usedefault=True,
+        field="subj.defaults.defaultsval.mflags.nerode",
+        desc="number of erosions",
+    )
+    ndilate = traits.Range(
+        low=0,
+        value=4,
+        usedefault=True,
+        field="subj.defaults.defaultsval.mflags.ndilate",
+        desc="number of dilations",
+    )
+    thresh = traits.Float(
+        0.5,
+        usedefault=True,
+        field="subj.defaults.defaultsval.mflags.thresh",
+        desc="threshold used to create brain mask from segmented data",
+    )
+    reg = traits.Float(
+        0.02,
+        usedefault=True,
+        field="subj.defaults.defaultsval.mflags.reg",
+        desc="regularization value used in the segmentation",
+    )
     # EPI unwarping for quality check
-    epi_file = File(copyfile=False, exists=True, mandatory=True,
-                    field='subj.session.epi',
-                    desc='EPI to unwarp');
-    matchvdm = traits.Bool(True, usedefault=True,
-                           field='subj.matchvdm',
-                           desc='match VDM to EPI');
-    sessname = Str('_run-', usedefault=True,
-                   field='subj.sessname',
-                   desc='VDM filename extension');
-    writeunwarped = traits.Bool(False, usedefault=True,
-                                field='subj.writeunwarped',
-                                desc='write unwarped EPI');
-    anat_file = File(copyfile=False, exists=True,
-                     field='subj.anat',
-                     desc='anatomical image for comparison');
-    matchanat = traits.Bool(True, usedefault=True,
-                            field='subj.matchanat',
-                            desc='match anatomical image to EPI');
+    epi_file = File(
+        copyfile=False,
+        exists=True,
+        mandatory=True,
+        field="subj.session.epi",
+        desc="EPI to unwarp",
+    )
+    matchvdm = traits.Bool(
+        True, usedefault=True, field="subj.matchvdm", desc="match VDM to EPI"
+    )
+    sessname = Str(
+        "_run-", usedefault=True, field="subj.sessname", desc="VDM filename extension"
+    )
+    writeunwarped = traits.Bool(
+        False, usedefault=True, field="subj.writeunwarped", desc="write unwarped EPI"
+    )
+    anat_file = File(
+        copyfile=False,
+        exists=True,
+        field="subj.anat",
+        desc="anatomical image for comparison",
+    )
+    matchanat = traits.Bool(
+        True,
+        usedefault=True,
+        field="subj.matchanat",
+        desc="match anatomical image to EPI",
+    )
 class FieldMapOutputSpec(TraitedSpec):
-    vdm = File(exists=True, desc='voxel difference map')
+    vdm = File(exists=True, desc="voxel displacement map")
 class FieldMap(SPMCommand):
@@ -129,13 +225,13 @@ class FieldMap(SPMCommand):
     input_spec = FieldMapInputSpec
     output_spec = FieldMapOutputSpec
-    _jobtype = 'tools'
-    _jobname = 'fieldmap'
+    _jobtype = "tools"
+    _jobname = "fieldmap"
     def _format_arg(self, opt, spec, val):
         """Convert input to appropriate format for spm
         """
-        if opt in ['phase_file', 'magnitude_file', 'anat_file', 'epi_file']:
+        if opt in ["phase_file", "magnitude_file", "anat_file", "epi_file"]:
             return scans_for_fname(ensure_list(val))
         return super(FieldMap, self)._format_arg(opt, spec, val)
@@ -150,7 +246,7 @@ def _list_outputs(self):
         outputs = self._outputs().get()
         jobtype = self.inputs.jobtype
         if jobtype == "calculatevdm":
-            outputs['vdm'] = fname_presuffix(self.inputs.phase_file, prefix='vdm5_sc')
+            outputs["vdm"] = fname_presuffix(self.inputs.phase_file, prefix="vdm5_sc")
         return outputs
 class SliceTimingInputSpec(SPMCommandInputSpec):
     in_files = InputMultiPath(
         traits.Either(
-            traits.List(ImageFileSPM(exists=True)), ImageFileSPM(exists=True)),
-        field='scans',
-        desc='list of filenames to apply slice timing',
+            traits.List(ImageFileSPM(exists=True)), ImageFileSPM(exists=True)
+        ),
+        field="scans",
+        desc="list of filenames to apply slice timing",
         mandatory=True,
-        copyfile=False)
+        copyfile=False,
+    )
     num_slices = traits.Int(
-        field='nslices', desc='number of slices in a volume', mandatory=True)
+        field="nslices", desc="number of slices in a volume", mandatory=True
+    )
     time_repetition = traits.Float(
-        field='tr',
-        desc=('time between volume acquisitions'
-              '(start to start time)'),
-        mandatory=True)
+        field="tr",
+        desc=("time between volume acquisitions " "(start to start time)"),
+        mandatory=True,
+    )
     time_acquisition = traits.Float(
-        field='ta',
-        desc=('time of volume acquisition. usually'
-              'calculated as TR-(TR/num_slices)'),
-        mandatory=True)
+        field="ta",
+        desc=("time of volume acquisition. usually " "calculated as TR-(TR/num_slices)"),
+        mandatory=True,
+    )
     slice_order = traits.List(
         traits.Float(),
-        field='so',
-        desc=('1-based order or onset (in ms) in which '
-              'slices are acquired'),
-        mandatory=True)
+        field="so",
+        desc=("1-based order or onset (in ms) in which " "slices are acquired"),
+        mandatory=True,
+    )
     ref_slice = traits.Int(
-        field='refslice',
-        desc='1-based Number of the reference slice or '
-        'reference time point if slice_order is in '
-        'onsets (ms)',
-        mandatory=True)
+        field="refslice",
+        desc="1-based Number of the reference slice or "
+        "reference time point if slice_order is in "
+        "onsets (ms)",
+        mandatory=True,
+    )
     out_prefix = traits.String(
-        'a', field='prefix', usedefault=True, desc='slicetimed output prefix')
+        "a", field="prefix", usedefault=True, desc="slicetimed output prefix"
+    )
 class SliceTimingOutputSpec(TraitedSpec):
     timecorrected_files = OutputMultiPath(
         traits.Either(traits.List(File(exists=True)), File(exists=True)),
-        desc='slice time corrected files')
+        desc="slice time corrected files",
+    )
 class SliceTiming(SPMCommand):
@@ -220,127 +322,134 @@ class SliceTiming(SPMCommand):
     input_spec = SliceTimingInputSpec
     output_spec = SliceTimingOutputSpec
-    _jobtype = 'temporal'
-    _jobname = 'st'
+    _jobtype = "temporal"
+    _jobname = "st"
     def _format_arg(self, opt, spec, val):
         """Convert input to appropriate format for spm
         """
-        if opt == 'in_files':
+        if opt == "in_files":
             return scans_for_fnames(
-                ensure_list(val), keep4d=False, separate_sessions=True)
+                ensure_list(val), keep4d=False, separate_sessions=True
+            )
         return super(SliceTiming, self)._format_arg(opt, spec, val)
     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs['timecorrected_files'] = []
+        outputs["timecorrected_files"] = []
         filelist = ensure_list(self.inputs.in_files)
         for f in filelist:
             if isinstance(f, list):
                 run = [
-                    fname_presuffix(in_f, prefix=self.inputs.out_prefix)
-                    for in_f in f
+                    fname_presuffix(in_f, prefix=self.inputs.out_prefix) for in_f in f
                 ]
             else:
                 run = fname_presuffix(f, prefix=self.inputs.out_prefix)
-            outputs['timecorrected_files'].append(run)
+
outputs["timecorrected_files"].append(run) return outputs class RealignInputSpec(SPMCommandInputSpec): in_files = InputMultiPath( - traits.Either(ImageFileSPM(exists=True), - traits.List(ImageFileSPM(exists=True))), - field='data', + traits.Either( + ImageFileSPM(exists=True), traits.List(ImageFileSPM(exists=True)) + ), + field="data", mandatory=True, copyfile=True, - desc='list of filenames to realign') + desc="list of filenames to realign", + ) jobtype = traits.Enum( - 'estwrite', - 'estimate', - 'write', - desc='one of: estimate, write, estwrite', - usedefault=True) + "estwrite", + "estimate", + "write", + desc="one of: estimate, write, estwrite", + usedefault=True, + ) quality = traits.Range( - low=0.0, - high=1.0, - field='eoptions.quality', - desc='0.1 = fast, 1.0 = precise') + low=0.0, high=1.0, field="eoptions.quality", desc="0.1 = fast, 1.0 = precise" + ) fwhm = traits.Range( - low=0.0, field='eoptions.fwhm', desc='gaussian smoothing kernel width') + low=0.0, field="eoptions.fwhm", desc="gaussian smoothing kernel width" + ) separation = traits.Range( - low=0.0, field='eoptions.sep', desc='sampling separation in mm') + low=0.0, field="eoptions.sep", desc="sampling separation in mm" + ) register_to_mean = traits.Bool( - field='eoptions.rtm', - desc=('Indicate whether realignment is ' - 'done to the mean image')) + field="eoptions.rtm", + desc=("Indicate whether realignment is " "done to the mean image"), + ) weight_img = File( - exists=True, - field='eoptions.weight', - desc='filename of weighting image') + exists=True, field="eoptions.weight", desc="filename of weighting image" + ) interp = traits.Range( low=0, high=7, - field='eoptions.interp', - desc='degree of b-spline used for interpolation') + field="eoptions.interp", + desc="degree of b-spline used for interpolation", + ) wrap = traits.List( traits.Int(), minlen=3, maxlen=3, - field='eoptions.wrap', - desc='Check if interpolation should wrap in [x,y,z]') + field="eoptions.wrap", + desc="Check if interpolation should wrap in [x,y,z]", + ) write_which = traits.ListInt( [2, 1], - field='roptions.which', + field="roptions.which", minlen=2, maxlen=2, usedefault=True, - desc='determines which images to reslice') + desc="determines which images to reslice", + ) write_interp = traits.Range( low=0, high=7, - field='roptions.interp', - desc=('degree of b-spline used for ' - 'interpolation')) + field="roptions.interp", + desc=("degree of b-spline used for " "interpolation"), + ) write_wrap = traits.List( traits.Int(), minlen=3, maxlen=3, - field='roptions.wrap', - desc=('Check if interpolation should wrap in ' - '[x,y,z]')) - write_mask = traits.Bool( - field='roptions.mask', desc='True/False mask output image') + field="roptions.wrap", + desc=("Check if interpolation should wrap in " "[x,y,z]"), + ) + write_mask = traits.Bool(field="roptions.mask", desc="True/False mask output image") out_prefix = traits.String( - 'r', - field='roptions.prefix', - usedefault=True, - desc='realigned output prefix') + "r", field="roptions.prefix", usedefault=True, desc="realigned output prefix" + ) class RealignOutputSpec(TraitedSpec): - mean_image = File(exists=True, desc='Mean image file from the realignment') + mean_image = File(exists=True, desc="Mean image file from the realignment") modified_in_files = OutputMultiPath( traits.Either(traits.List(File(exists=True)), File(exists=True)), - desc=('Copies of all files passed to ' - 'in_files. 
Headers will have ' - 'been modified to align all ' - 'images with the first, or ' - 'optionally to first do that, ' - 'extract a mean image, and ' - 're-align to that mean image.')) + desc=( + "Copies of all files passed to " + "in_files. Headers will have " + "been modified to align all " + "images with the first, or " + "optionally to first do that, " + "extract a mean image, and " + "re-align to that mean image." + ), + ) realigned_files = OutputMultiPath( traits.Either(traits.List(File(exists=True)), File(exists=True)), - desc=('If jobtype is write or estwrite, ' - 'these will be the resliced files.' - ' Otherwise, they will be copies ' - 'of in_files that have had their ' - 'headers rewritten.')) + desc=( + "If jobtype is write or estwrite, " + "these will be the resliced files." + " Otherwise, they will be copies " + "of in_files that have had their " + "headers rewritten." + ), + ) realignment_parameters = OutputMultiPath( - File(exists=True), - desc=('Estimated translation and ' - 'rotation parameters')) + File(exists=True), desc=("Estimated translation and " "rotation parameters") + ) class Realign(SPMCommand): @@ -362,26 +471,27 @@ class Realign(SPMCommand): input_spec = RealignInputSpec output_spec = RealignOutputSpec - _jobtype = 'spatial' - _jobname = 'realign' + _jobtype = "spatial" + _jobname = "realign" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if opt == 'in_files': + if opt == "in_files": if self.inputs.jobtype == "write": separate_sessions = False else: separate_sessions = True return scans_for_fnames( - val, keep4d=False, separate_sessions=separate_sessions) + val, keep4d=False, separate_sessions=separate_sessions + ) return super(Realign, self)._format_arg(opt, spec, val) def _parse_inputs(self): """validate spm realign options if set to None ignore """ einputs = super(Realign, self)._parse_inputs() - return [{'%s' % (self.inputs.jobtype): einputs[0]}] + return [{"%s" % (self.inputs.jobtype): einputs[0]}] def _list_outputs(self): outputs = self._outputs().get() @@ -390,22 +500,23 @@ def _list_outputs(self): if self.inputs.jobtype != "write": if isdefined(self.inputs.in_files): - outputs['realignment_parameters'] = [] + outputs["realignment_parameters"] = [] for imgf in self.inputs.in_files: if isinstance(imgf, list): tmp_imgf = imgf[0] else: tmp_imgf = imgf - outputs['realignment_parameters'].append( + outputs["realignment_parameters"].append( fname_presuffix( - tmp_imgf, prefix='rp_', suffix='.txt', use_ext=False)) + tmp_imgf, prefix="rp_", suffix=".txt", use_ext=False + ) + ) if not isinstance(imgf, list) and func_is_3d(imgf): break if self.inputs.jobtype == "estimate": - outputs['realigned_files'] = self.inputs.in_files - if (self.inputs.jobtype == "estimate" - or self.inputs.jobtype == "estwrite"): - outputs['modified_in_files'] = self.inputs.in_files + outputs["realigned_files"] = self.inputs.in_files + if self.inputs.jobtype == "estimate" or self.inputs.jobtype == "estwrite": + outputs["modified_in_files"] = self.inputs.in_files if self.inputs.jobtype == "write" or self.inputs.jobtype == "estwrite": if isinstance(self.inputs.in_files[0], list): first_image = self.inputs.in_files[0][0] @@ -413,170 +524,195 @@ def _list_outputs(self): first_image = self.inputs.in_files[0] if resliced_mean: - outputs['mean_image'] = fname_presuffix( - first_image, prefix='mean') + outputs["mean_image"] = fname_presuffix(first_image, prefix="mean") if resliced_all: - outputs['realigned_files'] = [] - for idx, imgf in enumerate( - 
ensure_list(self.inputs.in_files)): + outputs["realigned_files"] = [] + for idx, imgf in enumerate(ensure_list(self.inputs.in_files)): realigned_run = [] if isinstance(imgf, list): for i, inner_imgf in enumerate(ensure_list(imgf)): newfile = fname_presuffix( - inner_imgf, prefix=self.inputs.out_prefix) + inner_imgf, prefix=self.inputs.out_prefix + ) realigned_run.append(newfile) else: realigned_run = fname_presuffix( - imgf, prefix=self.inputs.out_prefix) - outputs['realigned_files'].append(realigned_run) + imgf, prefix=self.inputs.out_prefix + ) + outputs["realigned_files"].append(realigned_run) return outputs class RealignUnwarpInputSpec(SPMCommandInputSpec): in_files = InputMultiObject( - traits.Either(ImageFileSPM(exists=True), - traits.List(ImageFileSPM(exists=True))), - field='data.scans', + traits.Either( + ImageFileSPM(exists=True), traits.List(ImageFileSPM(exists=True)) + ), + field="data.scans", mandatory=True, copyfile=True, - desc='list of filenames to realign and unwarp') + desc="list of filenames to realign and unwarp", + ) phase_map = File( - field='data.pmscan', - desc='Voxel displacement map to use in unwarping. Unlike SPM standard ' - 'behaviour, the same map will be used for all sessions', - copyfile=False) + field="data.pmscan", + desc="Voxel displacement map to use in unwarping. Unlike SPM standard " + "behaviour, the same map will be used for all sessions", + copyfile=False, + ) quality = traits.Range( - low=0.0, - high=1.0, - field='eoptions.quality', - desc='0.1 = fast, 1.0 = precise') + low=0.0, high=1.0, field="eoptions.quality", desc="0.1 = fast, 1.0 = precise" + ) fwhm = traits.Range( - low=0.0, - field='eoptions.fwhm', - desc='gaussian smoothing kernel width') + low=0.0, field="eoptions.fwhm", desc="gaussian smoothing kernel width" + ) separation = traits.Range( - low=0.0, - field='eoptions.sep', - desc='sampling separation in mm') + low=0.0, field="eoptions.sep", desc="sampling separation in mm" + ) register_to_mean = traits.Bool( - field='eoptions.rtm', - desc='Indicate whether realignment is done to the mean image') + field="eoptions.rtm", + desc="Indicate whether realignment is done to the mean image", + ) weight_img = File( - exists=True, - field='eoptions.weight', - desc='filename of weighting image') + exists=True, field="eoptions.weight", desc="filename of weighting image" + ) interp = traits.Range( low=0, high=7, - field='eoptions.einterp', - desc='degree of b-spline used for interpolation') + field="eoptions.einterp", + desc="degree of b-spline used for interpolation", + ) wrap = traits.List( traits.Int(), minlen=3, maxlen=3, - field='eoptions.ewrap', - desc='Check if interpolation should wrap in [x,y,z]') + field="eoptions.ewrap", + desc="Check if interpolation should wrap in [x,y,z]", + ) est_basis_func = traits.List( traits.Int(), minlen=2, maxlen=2, - field='uweoptions.basfcn', - desc='Number of basis functions to use for each dimension') + field="uweoptions.basfcn", + desc="Number of basis functions to use for each dimension", + ) est_reg_order = traits.Range( low=0, high=3, - field='uweoptions.regorder', - desc=('This parameter determines how to balance the compromise between likelihood ' - 'maximization and smoothness maximization of the estimated field.')) + field="uweoptions.regorder", + desc=( + "This parameter determines how to balance the compromise between likelihood " + "maximization and smoothness maximization of the estimated field." 
+ ), + ) est_reg_factor = traits.ListInt( [100000], - field='uweoptions.lambda', + field="uweoptions.lambda", minlen=1, maxlen=1, usedefault=True, - desc='Regularisation factor. Default: 100000 (medium).') + desc="Regularisation factor. Default: 100000 (medium).", + ) est_jacobian_deformations = traits.Bool( - field='uweoptions.jm', - desc=('Jacobian deformations. In theory a good idea to include them, ' - ' in practice a bad idea. Default: No.')) + field="uweoptions.jm", + desc=( + "Jacobian deformations. In theory a good idea to include them, " + " in practice a bad idea. Default: No." + ), + ) est_first_order_effects = traits.List( traits.Int(), minlen=1, maxlen=6, - field='uweoptions.fot', - desc='First order effects should only depend on pitch and roll, i.e. [4 5]') + field="uweoptions.fot", + desc="First order effects should only depend on pitch and roll, i.e. [4 5]", + ) est_second_order_effects = traits.List( traits.Int(), minlen=1, maxlen=6, - field='uweoptions.sot', - desc='List of second order terms to model second derivatives of.') + field="uweoptions.sot", + desc="List of second order terms to model second derivatives of.", + ) est_unwarp_fwhm = traits.Range( low=0.0, - field='uweoptions.uwfwhm', - desc='gaussian smoothing kernel width for unwarp') + field="uweoptions.uwfwhm", + desc="gaussian smoothing kernel width for unwarp", + ) est_re_est_mov_par = traits.Bool( - field='uweoptions.rem', - desc='Re-estimate movement parameters at each unwarping iteration.') + field="uweoptions.rem", + desc="Re-estimate movement parameters at each unwarping iteration.", + ) est_num_of_iterations = traits.ListInt( [5], - field='uweoptions.noi', + field="uweoptions.noi", minlen=1, maxlen=1, usedefault=True, - desc='Number of iterations.') + desc="Number of iterations.", + ) est_taylor_expansion_point = traits.String( - 'Average', - field='uweoptions.expround', + "Average", + field="uweoptions.expround", usedefault=True, - desc='Point in position space to perform Taylor-expansion around.') + desc="Point in position space to perform Taylor-expansion around.", + ) reslice_which = traits.ListInt( [2, 1], - field='uwroptions.uwwhich', + field="uwroptions.uwwhich", minlen=2, maxlen=2, usedefault=True, - desc='determines which images to reslice') + desc="determines which images to reslice", + ) reslice_interp = traits.Range( low=0, high=7, - field='uwroptions.rinterp', - desc='degree of b-spline used for interpolation') + field="uwroptions.rinterp", + desc="degree of b-spline used for interpolation", + ) reslice_wrap = traits.List( traits.Int(), minlen=3, maxlen=3, - field='uwroptions.wrap', - desc='Check if interpolation should wrap in [x,y,z]') + field="uwroptions.wrap", + desc="Check if interpolation should wrap in [x,y,z]", + ) reslice_mask = traits.Bool( - field='uwroptions.mask', - desc='True/False mask output image') + field="uwroptions.mask", desc="True/False mask output image" + ) out_prefix = traits.String( - 'u', - field='uwroptions.prefix', + "u", + field="uwroptions.prefix", usedefault=True, - desc='realigned and unwarped output prefix') + desc="realigned and unwarped output prefix", + ) class RealignUnwarpOutputSpec(TraitedSpec): - mean_image = File(exists=True, desc='Mean image file from the realignment & unwarping') + mean_image = File( + exists=True, desc="Mean image file from the realignment & unwarping" + ) modified_in_files = OutputMultiPath( traits.Either(traits.List(File(exists=True)), File(exists=True)), - desc=('Copies of all files passed to ' - 'in_files. 
Headers will have ' - 'been modified to align all ' - 'images with the first, or ' - 'optionally to first do that, ' - 'extract a mean image, and ' - 're-align to that mean image.')) + desc=( + "Copies of all files passed to " + "in_files. Headers will have " + "been modified to align all " + "images with the first, or " + "optionally to first do that, " + "extract a mean image, and " + "re-align to that mean image." + ), + ) realigned_unwarped_files = OutputMultiPath( traits.Either(traits.List(File(exists=True)), File(exists=True)), - desc='Realigned and unwarped files written to disc.') + desc="Realigned and unwarped files written to disc.", + ) realignment_parameters = OutputMultiPath( - File(exists=True), - desc='Estimated translation and rotation parameters') + File(exists=True), desc="Estimated translation and rotation parameters" + ) class RealignUnwarp(SPMCommand): @@ -602,56 +738,54 @@ class RealignUnwarp(SPMCommand): input_spec = RealignUnwarpInputSpec output_spec = RealignUnwarpOutputSpec - _jobtype = 'spatial' - _jobname = 'realignunwarp' + _jobtype = "spatial" + _jobname = "realignunwarp" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if opt == 'in_files': - return scans_for_fnames(ensure_list(val), - keep4d=False, - separate_sessions=True) + if opt == "in_files": + return scans_for_fnames( + ensure_list(val), keep4d=False, separate_sessions=True + ) return super(RealignUnwarp, self)._format_arg(opt, spec, val) - def _parse_inputs(self, skip=()): spmdict = super(RealignUnwarp, self)._parse_inputs(skip=())[0] if isdefined(self.inputs.phase_map): - pmscan = spmdict['data']['pmscan'] + pmscan = spmdict["data"]["pmscan"] else: - pmscan = '' + pmscan = "" if isdefined(self.inputs.in_files): if isinstance(self.inputs.in_files, list): - data = [dict(scans = sess, pmscan = pmscan) - for sess in spmdict['data']['scans']] + data = [ + dict(scans=sess, pmscan=pmscan) for sess in spmdict["data"]["scans"] + ] else: - data = [dict(scans = spmdict['data']['scans'], pmscan = pmscan)] + data = [dict(scans=spmdict["data"]["scans"], pmscan=pmscan)] - spmdict['data'] = data + spmdict["data"] = data return [spmdict] - def _list_outputs(self): outputs = self._outputs().get() resliced_all = self.inputs.reslice_which[0] > 0 resliced_mean = self.inputs.reslice_which[1] > 0 if isdefined(self.inputs.in_files): - outputs['realignment_parameters'] = [] + outputs["realignment_parameters"] = [] for imgf in self.inputs.in_files: if isinstance(imgf, list): tmp_imgf = imgf[0] else: tmp_imgf = imgf - outputs['realignment_parameters'].append(fname_presuffix(tmp_imgf, - prefix='rp_', - suffix='.txt', - use_ext=False)) + outputs["realignment_parameters"].append( + fname_presuffix(tmp_imgf, prefix="rp_", suffix=".txt", use_ext=False) + ) if not isinstance(imgf, list) and func_is_3d(imgf): break @@ -661,21 +795,21 @@ def _list_outputs(self): first_image = self.inputs.in_files[0] if resliced_mean: - outputs['mean_image'] = fname_presuffix(first_image, prefix='meanu') + outputs["mean_image"] = fname_presuffix(first_image, prefix="meanu") if resliced_all: - outputs['realigned_unwarped_files'] = [] + outputs["realigned_unwarped_files"] = [] for idx, imgf in enumerate(ensure_list(self.inputs.in_files)): realigned_run = [] if isinstance(imgf, list): for i, inner_imgf in enumerate(ensure_list(imgf)): - newfile = fname_presuffix(inner_imgf, - prefix=self.inputs.out_prefix) + newfile = fname_presuffix( + inner_imgf, prefix=self.inputs.out_prefix + ) realigned_run.append(newfile) 
else: - realigned_run = fname_presuffix(imgf, - prefix=self.inputs.out_prefix) - outputs['realigned_unwarped_files'].append(realigned_run) + realigned_run = fname_presuffix(imgf, prefix=self.inputs.out_prefix) + outputs["realigned_unwarped_files"].append(realigned_run) return outputs @@ -683,76 +817,83 @@ class CoregisterInputSpec(SPMCommandInputSpec): target = ImageFileSPM( exists=True, mandatory=True, - field='ref', - desc='reference file to register to', - copyfile=False) + field="ref", + desc="reference file to register to", + copyfile=False, + ) source = InputMultiPath( ImageFileSPM(exists=True), - field='source', - desc='file to register to target', + field="source", + desc="file to register to target", copyfile=True, - mandatory=True) + mandatory=True, + ) jobtype = traits.Enum( - 'estwrite', - 'estimate', - 'write', - desc='one of: estimate, write, estwrite', - usedefault=True) + "estwrite", + "estimate", + "write", + desc="one of: estimate, write, estwrite", + usedefault=True, + ) apply_to_files = InputMultiPath( File(exists=True), - field='other', - desc='files to apply transformation to', - copyfile=True) + field="other", + desc="files to apply transformation to", + copyfile=True, + ) cost_function = traits.Enum( - 'mi', - 'nmi', - 'ecc', - 'ncc', - field='eoptions.cost_fun', + "mi", + "nmi", + "ecc", + "ncc", + field="eoptions.cost_fun", desc="""cost function, one of: 'mi' - Mutual Information, 'nmi' - Normalised Mutual Information, 'ecc' - Entropy Correlation Coefficient, - 'ncc' - Normalised Cross Correlation""") + 'ncc' - Normalised Cross Correlation""", + ) fwhm = traits.List( traits.Float(), minlen=2, maxlen=2, - field='eoptions.fwhm', - desc='gaussian smoothing kernel width (mm)') + field="eoptions.fwhm", + desc="gaussian smoothing kernel width (mm)", + ) separation = traits.List( - traits.Float(), field='eoptions.sep', desc='sampling separation in mm') + traits.Float(), field="eoptions.sep", desc="sampling separation in mm" + ) tolerance = traits.List( traits.Float(), - field='eoptions.tol', - desc='acceptable tolerance for each of 12 params') + field="eoptions.tol", + desc="acceptable tolerance for each of 12 params", + ) write_interp = traits.Range( low=0, high=7, - field='roptions.interp', - desc=('degree of b-spline used for ' - 'interpolation')) + field="roptions.interp", + desc=("degree of b-spline used for " "interpolation"), + ) write_wrap = traits.List( traits.Int(), minlen=3, maxlen=3, - field='roptions.wrap', - desc=('Check if interpolation should wrap in ' - '[x,y,z]')) - write_mask = traits.Bool( - field='roptions.mask', desc='True/False mask output image') + field="roptions.wrap", + desc=("Check if interpolation should wrap in " "[x,y,z]"), + ) + write_mask = traits.Bool(field="roptions.mask", desc="True/False mask output image") out_prefix = traits.String( - 'r', - field='roptions.prefix', - usedefault=True, - desc='coregistered output prefix') + "r", field="roptions.prefix", usedefault=True, desc="coregistered output prefix" + ) class CoregisterOutputSpec(TraitedSpec): coregistered_source = OutputMultiPath( - File(exists=True), desc='Coregistered source files') + File(exists=True), desc="Coregistered source files" + ) coregistered_files = OutputMultiPath( - File(exists=True), desc='Coregistered other files') + File(exists=True), desc="Coregistered other files" + ) class Coregister(SPMCommand): @@ -773,18 +914,17 @@ class Coregister(SPMCommand): input_spec = CoregisterInputSpec output_spec = CoregisterOutputSpec - _jobtype = 'spatial' - _jobname = 'coreg' 
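# Similarly, a minimal usage sketch of the Coregister interface reformatted
# here, as commented Python (placeholder filenames; assumes a configured
# SPM/MATLAB setup; the default jobtype "estwrite" both estimates and writes):
#
#     import nipype.interfaces.spm as spm
#     coreg = spm.Coregister()
#     coreg.inputs.target = "functional.nii"   # hypothetical reference image
#     coreg.inputs.source = "structural.nii"   # hypothetical moving image
#     coreg.run()  # doctest: +SKIP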
+ _jobtype = "spatial" + _jobname = "coreg" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if (opt == 'target' - or (opt == 'source' and self.inputs.jobtype != "write")): + if opt == "target" or (opt == "source" and self.inputs.jobtype != "write"): return scans_for_fnames(ensure_list(val), keep4d=True) - if opt == 'apply_to_files': + if opt == "apply_to_files": return np.array(ensure_list(val), dtype=object) - if opt == 'source' and self.inputs.jobtype == "write": + if opt == "source" and self.inputs.jobtype == "write": if isdefined(self.inputs.apply_to_files): return scans_for_fnames(val + self.inputs.apply_to_files) else: @@ -795,32 +935,38 @@ def _parse_inputs(self): """validate spm coregister options if set to None ignore """ if self.inputs.jobtype == "write": - einputs = (super(Coregister, self) - ._parse_inputs(skip=('jobtype', 'apply_to_files'))) + einputs = super(Coregister, self)._parse_inputs( + skip=("jobtype", "apply_to_files") + ) else: - einputs = super(Coregister, self)._parse_inputs(skip=('jobtype')) + einputs = super(Coregister, self)._parse_inputs(skip=("jobtype")) jobtype = self.inputs.jobtype - return [{'%s' % (jobtype): einputs[0]}] + return [{"%s" % (jobtype): einputs[0]}] def _list_outputs(self): outputs = self._outputs().get() if self.inputs.jobtype == "estimate": if isdefined(self.inputs.apply_to_files): - outputs['coregistered_files'] = self.inputs.apply_to_files - outputs['coregistered_source'] = self.inputs.source - elif (self.inputs.jobtype == "write" - or self.inputs.jobtype == "estwrite"): + outputs["coregistered_files"] = self.inputs.apply_to_files + outputs["coregistered_source"] = self.inputs.source + elif self.inputs.jobtype == "write" or self.inputs.jobtype == "estwrite": if isdefined(self.inputs.apply_to_files): - outputs['coregistered_files'] = [] + outputs["coregistered_files"] = [] for imgf in ensure_list(self.inputs.apply_to_files): - (outputs['coregistered_files'].append( - fname_presuffix(imgf, prefix=self.inputs.out_prefix))) + ( + outputs["coregistered_files"].append( + fname_presuffix(imgf, prefix=self.inputs.out_prefix) + ) + ) - outputs['coregistered_source'] = [] + outputs["coregistered_source"] = [] for imgf in ensure_list(self.inputs.source): - (outputs['coregistered_source'].append( - fname_presuffix(imgf, prefix=self.inputs.out_prefix))) + ( + outputs["coregistered_source"].append( + fname_presuffix(imgf, prefix=self.inputs.out_prefix) + ) + ) return outputs @@ -828,108 +974,106 @@ def _list_outputs(self): class NormalizeInputSpec(SPMCommandInputSpec): template = File( exists=True, - field='eoptions.template', - desc='template file to normalize to', + field="eoptions.template", + desc="template file to normalize to", mandatory=True, - xor=['parameter_file'], - copyfile=False) + xor=["parameter_file"], + copyfile=False, + ) source = InputMultiPath( ImageFileSPM(exists=True), - field='subj.source', - xor=['parameter_file'], - desc='file to normalize to template', + field="subj.source", + xor=["parameter_file"], + desc="file to normalize to template", mandatory=True, - copyfile=True) + copyfile=True, + ) jobtype = traits.Enum( - 'estwrite', - 'est', - 'write', - usedefault=True, - desc='Estimate, Write or do both') + "estwrite", "est", "write", usedefault=True, desc="Estimate, Write or do both" + ) apply_to_files = InputMultiPath( traits.Either(File(exists=True), traits.List(File(exists=True))), - field='subj.resample', - desc='files to apply transformation to', - copyfile=True) + 
field="subj.resample", + desc="files to apply transformation to", + copyfile=True, + ) parameter_file = File( - field='subj.matname', + field="subj.matname", mandatory=True, - xor=['source', 'template'], - desc='normalization parameter file*_sn.mat', - copyfile=False) + xor=["source", "template"], + desc="normalization parameter file*_sn.mat", + copyfile=False, + ) source_weight = File( - field='subj.wtsrc', - desc='name of weighting image for source', - copyfile=False) + field="subj.wtsrc", desc="name of weighting image for source", copyfile=False + ) template_weight = File( - field='eoptions.weight', - desc='name of weighting image for template', - copyfile=False) + field="eoptions.weight", + desc="name of weighting image for template", + copyfile=False, + ) source_image_smoothing = traits.Float( - field='eoptions.smosrc', desc='source smoothing') + field="eoptions.smosrc", desc="source smoothing" + ) template_image_smoothing = traits.Float( - field='eoptions.smoref', desc='template smoothing') + field="eoptions.smoref", desc="template smoothing" + ) affine_regularization_type = traits.Enum( - 'mni', - 'size', - 'none', - field='eoptions.regtype', - desc='mni, size, none') + "mni", "size", "none", field="eoptions.regtype", desc="mni, size, none" + ) DCT_period_cutoff = traits.Float( - field='eoptions.cutoff', desc='Cutoff of for DCT bases') + field="eoptions.cutoff", desc="Cutoff of for DCT bases" + ) nonlinear_iterations = traits.Int( - field='eoptions.nits', - desc=('Number of iterations of ' - 'nonlinear warping')) + field="eoptions.nits", desc=("Number of iterations of " "nonlinear warping") + ) nonlinear_regularization = traits.Float( - field='eoptions.reg', - desc=('the amount of the ' - 'regularization for the ' - 'nonlinear part of the ' - 'normalization')) + field="eoptions.reg", + desc=( + "the amount of the " + "regularization for the " + "nonlinear part of the " + "normalization" + ), + ) write_preserve = traits.Bool( - field='roptions.preserve', - desc='True/False warped images are modulated') + field="roptions.preserve", desc="True/False warped images are modulated" + ) write_bounding_box = traits.List( traits.List(traits.Float(), minlen=3, maxlen=3), - field='roptions.bb', + field="roptions.bb", minlen=2, maxlen=2, - desc='3x2-element list of lists') + desc="3x2-element list of lists", + ) write_voxel_sizes = traits.List( - traits.Float(), - field='roptions.vox', - minlen=3, - maxlen=3, - desc='3-element list') + traits.Float(), field="roptions.vox", minlen=3, maxlen=3, desc="3-element list" + ) write_interp = traits.Range( low=0, high=7, - field='roptions.interp', - desc=('degree of b-spline used for ' - 'interpolation')) + field="roptions.interp", + desc=("degree of b-spline used for " "interpolation"), + ) write_wrap = traits.List( traits.Int(), - field='roptions.wrap', - desc=('Check if interpolation should wrap in ' - '[x,y,z] - list of bools')) + field="roptions.wrap", + desc=("Check if interpolation should wrap in " "[x,y,z] - list of bools"), + ) out_prefix = traits.String( - 'w', - field='roptions.prefix', - usedefault=True, - desc='normalized output prefix') + "w", field="roptions.prefix", usedefault=True, desc="normalized output prefix" + ) class NormalizeOutputSpec(TraitedSpec): normalization_parameters = OutputMultiPath( File(exists=True), - desc=('MAT files containing ' - 'the normalization ' - 'parameters')) + desc=("MAT files containing " "the normalization " "parameters"), + ) normalized_source = OutputMultiPath( - File(exists=True), desc='Normalized 
source files') - normalized_files = OutputMultiPath( - File(exists=True), desc='Normalized other files') + File(exists=True), desc="Normalized source files" + ) + normalized_files = OutputMultiPath(File(exists=True), desc="Normalized other files") class Normalize(SPMCommand): @@ -948,82 +1092,81 @@ class Normalize(SPMCommand): input_spec = NormalizeInputSpec output_spec = NormalizeOutputSpec - _jobtype = 'spatial' - _jobname = 'normalise' + _jobtype = "spatial" + _jobname = "normalise" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if opt == 'template': + if opt == "template": return scans_for_fname(ensure_list(val)) - if opt == 'source': + if opt == "source": return scans_for_fname(ensure_list(val)) - if opt == 'apply_to_files': + if opt == "apply_to_files": return scans_for_fnames(ensure_list(val)) - if opt == 'parameter_file': + if opt == "parameter_file": return np.array([simplify_list(val)], dtype=object) - if opt in ['write_wrap']: + if opt in ["write_wrap"]: if len(val) != 3: - raise ValueError('%s must have 3 elements' % opt) + raise ValueError("%s must have 3 elements" % opt) return super(Normalize, self)._format_arg(opt, spec, val) def _parse_inputs(self): """Validate spm normalize options if set to None ignore """ - einputs = super( - Normalize, self)._parse_inputs(skip=('jobtype', 'apply_to_files')) + einputs = super(Normalize, self)._parse_inputs( + skip=("jobtype", "apply_to_files") + ) if isdefined(self.inputs.apply_to_files): inputfiles = deepcopy(self.inputs.apply_to_files) if isdefined(self.inputs.source): inputfiles.extend(self.inputs.source) - einputs[0]['subj']['resample'] = scans_for_fnames(inputfiles) + einputs[0]["subj"]["resample"] = scans_for_fnames(inputfiles) jobtype = self.inputs.jobtype - if jobtype in ['estwrite', 'write']: + if jobtype in ["estwrite", "write"]: if not isdefined(self.inputs.apply_to_files): if isdefined(self.inputs.source): - einputs[0]['subj']['resample'] = scans_for_fname( - self.inputs.source) - return [{'%s' % (jobtype): einputs[0]}] + einputs[0]["subj"]["resample"] = scans_for_fname(self.inputs.source) + return [{"%s" % (jobtype): einputs[0]}] def _list_outputs(self): outputs = self._outputs().get() jobtype = self.inputs.jobtype - if jobtype.startswith('est'): - outputs['normalization_parameters'] = [] + if jobtype.startswith("est"): + outputs["normalization_parameters"] = [] for imgf in ensure_list(self.inputs.source): - outputs['normalization_parameters'].append( - fname_presuffix(imgf, suffix='_sn.mat', use_ext=False)) - outputs['normalization_parameters'] = simplify_list( - outputs['normalization_parameters']) + outputs["normalization_parameters"].append( + fname_presuffix(imgf, suffix="_sn.mat", use_ext=False) + ) + outputs["normalization_parameters"] = simplify_list( + outputs["normalization_parameters"] + ) if self.inputs.jobtype == "estimate": if isdefined(self.inputs.apply_to_files): - outputs['normalized_files'] = self.inputs.apply_to_files - outputs['normalized_source'] = self.inputs.source - elif 'write' in self.inputs.jobtype: - if (isdefined(self.inputs.write_preserve) - and self.inputs.write_preserve): - prefixNorm = ''.join(['m', self.inputs.out_prefix]) + outputs["normalized_files"] = self.inputs.apply_to_files + outputs["normalized_source"] = self.inputs.source + elif "write" in self.inputs.jobtype: + if isdefined(self.inputs.write_preserve) and self.inputs.write_preserve: + prefixNorm = "".join(["m", self.inputs.out_prefix]) else: prefixNorm = self.inputs.out_prefix - 
outputs['normalized_files'] = []
+        outputs["normalized_files"] = []
         if isdefined(self.inputs.apply_to_files):
             filelist = ensure_list(self.inputs.apply_to_files)
             for f in filelist:
                 if isinstance(f, list):
-                    run = [
-                        fname_presuffix(in_f, prefix=prefixNorm)
-                        for in_f in f
-                    ]
+                    run = [fname_presuffix(in_f, prefix=prefixNorm) for in_f in f]
                 else:
                     run = [fname_presuffix(f, prefix=prefixNorm)]
-                outputs['normalized_files'].extend(run)
+                outputs["normalized_files"].extend(run)
         if isdefined(self.inputs.source):
-            outputs['normalized_source'] = []
+            outputs["normalized_source"] = []
             for imgf in ensure_list(self.inputs.source):
-                outputs['normalized_source'].append(
-                    fname_presuffix(imgf, prefix=prefixNorm))
+                outputs["normalized_source"].append(
+                    fname_presuffix(imgf, prefix=prefixNorm)
+                )
         return outputs
@@ -1031,32 +1174,34 @@ def _list_outputs(self):
 class Normalize12InputSpec(SPMCommandInputSpec):
     image_to_align = ImageFileSPM(
         exists=True,
-        field='subj.vol',
-        desc=('file to estimate normalization parameters '
-              'with'),
-        xor=['deformation_file'],
+        field="subj.vol",
+        desc=("file to estimate normalization parameters " "with"),
+        xor=["deformation_file"],
         mandatory=True,
-        copyfile=True)
+        copyfile=True,
+    )
     apply_to_files = InputMultiPath(
         traits.Either(
-            ImageFileSPM(exists=True), traits.List(ImageFileSPM(exists=True))),
-        field='subj.resample',
-        desc='files to apply transformation to',
-        copyfile=True)
+            ImageFileSPM(exists=True), traits.List(ImageFileSPM(exists=True))
+        ),
+        field="subj.resample",
+        desc="files to apply transformation to",
+        copyfile=True,
+    )
     deformation_file = ImageFileSPM(
-        field='subj.def',
+        field="subj.def",
         mandatory=True,
-        xor=['image_to_align', 'tpm'],
+        xor=["image_to_align", "tpm"],
         copyfile=False,
-        desc=('file y_*.nii containing 3 deformation '
-              'fields for the deformation in x, y and z '
-              'dimension'))
+        desc=(
+            "file y_*.nii containing 3 deformation "
+            "fields for the deformation in x, y and z "
+            "dimension"
+        ),
+    )
     jobtype = traits.Enum(
-        'estwrite',
-        'est',
-        'write',
-        usedefault=True,
-        desc='Estimate, Write or do Both')
+        "estwrite", "est", "write", usedefault=True, desc="Estimate, Write or do Both"
+    )
     bias_regularization = traits.Enum(
         0,
         0.00001,
@@ -1066,8 +1211,9 @@ class Normalize12InputSpec(SPMCommandInputSpec):
         0.1,
         1,
         10,
-        field='eoptions.biasreg',
-        desc='no(0) - extremely heavy (10)')
+        field="eoptions.biasreg",
+        desc="no(0) - extremely heavy (10)",
+    )
     bias_fwhm = traits.Enum(
         30,
         40,
@@ -1082,75 +1228,82 @@ class Normalize12InputSpec(SPMCommandInputSpec):
         130,
         140,
         150,
-        'Inf',
-        field='eoptions.biasfwhm',
-        desc='FWHM of Gaussian smoothness of bias')
+        "Inf",
+        field="eoptions.biasfwhm",
+        desc="FWHM of Gaussian smoothness of bias",
+    )
     tpm = File(
         exists=True,
-        field='eoptions.tpm',
-        desc=('template in form of tissue probablitiy maps to '
-              'normalize to'),
-        xor=['deformation_file'],
-        copyfile=False)
+        field="eoptions.tpm",
+        desc=("template in form of tissue probability maps to " "normalize to"),
+        xor=["deformation_file"],
+        copyfile=False,
+    )
     affine_regularization_type = traits.Enum(
-        'mni', 'size', 'none', field='eoptions.affreg', desc='mni, size, none')
+        "mni", "size", "none", field="eoptions.affreg", desc="mni, size, none"
+    )
     warping_regularization = traits.List(
         traits.Float(),
-        field='eoptions.reg',
+        field="eoptions.reg",
         minlen=5,
         maxlen=5,
-        desc=('controls balance between '
-              'parameters and data'))
+        desc=("controls balance between " "parameters and data"),
+    )
     smoothness = traits.Float(
-
field='eoptions.fwhm', - desc=('value (in mm) to smooth the data before ' - 'normalization')) + field="eoptions.fwhm", + desc=("value (in mm) to smooth the data before " "normalization"), + ) sampling_distance = traits.Float( - field='eoptions.samp', - desc=('Sampling distance on data for ' - 'parameter estimation')) + field="eoptions.samp", + desc=("Sampling distance on data for " "parameter estimation"), + ) write_bounding_box = traits.List( traits.List(traits.Float(), minlen=3, maxlen=3), - field='woptions.bb', + field="woptions.bb", minlen=2, maxlen=2, - desc=('3x2-element list of lists ' - 'representing the bounding box ' - '(in mm) to be written')) + desc=( + "3x2-element list of lists " + "representing the bounding box " + "(in mm) to be written" + ), + ) write_voxel_sizes = traits.List( traits.Float(), - field='woptions.vox', + field="woptions.vox", minlen=3, maxlen=3, - desc=('3-element list representing the ' - 'voxel sizes (in mm) of the written ' - 'normalised images')) + desc=( + "3-element list representing the " + "voxel sizes (in mm) of the written " + "normalised images" + ), + ) write_interp = traits.Range( low=0, high=7, - field='woptions.interp', - desc=('degree of b-spline used for ' - 'interpolation')) + field="woptions.interp", + desc=("degree of b-spline used for " "interpolation"), + ) out_prefix = traits.String( - 'w', - field='woptions.prefix', - usedefault=True, - desc='Normalized output prefix') + "w", field="woptions.prefix", usedefault=True, desc="Normalized output prefix" + ) class Normalize12OutputSpec(TraitedSpec): deformation_field = OutputMultiPath( File(exists=True), - desc=('NIfTI file containing 3 ' - 'deformation fields for the ' - 'deformation in x, y and z ' - 'dimension')) + desc=( + "NIfTI file containing 3 " + "deformation fields for the " + "deformation in x, y and z " + "dimension" + ), + ) normalized_image = OutputMultiPath( - File(exists=True), - desc=('Normalized file that needed to ' - 'be aligned')) - normalized_files = OutputMultiPath( - File(exists=True), desc='Normalized other files') + File(exists=True), desc=("Normalized file that needed to " "be aligned") + ) + normalized_files = OutputMultiPath(File(exists=True), desc="Normalized other files") class Normalize12(SPMCommand): @@ -1174,74 +1327,75 @@ class Normalize12(SPMCommand): input_spec = Normalize12InputSpec output_spec = Normalize12OutputSpec - _jobtype = 'spatial' - _jobname = 'normalise' + _jobtype = "spatial" + _jobname = "normalise" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if opt == 'tpm': + if opt == "tpm": return scans_for_fname(ensure_list(val)) - if opt == 'image_to_align': + if opt == "image_to_align": return scans_for_fname(ensure_list(val)) - if opt == 'apply_to_files': + if opt == "apply_to_files": return scans_for_fnames(ensure_list(val)) - if opt == 'deformation_file': + if opt == "deformation_file": return np.array([simplify_list(val)], dtype=object) - if opt in ['nonlinear_regularization']: + if opt in ["nonlinear_regularization"]: if len(val) != 5: - raise ValueError('%s must have 5 elements' % opt) + raise ValueError("%s must have 5 elements" % opt) return super(Normalize12, self)._format_arg(opt, spec, val) def _parse_inputs(self, skip=()): """validate spm normalize options if set to None ignore """ - einputs = super( - Normalize12, - self)._parse_inputs(skip=('jobtype', 'apply_to_files')) + einputs = super(Normalize12, self)._parse_inputs( + skip=("jobtype", "apply_to_files") + ) if 
isdefined(self.inputs.apply_to_files): inputfiles = deepcopy(self.inputs.apply_to_files) if isdefined(self.inputs.image_to_align): inputfiles.extend([self.inputs.image_to_align]) - einputs[0]['subj']['resample'] = scans_for_fnames(inputfiles) + einputs[0]["subj"]["resample"] = scans_for_fnames(inputfiles) jobtype = self.inputs.jobtype - if jobtype in ['estwrite', 'write']: + if jobtype in ["estwrite", "write"]: if not isdefined(self.inputs.apply_to_files): if isdefined(self.inputs.image_to_align): - einputs[0]['subj']['resample'] = scans_for_fname( - self.inputs.image_to_align) - return [{'%s' % (jobtype): einputs[0]}] + einputs[0]["subj"]["resample"] = scans_for_fname( + self.inputs.image_to_align + ) + return [{"%s" % (jobtype): einputs[0]}] def _list_outputs(self): outputs = self._outputs().get() jobtype = self.inputs.jobtype - if jobtype.startswith('est'): - outputs['deformation_field'] = [] + if jobtype.startswith("est"): + outputs["deformation_field"] = [] for imgf in ensure_list(self.inputs.image_to_align): - outputs['deformation_field'].append( - fname_presuffix(imgf, prefix='y_')) - outputs['deformation_field'] = simplify_list( - outputs['deformation_field']) + outputs["deformation_field"].append(fname_presuffix(imgf, prefix="y_")) + outputs["deformation_field"] = simplify_list(outputs["deformation_field"]) if self.inputs.jobtype == "estimate": if isdefined(self.inputs.apply_to_files): - outputs['normalized_files'] = self.inputs.apply_to_files - outputs['normalized_image'] = fname_presuffix( - self.inputs.image_to_align, prefix='w') - elif 'write' in self.inputs.jobtype: - outputs['normalized_files'] = [] + outputs["normalized_files"] = self.inputs.apply_to_files + outputs["normalized_image"] = fname_presuffix( + self.inputs.image_to_align, prefix="w" + ) + elif "write" in self.inputs.jobtype: + outputs["normalized_files"] = [] if isdefined(self.inputs.apply_to_files): filelist = ensure_list(self.inputs.apply_to_files) for f in filelist: if isinstance(f, list): - run = [fname_presuffix(in_f, prefix='w') for in_f in f] + run = [fname_presuffix(in_f, prefix="w") for in_f in f] else: - run = [fname_presuffix(f, prefix='w')] - outputs['normalized_files'].extend(run) + run = [fname_presuffix(f, prefix="w")] + outputs["normalized_files"].extend(run) if isdefined(self.inputs.image_to_align): - outputs['normalized_image'] = fname_presuffix( - self.inputs.image_to_align, prefix='w') + outputs["normalized_image"] = fname_presuffix( + self.inputs.image_to_align, prefix="w" + ) return outputs @@ -1249,17 +1403,17 @@ def _list_outputs(self): class SegmentInputSpec(SPMCommandInputSpec): data = InputMultiPath( ImageFileSPM(exists=True), - field='data', - desc='one scan per subject', + field="data", + desc="one scan per subject", copyfile=False, - mandatory=True) + mandatory=True, + ) gm_output_type = traits.List( traits.Bool(), minlen=3, maxlen=3, - field='output.GM', - desc= - """Options to produce grey matter images: c1*.img, wc1*.img and mwc1*.img. + field="output.GM", + desc="""Options to produce grey matter images: c1*.img, wc1*.img and mwc1*.img. 
None: [False,False,False], Native Space: [False,False,True], Unmodulated Normalised: [False,True,False], @@ -1267,12 +1421,13 @@ class SegmentInputSpec(SPMCommandInputSpec): Native + Unmodulated Normalised: [False,True,True], Native + Modulated Normalised: [True,False,True], Native + Modulated + Unmodulated: [True,True,True], - Modulated + Unmodulated Normalised: [True,True,False]""") + Modulated + Unmodulated Normalised: [True,True,False]""", + ) wm_output_type = traits.List( traits.Bool(), minlen=3, maxlen=3, - field='output.WM', + field="output.WM", desc=""" Options to produce white matter images: c2*.img, wc2*.img and mwc2*.img. None: [False,False,False], @@ -1282,12 +1437,13 @@ class SegmentInputSpec(SPMCommandInputSpec): Native + Unmodulated Normalised: [False,True,True], Native + Modulated Normalised: [True,False,True], Native + Modulated + Unmodulated: [True,True,True], - Modulated + Unmodulated Normalised: [True,True,False]""") + Modulated + Unmodulated Normalised: [True,True,False]""", + ) csf_output_type = traits.List( traits.Bool(), minlen=3, maxlen=3, - field='output.CSF', + field="output.CSF", desc=""" Options to produce CSF images: c3*.img, wc3*.img and mwc3*.img. None: [False,False,False], @@ -1297,45 +1453,48 @@ class SegmentInputSpec(SPMCommandInputSpec): Native + Unmodulated Normalised: [False,True,True], Native + Modulated Normalised: [True,False,True], Native + Modulated + Unmodulated: [True,True,True], - Modulated + Unmodulated Normalised: [True,True,False]""") + Modulated + Unmodulated Normalised: [True,True,False]""", + ) save_bias_corrected = traits.Bool( - field='output.biascor', - desc=('True/False produce a bias ' - 'corrected image')) + field="output.biascor", desc=("True/False produce a bias " "corrected image") + ) clean_masks = traits.Enum( - 'no', - 'light', - 'thorough', - field='output.cleanup', - desc=("clean using estimated brain mask " - "('no','light','thorough')")) + "no", + "light", + "thorough", + field="output.cleanup", + desc=("clean using estimated brain mask " "('no','light','thorough')"), + ) tissue_prob_maps = traits.List( File(exists=True), - field='opts.tpm', - desc=('list of gray, white & csf prob. ' - '(opt,)')) + field="opts.tpm", + desc=("list of gray, white & csf prob. 
" "(opt,)"), + ) gaussians_per_class = traits.List( traits.Int(), - field='opts.ngaus', - desc=('num Gaussians capture intensity ' - 'distribution')) + field="opts.ngaus", + desc=("num Gaussians capture intensity " "distribution"), + ) affine_regularization = traits.Enum( - 'mni', - 'eastern', - 'subj', - 'none', - '', - field='opts.regtype', - desc=('Possible options: "mni", ' - '"eastern", "subj", "none" ' - '(no reguralisation), "" ' - '(no affine registration)')) + "mni", + "eastern", + "subj", + "none", + "", + field="opts.regtype", + desc=( + 'Possible options: "mni", ' + '"eastern", "subj", "none" ' + '(no reguralisation), "" ' + "(no affine registration)" + ), + ) warping_regularization = traits.Float( - field='opts.warpreg', - desc=('Controls balance between ' - 'parameters and data')) + field="opts.warpreg", desc=("Controls balance between " "parameters and data") + ) warp_frequency_cutoff = traits.Float( - field='opts.warpco', desc='Cutoff of DCT bases') + field="opts.warpco", desc="Cutoff of DCT bases" + ) bias_regularization = traits.Enum( 0, 0.00001, @@ -1345,8 +1504,9 @@ class SegmentInputSpec(SPMCommandInputSpec): 0.1, 1, 10, - field='opts.biasreg', - desc='no(0) - extremely heavy (10)') + field="opts.biasreg", + desc="no(0) - extremely heavy (10)", + ) bias_fwhm = traits.Enum( 30, 40, @@ -1359,43 +1519,39 @@ class SegmentInputSpec(SPMCommandInputSpec): 110, 120, 130, - 'Inf', - field='opts.biasfwhm', - desc='FWHM of Gaussian smoothness of bias') + "Inf", + field="opts.biasfwhm", + desc="FWHM of Gaussian smoothness of bias", + ) sampling_distance = traits.Float( - field='opts.samp', - desc=('Sampling distance on data for ' - 'parameter estimation')) + field="opts.samp", + desc=("Sampling distance on data for " "parameter estimation"), + ) mask_image = File( exists=True, - field='opts.msk', - desc='Binary image to restrict parameter estimation ') + field="opts.msk", + desc="Binary image to restrict parameter estimation ", + ) class SegmentOutputSpec(TraitedSpec): - native_gm_image = File(desc='native space grey probability map') - normalized_gm_image = File(desc='normalized grey probability map', ) - modulated_gm_image = File( - desc=('modulated, normalized grey ' - 'probability map')) - native_wm_image = File(desc='native space white probability map') - normalized_wm_image = File(desc='normalized white probability map') - modulated_wm_image = File( - desc=('modulated, normalized white ' - 'probability map')) - native_csf_image = File(desc='native space csf probability map') - normalized_csf_image = File(desc='normalized csf probability map') - modulated_csf_image = File( - desc=('modulated, normalized csf ' - 'probability map')) + native_gm_image = File(desc="native space grey probability map") + normalized_gm_image = File(desc="normalized grey probability map",) + modulated_gm_image = File(desc=("modulated, normalized grey " "probability map")) + native_wm_image = File(desc="native space white probability map") + normalized_wm_image = File(desc="normalized white probability map") + modulated_wm_image = File(desc=("modulated, normalized white " "probability map")) + native_csf_image = File(desc="native space csf probability map") + normalized_csf_image = File(desc="normalized csf probability map") + modulated_csf_image = File(desc=("modulated, normalized csf " "probability map")) modulated_input_image = File( - deprecated='0.10', - new_name='bias_corrected_image', - desc='bias-corrected version of input image') - bias_corrected_image = File(desc='bias-corrected version of 
input image') - transformation_mat = File(exists=True, desc='Normalization transformation') - inverse_transformation_mat = File( - exists=True, desc='Inverse normalization info') + deprecated="0.10", + new_name="bias_corrected_image", + desc="bias-corrected version of input image", + ) + bias_corrected_image = File(desc="bias-corrected version of input image") + transformation_mat = File(exists=True, desc="Normalization transformation") + inverse_transformation_mat = File(exists=True, desc="Inverse normalization info") class Segment(SPMCommand): @@ -1418,30 +1574,30 @@ class Segment(SPMCommand): def __init__(self, **inputs): _local_version = SPMCommand().version - if _local_version and '12.' in _local_version: - self._jobtype = 'tools' - self._jobname = 'oldseg' + if _local_version and "12." in _local_version: + self._jobtype = "tools" + self._jobname = "oldseg" else: - self._jobtype = 'spatial' - self._jobname = 'preproc' + self._jobtype = "spatial" + self._jobname = "preproc" SPMCommand.__init__(self, **inputs) def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - clean_masks_dict = {'no': 0, 'light': 1, 'thorough': 2} + clean_masks_dict = {"no": 0, "light": 1, "thorough": 2} - if opt in ['data', 'tissue_prob_maps']: + if opt in ["data", "tissue_prob_maps"]: if isinstance(val, list): return scans_for_fnames(val) else: return scans_for_fname(val) - if 'output_type' in opt: + if "output_type" in opt: return [int(v) for v in val] - if opt == 'mask_image': + if opt == "mask_image": return scans_for_fname(val) - if opt == 'clean_masks': + if opt == "clean_masks": return clean_masks_dict[val] return super(Segment, self)._format_arg(opt, spec, val) @@ -1449,23 +1605,26 @@ def _list_outputs(self): outputs = self._outputs().get() f = self.inputs.data[0] - for tidx, tissue in enumerate(['gm', 'wm', 'csf']): - outtype = '%s_output_type' % tissue + for tidx, tissue in enumerate(["gm", "wm", "csf"]): + outtype = "%s_output_type" % tissue if isdefined(getattr(self.inputs, outtype)): - for idx, (image, prefix) in enumerate([('modulated', 'mw'), - ('normalized', - 'w'), ('native', '')]): + for idx, (image, prefix) in enumerate( + [("modulated", "mw"), ("normalized", "w"), ("native", "")] + ): if getattr(self.inputs, outtype)[idx]: - outfield = '%s_%s_image' % (image, tissue) + outfield = "%s_%s_image" % (image, tissue) outputs[outfield] = fname_presuffix( - f, prefix='%sc%d' % (prefix, tidx + 1)) - if (isdefined(self.inputs.save_bias_corrected) - and self.inputs.save_bias_corrected): - outputs['bias_corrected_image'] = fname_presuffix(f, prefix='m') - t_mat = fname_presuffix(f, suffix='_seg_sn.mat', use_ext=False) - outputs['transformation_mat'] = t_mat - invt_mat = fname_presuffix(f, suffix='_seg_inv_sn.mat', use_ext=False) - outputs['inverse_transformation_mat'] = invt_mat + f, prefix="%sc%d" % (prefix, tidx + 1) + ) + if ( + isdefined(self.inputs.save_bias_corrected) + and self.inputs.save_bias_corrected + ): + outputs["bias_corrected_image"] = fname_presuffix(f, prefix="m") + t_mat = fname_presuffix(f, suffix="_seg_sn.mat", use_ext=False) + outputs["transformation_mat"] = t_mat + invt_mat = fname_presuffix(f, suffix="_seg_inv_sn.mat", use_ext=False) + outputs["inverse_transformation_mat"] = invt_mat return outputs @@ -1474,8 +1633,9 @@ class NewSegmentInputSpec(SPMCommandInputSpec): ImageFileSPM(exists=True), mandatory=True, desc="A list of files to be segmented", - field='channel', - copyfile=False) + field="channel", + copyfile=False, + ) channel_info = 
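[Reviewer note] For orientation between hunks: a minimal usage sketch of the Segment interface reformatted above, in nipype's own doctest style (the filename is illustrative, and running it assumes a working SPM/MATLAB installation):

>>> import nipype.interfaces.spm as spm
>>> seg = spm.Segment()
>>> seg.inputs.data = 'structural.nii'
>>> seg.run()  # doctest: +SKIP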
traits.Tuple( traits.Float(), traits.Float(), @@ -1484,64 +1644,75 @@ class NewSegmentInputSpec(SPMCommandInputSpec): - bias reguralisation (0-10) - FWHM of Gaussian smoothness of bias - which maps to save (Field, Corrected) - a tuple of two boolean values""", - field='channel') + field="channel", + ) tissues = traits.List( traits.Tuple( traits.Tuple(ImageFileSPM(exists=True), traits.Int()), - traits.Int(), traits.Tuple(traits.Bool, traits.Bool), - traits.Tuple(traits.Bool, traits.Bool)), + traits.Int(), + traits.Tuple(traits.Bool, traits.Bool), + traits.Tuple(traits.Bool, traits.Bool), + ), desc="""A list of tuples (one per tissue) with the following fields: - tissue probability map (4D), 1-based index to frame - number of gaussians - which maps to save [Native, DARTEL] - a tuple of two boolean values - which maps to save [Unmodulated, Modulated] - a tuple of two boolean values""", - field='tissue') + field="tissue", + ) affine_regularization = traits.Enum( - 'mni', - 'eastern', - 'subj', - 'none', - field='warp.affreg', - desc='mni, eastern, subj, none ') + "mni", + "eastern", + "subj", + "none", + field="warp.affreg", + desc="mni, eastern, subj, none ", + ) warping_regularization = traits.Either( traits.List(traits.Float(), minlen=5, maxlen=5), traits.Float(), - field='warp.reg', - desc=('Warping regularization ' - 'parameter(s). Accepts float ' - 'or list of floats (the ' - 'latter is required by ' - 'SPM12)')) + field="warp.reg", + desc=( + "Warping regularization " + "parameter(s). Accepts float " + "or list of floats (the " + "latter is required by " + "SPM12)" + ), + ) sampling_distance = traits.Float( - field='warp.samp', - desc=('Sampling distance on data for ' - 'parameter estimation')) + field="warp.samp", + desc=("Sampling distance on data for " "parameter estimation"), + ) write_deformation_fields = traits.List( traits.Bool(), minlen=2, maxlen=2, - field='warp.write', - desc=("Which deformation fields to " - "write:[Inverse, Forward]")) + field="warp.write", + desc=("Which deformation fields to " "write:[Inverse, Forward]"), + ) class NewSegmentOutputSpec(TraitedSpec): native_class_images = traits.List( - traits.List(File(exists=True)), desc='native space probability maps') + traits.List(File(exists=True)), desc="native space probability maps" + ) dartel_input_images = traits.List( - traits.List(File(exists=True)), desc='dartel imported class images') + traits.List(File(exists=True)), desc="dartel imported class images" + ) normalized_class_images = traits.List( - traits.List(File(exists=True)), desc='normalized class images') + traits.List(File(exists=True)), desc="normalized class images" + ) modulated_class_images = traits.List( - traits.List(File(exists=True)), - desc=('modulated+normalized class ' - 'images')) + traits.List(File(exists=True)), desc=("modulated+normalized class " "images") + ) transformation_mat = OutputMultiPath( - File(exists=True), desc='Normalization transformation') + File(exists=True), desc="Normalization transformation" + ) bias_corrected_images = OutputMultiPath( - File(exists=True), desc='bias corrected images') - bias_field_images = OutputMultiPath( - File(exists=True), desc='bias field images') + File(exists=True), desc="bias corrected images" + ) + bias_field_images = OutputMultiPath(File(exists=True), desc="bias field images") forward_deformation_field = OutputMultiPath(File(exists=True)) inverse_deformation_field = OutputMultiPath(File(exists=True)) @@ -1582,12 +1753,12 @@ class NewSegment(SPMCommand): def __init__(self, **inputs): 
_local_version = SPMCommand().version - if _local_version and '12.' in _local_version: - self._jobtype = 'spatial' - self._jobname = 'preproc' + if _local_version and "12." in _local_version: + self._jobtype = "spatial" + self._jobname = "preproc" else: - self._jobtype = 'tools' - self._jobname = 'preproc8' + self._jobtype = "tools" + self._jobname = "preproc8" SPMCommand.__init__(self, **inputs) @@ -1595,119 +1766,132 @@ def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if opt in ['channel_files', 'channel_info']: + if opt in ["channel_files", "channel_info"]: # structure have to be recreated because of some weird traits error new_channel = {} - new_channel['vols'] = scans_for_fnames(self.inputs.channel_files) + new_channel["vols"] = scans_for_fnames(self.inputs.channel_files) if isdefined(self.inputs.channel_info): info = self.inputs.channel_info - new_channel['biasreg'] = info[0] - new_channel['biasfwhm'] = info[1] - new_channel['write'] = [int(info[2][0]), int(info[2][1])] + new_channel["biasreg"] = info[0] + new_channel["biasfwhm"] = info[1] + new_channel["write"] = [int(info[2][0]), int(info[2][1])] return [new_channel] - elif opt == 'tissues': + elif opt == "tissues": new_tissues = [] for tissue in val: new_tissue = {} - new_tissue['tpm'] = np.array( - [','.join([tissue[0][0], str(tissue[0][1])])], - dtype=object) - new_tissue['ngaus'] = tissue[1] - new_tissue['native'] = [int(tissue[2][0]), int(tissue[2][1])] - new_tissue['warped'] = [int(tissue[3][0]), int(tissue[3][1])] + new_tissue["tpm"] = np.array( + [",".join([tissue[0][0], str(tissue[0][1])])], dtype=object + ) + new_tissue["ngaus"] = tissue[1] + new_tissue["native"] = [int(tissue[2][0]), int(tissue[2][1])] + new_tissue["warped"] = [int(tissue[3][0]), int(tissue[3][1])] new_tissues.append(new_tissue) return new_tissues - elif opt == 'write_deformation_fields': + elif opt == "write_deformation_fields": return super(NewSegment, self)._format_arg( - opt, spec, [int(val[0]), int(val[1])]) + opt, spec, [int(val[0]), int(val[1])] + ) else: return super(NewSegment, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() - outputs['native_class_images'] = [] - outputs['dartel_input_images'] = [] - outputs['normalized_class_images'] = [] - outputs['modulated_class_images'] = [] - outputs['transformation_mat'] = [] - outputs['bias_corrected_images'] = [] - outputs['bias_field_images'] = [] - outputs['inverse_deformation_field'] = [] - outputs['forward_deformation_field'] = [] + outputs["native_class_images"] = [] + outputs["dartel_input_images"] = [] + outputs["normalized_class_images"] = [] + outputs["modulated_class_images"] = [] + outputs["transformation_mat"] = [] + outputs["bias_corrected_images"] = [] + outputs["bias_field_images"] = [] + outputs["inverse_deformation_field"] = [] + outputs["forward_deformation_field"] = [] n_classes = 5 if isdefined(self.inputs.tissues): n_classes = len(self.inputs.tissues) for i in range(n_classes): - outputs['native_class_images'].append([]) - outputs['dartel_input_images'].append([]) - outputs['normalized_class_images'].append([]) - outputs['modulated_class_images'].append([]) + outputs["native_class_images"].append([]) + outputs["dartel_input_images"].append([]) + outputs["normalized_class_images"].append([]) + outputs["modulated_class_images"].append([]) for filename in self.inputs.channel_files: pth, base, ext = split_filename(filename) if isdefined(self.inputs.tissues): for i, tissue in 
enumerate(self.inputs.tissues): if tissue[2][0]: - outputs['native_class_images'][i].append( - os.path.join(pth, "c%d%s.nii" % (i + 1, base))) + outputs["native_class_images"][i].append( + os.path.join(pth, "c%d%s.nii" % (i + 1, base)) + ) if tissue[2][1]: - outputs['dartel_input_images'][i].append( - os.path.join(pth, "rc%d%s.nii" % (i + 1, base))) + outputs["dartel_input_images"][i].append( + os.path.join(pth, "rc%d%s.nii" % (i + 1, base)) + ) if tissue[3][0]: - outputs['normalized_class_images'][i].append( - os.path.join(pth, "wc%d%s.nii" % (i + 1, base))) + outputs["normalized_class_images"][i].append( + os.path.join(pth, "wc%d%s.nii" % (i + 1, base)) + ) if tissue[3][1]: - outputs['modulated_class_images'][i].append( - os.path.join(pth, "mwc%d%s.nii" % (i + 1, base))) + outputs["modulated_class_images"][i].append( + os.path.join(pth, "mwc%d%s.nii" % (i + 1, base)) + ) else: for i in range(n_classes): - outputs['native_class_images'][i].append( - os.path.join(pth, "c%d%s.nii" % (i + 1, base))) - outputs['transformation_mat'].append( - os.path.join(pth, "%s_seg8.mat" % base)) + outputs["native_class_images"][i].append( + os.path.join(pth, "c%d%s.nii" % (i + 1, base)) + ) + outputs["transformation_mat"].append( + os.path.join(pth, "%s_seg8.mat" % base) + ) if isdefined(self.inputs.write_deformation_fields): if self.inputs.write_deformation_fields[0]: - outputs['inverse_deformation_field'].append( - os.path.join(pth, "iy_%s.nii" % base)) + outputs["inverse_deformation_field"].append( + os.path.join(pth, "iy_%s.nii" % base) + ) if self.inputs.write_deformation_fields[1]: - outputs['forward_deformation_field'].append( - os.path.join(pth, "y_%s.nii" % base)) + outputs["forward_deformation_field"].append( + os.path.join(pth, "y_%s.nii" % base) + ) if isdefined(self.inputs.channel_info): if self.inputs.channel_info[2][0]: - outputs['bias_field_images'].append( - os.path.join(pth, "BiasField_%s.nii" % (base))) + outputs["bias_field_images"].append( + os.path.join(pth, "BiasField_%s.nii" % (base)) + ) if self.inputs.channel_info[2][1]: - outputs['bias_corrected_images'].append( - os.path.join(pth, "m%s.nii" % (base))) + outputs["bias_corrected_images"].append( + os.path.join(pth, "m%s.nii" % (base)) + ) return outputs class SmoothInputSpec(SPMCommandInputSpec): in_files = InputMultiPath( ImageFileSPM(exists=True), - field='data', - desc='list of files to smooth', + field="data", + desc="list of files to smooth", mandatory=True, - copyfile=False) + copyfile=False, + ) fwhm = traits.Either( traits.List(traits.Float(), minlen=3, maxlen=3), traits.Float(), - field='fwhm', - desc='3-list of fwhm for each dimension') - data_type = traits.Int( - field='dtype', desc='Data type of the output images') + field="fwhm", + desc="3-list of fwhm for each dimension", + ) + data_type = traits.Int(field="dtype", desc="Data type of the output images") implicit_masking = traits.Bool( - field='im', desc=('A mask implied by a particular' - 'voxel value')) + field="im", desc=("A mask implied by a particular" "voxel value") + ) out_prefix = traits.String( - 's', field='prefix', usedefault=True, desc='smoothed output prefix') + "s", field="prefix", usedefault=True, desc="smoothed output prefix" + ) class SmoothOutputSpec(TraitedSpec): - smoothed_files = OutputMultiPath(File(exists=True), desc='smoothed files') + smoothed_files = OutputMultiPath(File(exists=True), desc="smoothed files") class Smooth(SPMCommand): @@ -1726,13 +1910,13 @@ class Smooth(SPMCommand): input_spec = SmoothInputSpec output_spec = SmoothOutputSpec - 
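[Reviewer note] A corresponding sketch for NewSegment: per the input spec above, channel_info packs (bias regularisation, bias FWHM, (save field, save corrected)) into one tuple. Values and filename are illustrative; assumes SPM is available:

>>> import nipype.interfaces.spm as spm
>>> seg = spm.NewSegment()
>>> seg.inputs.channel_files = 'structural.nii'
>>> seg.inputs.channel_info = (0.0001, 60, (True, True))
>>> seg.run()  # doctest: +SKIP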
_jobtype = 'spatial' - _jobname = 'smooth' + _jobtype = "spatial" + _jobname = "smooth" def _format_arg(self, opt, spec, val): - if opt in ['in_files']: + if opt in ["in_files"]: return scans_for_fnames(ensure_list(val)) - if opt == 'fwhm': + if opt == "fwhm": if not isinstance(val, list): return [val, val, val] if isinstance(val, list): @@ -1745,11 +1929,12 @@ def _format_arg(self, opt, spec, val): def _list_outputs(self): outputs = self._outputs().get() - outputs['smoothed_files'] = [] + outputs["smoothed_files"] = [] for imgf in ensure_list(self.inputs.in_files): - outputs['smoothed_files'].append( - fname_presuffix(imgf, prefix=self.inputs.out_prefix)) + outputs["smoothed_files"].append( + fname_presuffix(imgf, prefix=self.inputs.out_prefix) + ) return outputs @@ -1757,57 +1942,60 @@ class DARTELInputSpec(SPMCommandInputSpec): image_files = traits.List( traits.List(ImageFileSPM(exists=True)), desc="A list of files to be segmented", - field='warp.images', + field="warp.images", copyfile=False, - mandatory=True) + mandatory=True, + ) template_prefix = traits.Str( - 'Template', + "Template", usedefault=True, - field='warp.settings.template', - desc='Prefix for template') + field="warp.settings.template", + desc="Prefix for template", + ) regularization_form = traits.Enum( - 'Linear', - 'Membrane', - 'Bending', - field='warp.settings.rform', - desc=('Form of regularization energy ' - 'term')) + "Linear", + "Membrane", + "Bending", + field="warp.settings.rform", + desc=("Form of regularization energy " "term"), + ) iteration_parameters = traits.List( traits.Tuple( traits.Range(1, 10), traits.Tuple(traits.Float, traits.Float, traits.Float), traits.Enum(1, 2, 4, 8, 16, 32, 64, 128, 256, 512), - traits.Enum(0, 0.5, 1, 2, 4, 8, 16, 32)), + traits.Enum(0, 0.5, 1, 2, 4, 8, 16, 32), + ), minlen=3, maxlen=12, - field='warp.settings.param', + field="warp.settings.param", desc="""List of tuples for each iteration - Inner iterations - Regularization parameters - Time points for deformation model - smoothing parameter - """) + """, + ) optimization_parameters = traits.Tuple( traits.Float, traits.Range(1, 8), traits.Range(1, 8), - field='warp.settings.optim', + field="warp.settings.optim", desc=""" Optimization settings a tuple - LM regularization - cycles of multigrid solver - relaxation iterations - """) + """, + ) class DARTELOutputSpec(TraitedSpec): - final_template_file = File(exists=True, desc='final DARTEL template') + final_template_file = File(exists=True, desc="final DARTEL template") template_files = traits.List( - File(exists=True), - desc=('Templates from different stages of ' - 'iteration')) - dartel_flow_fields = traits.List( - File(exists=True), desc='DARTEL flow fields') + File(exists=True), desc=("Templates from different stages of " "iteration") + ) + dartel_flow_fields = traits.List(File(exists=True), desc="DARTEL flow fields") class DARTEL(SPMCommand): @@ -1826,52 +2014,53 @@ class DARTEL(SPMCommand): input_spec = DARTELInputSpec output_spec = DARTELOutputSpec - _jobtype = 'tools' - _jobname = 'dartel' + _jobtype = "tools" + _jobname = "dartel" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if opt in ['image_files']: + if opt in ["image_files"]: return scans_for_fnames(val, keep4d=True, separate_sessions=True) - elif opt == 'regularization_form': - mapper = {'Linear': 0, 'Membrane': 1, 'Bending': 2} + elif opt == "regularization_form": + mapper = {"Linear": 0, "Membrane": 1, "Bending": 2} return mapper[val] - elif opt == 
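[Reviewer note] The Smooth interface above accepts either a single FWHM float (expanded to all three axes by _format_arg) or a 3-element list; a minimal sketch with illustrative inputs:

>>> import nipype.interfaces.spm as spm
>>> smooth = spm.Smooth()
>>> smooth.inputs.in_files = 'functional.nii'
>>> smooth.inputs.fwhm = [4, 4, 4]
>>> smooth.run()  # doctest: +SKIP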
'iteration_parameters': + elif opt == "iteration_parameters": params = [] for param in val: new_param = {} - new_param['its'] = param[0] - new_param['rparam'] = list(param[1]) - new_param['K'] = param[2] - new_param['slam'] = param[3] + new_param["its"] = param[0] + new_param["rparam"] = list(param[1]) + new_param["K"] = param[2] + new_param["slam"] = param[3] params.append(new_param) return params - elif opt == 'optimization_parameters': + elif opt == "optimization_parameters": new_param = {} - new_param['lmreg'] = val[0] - new_param['cyc'] = val[1] - new_param['its'] = val[2] + new_param["lmreg"] = val[0] + new_param["cyc"] = val[1] + new_param["its"] = val[2] return [new_param] else: return super(DARTEL, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() - outputs['template_files'] = [] + outputs["template_files"] = [] for i in range(6): - outputs['template_files'].append( - os.path.realpath('%s_%d.nii' % (self.inputs.template_prefix, - i + 1))) - outputs['final_template_file'] = os.path.realpath( - '%s_6.nii' % self.inputs.template_prefix) - outputs['dartel_flow_fields'] = [] + outputs["template_files"].append( + os.path.realpath("%s_%d.nii" % (self.inputs.template_prefix, i + 1)) + ) + outputs["final_template_file"] = os.path.realpath( + "%s_6.nii" % self.inputs.template_prefix + ) + outputs["dartel_flow_fields"] = [] for filename in self.inputs.image_files[0]: pth, base, ext = split_filename(filename) - outputs['dartel_flow_fields'].append( - os.path.realpath('u_%s_%s%s' % - (base, self.inputs.template_prefix, ext))) + outputs["dartel_flow_fields"].append( + os.path.realpath("u_%s_%s%s" % (base, self.inputs.template_prefix, ext)) + ) return outputs @@ -1881,24 +2070,28 @@ class DARTELNorm2MNIInputSpec(SPMCommandInputSpec): copyfile=False, mandatory=True, desc="DARTEL template", - field='mni_norm.template') + field="mni_norm.template", + ) flowfield_files = InputMultiPath( ImageFileSPM(exists=True), mandatory=True, desc="DARTEL flow fields u_rc1*", - field='mni_norm.data.subjs.flowfields') + field="mni_norm.data.subjs.flowfields", + ) apply_to_files = InputMultiPath( ImageFileSPM(exists=True), desc="Files to apply the transform to", - field='mni_norm.data.subjs.images', + field="mni_norm.data.subjs.images", mandatory=True, - copyfile=False) + copyfile=False, + ) voxel_size = traits.Tuple( traits.Float, traits.Float, traits.Float, desc="Voxel sizes for output file", - field='mni_norm.vox') + field="mni_norm.vox", + ) bounding_box = traits.Tuple( traits.Float, traits.Float, @@ -1907,24 +2100,27 @@ class DARTELNorm2MNIInputSpec(SPMCommandInputSpec): traits.Float, traits.Float, desc="Voxel sizes for output file", - field='mni_norm.bb') + field="mni_norm.bb", + ) modulate = traits.Bool( - field='mni_norm.preserve', - desc=("Modulate out images - no modulation " - "preserves concentrations")) + field="mni_norm.preserve", + desc=("Modulate out images - no modulation " "preserves concentrations"), + ) fwhm = traits.Either( traits.List(traits.Float(), minlen=3, maxlen=3), traits.Float(), - field='mni_norm.fwhm', - desc='3-list of fwhm for each dimension') + field="mni_norm.fwhm", + desc="3-list of fwhm for each dimension", + ) class DARTELNorm2MNIOutputSpec(TraitedSpec): normalized_files = OutputMultiPath( - File(exists=True), desc='Normalized files in MNI space') + File(exists=True), desc="Normalized files in MNI space" + ) normalization_parameter_file = File( - exists=True, desc=('Transform parameters to MNI ' - 'space')) + exists=True, 
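[Reviewer note] For the DARTEL interface above, image_files is a list of lists — one inner list per tissue class, one image per subject — matching the warp.images field. A sketch with illustrative filenames:

>>> import nipype.interfaces.spm as spm
>>> dartel = spm.DARTEL()
>>> dartel.inputs.image_files = [['rc1s1.nii', 'rc1s2.nii'],
...                              ['rc2s1.nii', 'rc2s2.nii']]
>>> dartel.run()  # doctest: +SKIP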
desc=("Transform parameters to MNI " "space") + ) class DARTELNorm2MNI(SPMCommand): @@ -1946,23 +2142,23 @@ class DARTELNorm2MNI(SPMCommand): input_spec = DARTELNorm2MNIInputSpec output_spec = DARTELNorm2MNIOutputSpec - _jobtype = 'tools' - _jobname = 'dartel' + _jobtype = "tools" + _jobname = "dartel" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if opt in ['template_file']: + if opt in ["template_file"]: return np.array([val], dtype=object) - elif opt in ['flowfield_files']: + elif opt in ["flowfield_files"]: return scans_for_fnames(val, keep4d=True) - elif opt in ['apply_to_files']: + elif opt in ["apply_to_files"]: return scans_for_fnames(val, keep4d=True, separate_sessions=True) - elif opt == 'voxel_size': + elif opt == "voxel_size": return list(val) - elif opt == 'bounding_box': + elif opt == "bounding_box": return list(val) - elif opt == 'fwhm': + elif opt == "fwhm": if isinstance(val, list): return val else: @@ -1973,18 +2169,18 @@ def _format_arg(self, opt, spec, val): def _list_outputs(self): outputs = self._outputs().get() pth, base, ext = split_filename(self.inputs.template_file) - outputs['normalization_parameter_file'] = os.path.realpath( - base + '_2mni.mat') - outputs['normalized_files'] = [] + outputs["normalization_parameter_file"] = os.path.realpath(base + "_2mni.mat") + outputs["normalized_files"] = [] prefix = "w" if isdefined(self.inputs.modulate) and self.inputs.modulate: - prefix = 'm' + prefix + prefix = "m" + prefix if not isdefined(self.inputs.fwhm) or self.inputs.fwhm > 0: - prefix = 's' + prefix + prefix = "s" + prefix for filename in self.inputs.apply_to_files: pth, base, ext = split_filename(filename) - outputs['normalized_files'].append( - os.path.realpath('%s%s%s' % (prefix, base, ext))) + outputs["normalized_files"].append( + os.path.realpath("%s%s%s" % (prefix, base, ext)) + ) return outputs @@ -1994,31 +2190,33 @@ class CreateWarpedInputSpec(SPMCommandInputSpec): ImageFileSPM(exists=True), mandatory=True, desc="A list of files to be warped", - field='crt_warped.images', - copyfile=False) + field="crt_warped.images", + copyfile=False, + ) flowfield_files = InputMultiPath( ImageFileSPM(exists=True), copyfile=False, desc="DARTEL flow fields u_rc1*", - field='crt_warped.flowfields', - mandatory=True) + field="crt_warped.flowfields", + mandatory=True, + ) iterations = traits.Range( low=0, high=9, - desc=("The number of iterations: log2(number of " - "time steps)"), - field='crt_warped.K') + desc=("The number of iterations: log2(number of " "time steps)"), + field="crt_warped.K", + ) interp = traits.Range( low=0, high=7, - field='crt_warped.interp', - desc='degree of b-spline used for interpolation') - modulate = traits.Bool( - field='crt_warped.jactransf', desc="Modulate images") + field="crt_warped.interp", + desc="degree of b-spline used for interpolation", + ) + modulate = traits.Bool(field="crt_warped.jactransf", desc="Modulate images") class CreateWarpedOutputSpec(TraitedSpec): - warped_files = traits.List(File(exists=True, desc='final warped files')) + warped_files = traits.List(File(exists=True, desc="final warped files")) class CreateWarped(SPMCommand): @@ -2038,45 +2236,41 @@ class CreateWarped(SPMCommand): input_spec = CreateWarpedInputSpec output_spec = CreateWarpedOutputSpec - _jobtype = 'tools' - _jobname = 'dartel' + _jobtype = "tools" + _jobname = "dartel" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if opt in ['image_files']: + if opt in 
["image_files"]: return scans_for_fnames(val, keep4d=True, separate_sessions=True) - if opt in ['flowfield_files']: + if opt in ["flowfield_files"]: return scans_for_fnames(val, keep4d=True) else: return super(CreateWarped, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() - outputs['warped_files'] = [] + outputs["warped_files"] = [] for filename in self.inputs.image_files: pth, base, ext = split_filename(filename) if isdefined(self.inputs.modulate) and self.inputs.modulate: - outputs['warped_files'].append( - os.path.realpath('mw%s%s' % (base, ext))) + outputs["warped_files"].append(os.path.realpath("mw%s%s" % (base, ext))) else: - outputs['warped_files'].append( - os.path.realpath('w%s%s' % (base, ext))) + outputs["warped_files"].append(os.path.realpath("w%s%s" % (base, ext))) return outputs class ApplyDeformationFieldInputSpec(SPMCommandInputSpec): - in_files = InputMultiPath( - ImageFileSPM(exists=True), mandatory=True, field='fnames') - deformation_field = File(exists=True, mandatory=True, field='comp{1}.def') + in_files = InputMultiPath(ImageFileSPM(exists=True), mandatory=True, field="fnames") + deformation_field = File(exists=True, mandatory=True, field="comp{1}.def") reference_volume = ImageFileSPM( - exists=True, mandatory=True, field='comp{2}.id.space') + exists=True, mandatory=True, field="comp{2}.id.space" + ) interp = traits.Range( - low=0, - high=7, - field='interp', - desc='degree of b-spline used for interpolation') + low=0, high=7, field="interp", desc="degree of b-spline used for interpolation" + ) class ApplyDeformationFieldOutputSpec(TraitedSpec): @@ -2087,18 +2281,18 @@ class ApplyDeformations(SPMCommand): input_spec = ApplyDeformationFieldInputSpec output_spec = ApplyDeformationFieldOutputSpec - _jobtype = 'util' - _jobname = 'defs' + _jobtype = "util" + _jobname = "defs" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if opt in ['deformation_field', 'reference_volume']: + if opt in ["deformation_field", "reference_volume"]: val = [val] - if opt in ['deformation_field']: + if opt in ["deformation_field"]: return scans_for_fnames(val, keep4d=True, separate_sessions=False) - if opt in ['in_files', 'reference_volume']: + if opt in ["in_files", "reference_volume"]: return scans_for_fnames(val, keep4d=False, separate_sessions=False) else: @@ -2106,10 +2300,10 @@ def _format_arg(self, opt, spec, val): def _list_outputs(self): outputs = self._outputs().get() - outputs['out_files'] = [] + outputs["out_files"] = [] for filename in self.inputs.in_files: _, fname = os.path.split(filename) - outputs['out_files'].append(os.path.realpath('w%s' % fname)) + outputs["out_files"].append(os.path.realpath("w%s" % fname)) return outputs @@ -2118,197 +2312,197 @@ class VBMSegmentInputSpec(SPMCommandInputSpec): in_files = InputMultiPath( ImageFileSPM(exists=True), desc="A list of files to be segmented", - field='estwrite.data', + field="estwrite.data", copyfile=False, - mandatory=True) + mandatory=True, + ) tissues = ImageFileSPM( - exists=True, field='estwrite.tpm', desc='tissue probability map') + exists=True, field="estwrite.tpm", desc="tissue probability map" + ) gaussians_per_class = traits.Tuple( (2, 2, 2, 3, 4, 2), *([traits.Int()] * 6), usedefault=True, - desc='number of gaussians for each tissue class') + desc="number of gaussians for each tissue class" + ) bias_regularization = traits.Enum( - 0.0001, (0, 0.00001, 0.0001, 0.001, 0.01, 0.1, 1, 10), - field='estwrite.opts.biasreg', + 0.0001, + (0, 
0.00001, 0.0001, 0.001, 0.01, 0.1, 1, 10), + field="estwrite.opts.biasreg", usedefault=True, - desc='no(0) - extremely heavy (10)') + desc="no(0) - extremely heavy (10)", + ) bias_fwhm = traits.Enum( - 60, (30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 'Inf'), - field='estwrite.opts.biasfwhm', + 60, + (30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, "Inf"), + field="estwrite.opts.biasfwhm", usedefault=True, - desc='FWHM of Gaussian smoothness of bias') + desc="FWHM of Gaussian smoothness of bias", + ) sampling_distance = traits.Float( 3, usedefault=True, - field='estwrite.opts.samp', - desc='Sampling distance on data for parameter estimation') + field="estwrite.opts.samp", + desc="Sampling distance on data for parameter estimation", + ) warping_regularization = traits.Float( 4, usedefault=True, - field='estwrite.opts.warpreg', - desc='Controls balance between parameters and data') - - spatial_normalization = traits.Enum( - 'high', - 'low', - usedefault=True, + field="estwrite.opts.warpreg", + desc="Controls balance between parameters and data", ) + + spatial_normalization = traits.Enum("high", "low", usedefault=True,) dartel_template = ImageFileSPM( - exists=True, field='estwrite.extopts.dartelwarp.normhigh.darteltpm') + exists=True, field="estwrite.extopts.dartelwarp.normhigh.darteltpm" + ) use_sanlm_denoising_filter = traits.Range( 0, 2, 2, usedefault=True, - field='estwrite.extopts.sanlm', - desc="0=No denoising, 1=denoising,2=denoising multi-threaded") - mrf_weighting = traits.Float( - 0.15, usedefault=True, field='estwrite.extopts.mrf') + field="estwrite.extopts.sanlm", + desc="0=No denoising, 1=denoising,2=denoising multi-threaded", + ) + mrf_weighting = traits.Float(0.15, usedefault=True, field="estwrite.extopts.mrf") cleanup_partitions = traits.Int( 1, usedefault=True, - field='estwrite.extopts.cleanup', - desc="0=None,1=light,2=thorough") - display_results = traits.Bool( - True, usedefault=True, field='estwrite.extopts.print') - - gm_native = traits.Bool( - False, - usedefault=True, - field='estwrite.output.GM.native', + field="estwrite.extopts.cleanup", + desc="0=None,1=light,2=thorough", ) + display_results = traits.Bool(True, usedefault=True, field="estwrite.extopts.print") + + gm_native = traits.Bool(False, usedefault=True, field="estwrite.output.GM.native",) gm_normalized = traits.Bool( - False, - usedefault=True, - field='estwrite.output.GM.warped', + False, usedefault=True, field="estwrite.output.GM.warped", ) gm_modulated_normalized = traits.Range( 0, 2, 2, usedefault=True, - field='estwrite.output.GM.modulated', - desc='0=none,1=affine+non-linear(SPM8 default),2=non-linear only') + field="estwrite.output.GM.modulated", + desc="0=none,1=affine+non-linear(SPM8 default),2=non-linear only", + ) gm_dartel = traits.Range( 0, 2, 0, usedefault=True, - field='estwrite.output.GM.dartel', - desc="0=None,1=rigid(SPM8 default),2=affine") - - wm_native = traits.Bool( - False, - usedefault=True, - field='estwrite.output.WM.native', + field="estwrite.output.GM.dartel", + desc="0=None,1=rigid(SPM8 default),2=affine", ) + + wm_native = traits.Bool(False, usedefault=True, field="estwrite.output.WM.native",) wm_normalized = traits.Bool( - False, - usedefault=True, - field='estwrite.output.WM.warped', + False, usedefault=True, field="estwrite.output.WM.warped", ) wm_modulated_normalized = traits.Range( 0, 2, 2, usedefault=True, - field='estwrite.output.WM.modulated', - desc='0=none,1=affine+non-linear(SPM8 default),2=non-linear only') + field="estwrite.output.WM.modulated", + 
desc="0=none,1=affine+non-linear(SPM8 default),2=non-linear only", + ) wm_dartel = traits.Range( 0, 2, 0, usedefault=True, - field='estwrite.output.WM.dartel', - desc="0=None,1=rigid(SPM8 default),2=affine") + field="estwrite.output.WM.dartel", + desc="0=None,1=rigid(SPM8 default),2=affine", + ) csf_native = traits.Bool( - False, - usedefault=True, - field='estwrite.output.CSF.native', + False, usedefault=True, field="estwrite.output.CSF.native", ) csf_normalized = traits.Bool( - False, - usedefault=True, - field='estwrite.output.CSF.warped', + False, usedefault=True, field="estwrite.output.CSF.warped", ) csf_modulated_normalized = traits.Range( 0, 2, 2, usedefault=True, - field='estwrite.output.CSF.modulated', - desc='0=none,1=affine+non-linear(SPM8 default),2=non-linear only') + field="estwrite.output.CSF.modulated", + desc="0=none,1=affine+non-linear(SPM8 default),2=non-linear only", + ) csf_dartel = traits.Range( 0, 2, 0, usedefault=True, - field='estwrite.output.CSF.dartel', - desc="0=None,1=rigid(SPM8 default),2=affine") + field="estwrite.output.CSF.dartel", + desc="0=None,1=rigid(SPM8 default),2=affine", + ) bias_corrected_native = traits.Bool( - False, - usedefault=True, - field='estwrite.output.bias.native', + False, usedefault=True, field="estwrite.output.bias.native", ) bias_corrected_normalized = traits.Bool( - True, - usedefault=True, - field='estwrite.output.bias.warped', + True, usedefault=True, field="estwrite.output.bias.warped", ) bias_corrected_affine = traits.Bool( - False, - usedefault=True, - field='estwrite.output.bias.affine', + False, usedefault=True, field="estwrite.output.bias.affine", ) pve_label_native = traits.Bool( - False, usedefault=True, field='estwrite.output.label.native') + False, usedefault=True, field="estwrite.output.label.native" + ) pve_label_normalized = traits.Bool( - False, usedefault=True, field='estwrite.output.label.warped') + False, usedefault=True, field="estwrite.output.label.warped" + ) pve_label_dartel = traits.Range( 0, 2, 0, usedefault=True, - field='estwrite.output.label.dartel', - desc="0=None,1=rigid(SPM8 default),2=affine") + field="estwrite.output.label.dartel", + desc="0=None,1=rigid(SPM8 default),2=affine", + ) jacobian_determinant = traits.Bool( - False, usedefault=True, field='estwrite.jacobian.warped') + False, usedefault=True, field="estwrite.jacobian.warped" + ) deformation_field = traits.Tuple( (0, 0), traits.Bool, traits.Bool, usedefault=True, - field='estwrite.output.warps', - desc='forward and inverse field') + field="estwrite.output.warps", + desc="forward and inverse field", + ) class VBMSegmentOuputSpec(TraitedSpec): native_class_images = traits.List( - traits.List(File(exists=True)), desc='native space probability maps') + traits.List(File(exists=True)), desc="native space probability maps" + ) dartel_input_images = traits.List( - traits.List(File(exists=True)), desc='dartel imported class images') + traits.List(File(exists=True)), desc="dartel imported class images" + ) normalized_class_images = traits.List( - traits.List(File(exists=True)), desc='normalized class images') + traits.List(File(exists=True)), desc="normalized class images" + ) modulated_class_images = traits.List( - traits.List(File(exists=True)), - desc=('modulated+normalized class ' - 'images')) + traits.List(File(exists=True)), desc=("modulated+normalized class " "images") + ) transformation_mat = OutputMultiPath( - File(exists=True), desc='Normalization transformation') + File(exists=True), desc="Normalization transformation" + ) 
bias_corrected_images = OutputMultiPath( - File(exists=True), desc='bias corrected images') + File(exists=True), desc="bias corrected images" + ) normalized_bias_corrected_images = OutputMultiPath( - File(exists=True), desc='bias corrected images') + File(exists=True), desc="bias corrected images" + ) pve_label_native_images = OutputMultiPath(File(exists=True)) pve_label_normalized_images = OutputMultiPath(File(exists=True)) @@ -2342,124 +2536,137 @@ class VBMSegment(SPMCommand): input_spec = VBMSegmentInputSpec output_spec = VBMSegmentOuputSpec - _jobtype = 'tools' - _jobname = 'vbm8' + _jobtype = "tools" + _jobname = "vbm8" def _list_outputs(self): outputs = self._outputs().get() do_dartel = self.inputs.spatial_normalization - dartel_px = '' + dartel_px = "" if do_dartel: - dartel_px = 'r' + dartel_px = "r" - outputs['native_class_images'] = [[], [], []] - outputs['dartel_input_images'] = [[], [], []] - outputs['normalized_class_images'] = [[], [], []] - outputs['modulated_class_images'] = [[], [], []] + outputs["native_class_images"] = [[], [], []] + outputs["dartel_input_images"] = [[], [], []] + outputs["normalized_class_images"] = [[], [], []] + outputs["modulated_class_images"] = [[], [], []] - outputs['transformation_mat'] = [] + outputs["transformation_mat"] = [] - outputs['bias_corrected_images'] = [] - outputs['normalized_bias_corrected_images'] = [] + outputs["bias_corrected_images"] = [] + outputs["normalized_bias_corrected_images"] = [] - outputs['inverse_deformation_field'] = [] - outputs['forward_deformation_field'] = [] - outputs['jacobian_determinant_images'] = [] + outputs["inverse_deformation_field"] = [] + outputs["forward_deformation_field"] = [] + outputs["jacobian_determinant_images"] = [] - outputs['pve_label_native_images'] = [] - outputs['pve_label_normalized_images'] = [] - outputs['pve_label_registered_images'] = [] + outputs["pve_label_native_images"] = [] + outputs["pve_label_normalized_images"] = [] + outputs["pve_label_registered_images"] = [] for filename in self.inputs.in_files: pth, base, ext = split_filename(filename) - outputs['transformation_mat'].append( - os.path.join(pth, "%s_seg8.mat" % base)) + outputs["transformation_mat"].append( + os.path.join(pth, "%s_seg8.mat" % base) + ) - for i, tis in enumerate(['gm', 'wm', 'csf']): + for i, tis in enumerate(["gm", "wm", "csf"]): # native space - if getattr(self.inputs, '%s_native' % tis): - outputs['native_class_images'][i].append( - os.path.join(pth, "p%d%s.nii" % (i + 1, base))) - if getattr(self.inputs, '%s_dartel' % tis) == 1: - outputs['dartel_input_images'][i].append( - os.path.join(pth, "rp%d%s.nii" % (i + 1, base))) - elif getattr(self.inputs, '%s_dartel' % tis) == 2: - outputs['dartel_input_images'][i].append( - os.path.join(pth, "rp%d%s_affine.nii" % (i + 1, base))) - - # normalized space - if getattr(self.inputs, '%s_normalized' % tis): - outputs['normalized_class_images'][i].append( - os.path.join(pth, "w%sp%d%s.nii" % (dartel_px, i + 1, - base))) - - if getattr(self.inputs, '%s_modulated_normalized' % tis) == 1: - outputs['modulated_class_images'][i].append( - os.path.join(pth, "mw%sp%d%s.nii" % (dartel_px, i + 1, - base))) - elif getattr(self.inputs, - '%s_modulated_normalized' % tis) == 2: - outputs['normalized_class_images'][i].append( - os.path.join(pth, "m0w%sp%d%s.nii" % (dartel_px, i + 1, - base))) + if getattr(self.inputs, "%s_native" % tis): + outputs["native_class_images"][i].append( + os.path.join(pth, "p%d%s.nii" % (i + 1, base)) + ) + if getattr(self.inputs, "%s_dartel" % tis) 
== 1: + outputs["dartel_input_images"][i].append( + os.path.join(pth, "rp%d%s.nii" % (i + 1, base)) + ) + elif getattr(self.inputs, "%s_dartel" % tis) == 2: + outputs["dartel_input_images"][i].append( + os.path.join(pth, "rp%d%s_affine.nii" % (i + 1, base)) + ) + + # normalized space + if getattr(self.inputs, "%s_normalized" % tis): + outputs["normalized_class_images"][i].append( + os.path.join(pth, "w%sp%d%s.nii" % (dartel_px, i + 1, base)) + ) + + if getattr(self.inputs, "%s_modulated_normalized" % tis) == 1: + outputs["modulated_class_images"][i].append( + os.path.join(pth, "mw%sp%d%s.nii" % (dartel_px, i + 1, base)) + ) + elif getattr(self.inputs, "%s_modulated_normalized" % tis) == 2: + outputs["normalized_class_images"][i].append( + os.path.join(pth, "m0w%sp%d%s.nii" % (dartel_px, i + 1, base)) + ) if self.inputs.pve_label_native: - outputs['pve_label_native_images'].append( - os.path.join(pth, "p0%s.nii" % (base))) + outputs["pve_label_native_images"].append( + os.path.join(pth, "p0%s.nii" % (base)) + ) if self.inputs.pve_label_normalized: - outputs['pve_label_normalized_images'].append( - os.path.join(pth, "w%sp0%s.nii" % (dartel_px, base))) + outputs["pve_label_normalized_images"].append( + os.path.join(pth, "w%sp0%s.nii" % (dartel_px, base)) + ) if self.inputs.pve_label_dartel == 1: - outputs['pve_label_registered_images'].append( - os.path.join(pth, "rp0%s.nii" % (base))) + outputs["pve_label_registered_images"].append( + os.path.join(pth, "rp0%s.nii" % (base)) + ) elif self.inputs.pve_label_dartel == 2: - outputs['pve_label_registered_images'].append( - os.path.join(pth, "rp0%s_affine.nii" % (base))) + outputs["pve_label_registered_images"].append( + os.path.join(pth, "rp0%s_affine.nii" % (base)) + ) if self.inputs.bias_corrected_native: - outputs['bias_corrected_images'].append( - os.path.join(pth, "m%s.nii" % (base))) + outputs["bias_corrected_images"].append( + os.path.join(pth, "m%s.nii" % (base)) + ) if self.inputs.bias_corrected_normalized: - outputs['normalized_bias_corrected_images'].append( - os.path.join(pth, "wm%s%s.nii" % (dartel_px, base))) + outputs["normalized_bias_corrected_images"].append( + os.path.join(pth, "wm%s%s.nii" % (dartel_px, base)) + ) if self.inputs.deformation_field[0]: - outputs['forward_deformation_field'].append( - os.path.join(pth, "y_%s%s.nii" % (dartel_px, base))) + outputs["forward_deformation_field"].append( + os.path.join(pth, "y_%s%s.nii" % (dartel_px, base)) + ) if self.inputs.deformation_field[1]: - outputs['inverse_deformation_field'].append( - os.path.join(pth, "iy_%s%s.nii" % (dartel_px, base))) + outputs["inverse_deformation_field"].append( + os.path.join(pth, "iy_%s%s.nii" % (dartel_px, base)) + ) if self.inputs.jacobian_determinant and do_dartel: - outputs['jacobian_determinant_images'].append( - os.path.join(pth, "jac_wrp1%s.nii" % (base))) + outputs["jacobian_determinant_images"].append( + os.path.join(pth, "jac_wrp1%s.nii" % (base)) + ) return outputs def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if opt in ['in_files']: + if opt in ["in_files"]: return scans_for_fnames(val, keep4d=True) - elif opt in ['spatial_normalization']: - if val == 'low': - return {'normlow': []} - elif opt in ['dartel_template']: + elif opt in ["spatial_normalization"]: + if val == "low": + return {"normlow": []} + elif opt in ["dartel_template"]: return np.array([val], dtype=object) - elif opt in ['deformation_field']: + elif opt in ["deformation_field"]: return super(VBMSegment, self)._format_arg( - opt, 
spec, [int(val[0]), int(val[1])]) + opt, spec, [int(val[0]), int(val[1])] + ) else: return super(VBMSegment, self)._format_arg(opt, spec, val) def _parse_inputs(self): - if self.inputs.spatial_normalization == 'low': + if self.inputs.spatial_normalization == "low": einputs = super(VBMSegment, self)._parse_inputs( - skip=('spatial_normalization', 'dartel_template')) - einputs[0]['estwrite']['extopts']['dartelwarp'] = {'normlow': 1} + skip=("spatial_normalization", "dartel_template") + ) + einputs[0]["estwrite"]["extopts"]["dartelwarp"] = {"normlow": 1} return einputs else: - return super(VBMSegment, - self)._parse_inputs(skip=('spatial_normalization')) + return super(VBMSegment, self)._parse_inputs(skip=("spatial_normalization")) diff --git a/nipype/interfaces/spm/tests/test_auto_Analyze2nii.py b/nipype/interfaces/spm/tests/test_auto_Analyze2nii.py index 92ca0baf27..771cb640b1 100644 --- a/nipype/interfaces/spm/tests/test_auto_Analyze2nii.py +++ b/nipype/interfaces/spm/tests/test_auto_Analyze2nii.py @@ -4,35 +4,28 @@ def test_Analyze2nii_inputs(): input_map = dict( - analyze_file=dict( - extensions=None, - mandatory=True, - ), + analyze_file=dict(extensions=None, mandatory=True,), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict(usedefault=True,), paths=dict(), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), + use_v8struct=dict(min_ver="8", usedefault=True,), ) inputs = Analyze2nii.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Analyze2nii_outputs(): output_map = dict( matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - nifti_file=dict(extensions=None, ), + mfile=dict(usedefault=True,), + nifti_file=dict(extensions=None,), paths=dict(), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), + use_v8struct=dict(min_ver="8", usedefault=True,), ) outputs = Analyze2nii.output_spec() diff --git a/nipype/interfaces/spm/tests/test_auto_ApplyDeformations.py b/nipype/interfaces/spm/tests/test_auto_ApplyDeformations.py index 8d7c371f40..6e741ae607 100644 --- a/nipype/interfaces/spm/tests/test_auto_ApplyDeformations.py +++ b/nipype/interfaces/spm/tests/test_auto_ApplyDeformations.py @@ -4,37 +4,29 @@ def test_ApplyDeformations_inputs(): input_map = dict( - deformation_field=dict( - extensions=None, - field='comp{1}.def', - mandatory=True, - ), - in_files=dict( - field='fnames', - mandatory=True, - ), - interp=dict(field='interp', ), + deformation_field=dict(extensions=None, field="comp{1}.def", mandatory=True,), + in_files=dict(field="fnames", mandatory=True,), + interp=dict(field="interp",), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict(usedefault=True,), paths=dict(), reference_volume=dict( - extensions=['.hdr', '.img', '.img.gz', '.nii'], - field='comp{2}.id.space', + extensions=[".hdr", ".img", ".img.gz", ".nii"], + field="comp{2}.id.space", mandatory=True, ), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), + use_v8struct=dict(min_ver="8", usedefault=True,), ) inputs = ApplyDeformations.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyDeformations_outputs(): - output_map = dict(out_files=dict(), ) + output_map = dict(out_files=dict(),) outputs = ApplyDeformations.output_spec() for key, metadata in list(output_map.items()): diff --git 
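[Reviewer note] Before the auto-generated metadata tests below, a usage sketch of the VBM8 segmentation interface reformatted above (template/TPM names are illustrative; assumes the VBM8 toolbox is installed alongside SPM):

>>> import nipype.interfaces.spm as spm
>>> seg = spm.VBMSegment()
>>> seg.inputs.tissues = 'TPM.nii'
>>> seg.inputs.dartel_template = 'Template_1_IXI550_MNI152.nii'
>>> seg.inputs.bias_corrected_native = True
>>> seg.inputs.gm_native = True
>>> seg.inputs.wm_native = True
>>> seg.inputs.csf_native = True
>>> seg.inputs.pve_label_native = True
>>> seg.inputs.deformation_field = (True, False)
>>> seg.run()  # doctest: +SKIP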
a/nipype/interfaces/spm/tests/test_auto_ApplyInverseDeformation.py b/nipype/interfaces/spm/tests/test_auto_ApplyInverseDeformation.py index 270fc8cb75..b9389091b3 100644 --- a/nipype/interfaces/spm/tests/test_auto_ApplyInverseDeformation.py +++ b/nipype/interfaces/spm/tests/test_auto_ApplyInverseDeformation.py @@ -4,43 +4,34 @@ def test_ApplyInverseDeformation_inputs(): input_map = dict( - bounding_box=dict(field='comp{1}.inv.comp{1}.sn2def.bb', ), + bounding_box=dict(field="comp{1}.inv.comp{1}.sn2def.bb",), deformation=dict( extensions=None, - field='comp{1}.inv.comp{1}.sn2def.matname', - xor=['deformation_field'], + field="comp{1}.inv.comp{1}.sn2def.matname", + xor=["deformation_field"], ), deformation_field=dict( - extensions=None, - field='comp{1}.inv.comp{1}.def', - xor=['deformation'], - ), - in_files=dict( - field='fnames', - mandatory=True, + extensions=None, field="comp{1}.inv.comp{1}.def", xor=["deformation"], ), - interpolation=dict(field='interp', ), + in_files=dict(field="fnames", mandatory=True,), + interpolation=dict(field="interp",), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict(usedefault=True,), paths=dict(), - target=dict( - extensions=None, - field='comp{1}.inv.space', - ), + target=dict(extensions=None, field="comp{1}.inv.space",), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), - voxel_sizes=dict(field='comp{1}.inv.comp{1}.sn2def.vox', ), + use_v8struct=dict(min_ver="8", usedefault=True,), + voxel_sizes=dict(field="comp{1}.inv.comp{1}.sn2def.vox",), ) inputs = ApplyInverseDeformation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyInverseDeformation_outputs(): - output_map = dict(out_files=dict(), ) + output_map = dict(out_files=dict(),) outputs = ApplyInverseDeformation.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_ApplyTransform.py b/nipype/interfaces/spm/tests/test_auto_ApplyTransform.py index be07ee26ce..99f140ecac 100644 --- a/nipype/interfaces/spm/tests/test_auto_ApplyTransform.py +++ b/nipype/interfaces/spm/tests/test_auto_ApplyTransform.py @@ -4,35 +4,24 @@ def test_ApplyTransform_inputs(): input_map = dict( - in_file=dict( - copyfile=True, - extensions=None, - mandatory=True, - ), - mat=dict( - extensions=None, - mandatory=True, - ), + in_file=dict(copyfile=True, extensions=None, mandatory=True,), + mat=dict(extensions=None, mandatory=True,), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - out_file=dict( - extensions=None, - genfile=True, - ), + mfile=dict(usedefault=True,), + out_file=dict(extensions=None, genfile=True,), paths=dict(), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), + use_v8struct=dict(min_ver="8", usedefault=True,), ) inputs = ApplyTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ApplyTransform_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ApplyTransform.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_CalcCoregAffine.py b/nipype/interfaces/spm/tests/test_auto_CalcCoregAffine.py index a4f7b5b516..3c67fe75c6 100644 --- a/nipype/interfaces/spm/tests/test_auto_CalcCoregAffine.py +++ 
b/nipype/interfaces/spm/tests/test_auto_CalcCoregAffine.py @@ -4,36 +4,25 @@ def test_CalcCoregAffine_inputs(): input_map = dict( - invmat=dict(extensions=None, ), - mat=dict(extensions=None, ), + invmat=dict(extensions=None,), + mat=dict(extensions=None,), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - moving=dict( - copyfile=False, - extensions=None, - mandatory=True, - ), + mfile=dict(usedefault=True,), + moving=dict(copyfile=False, extensions=None, mandatory=True,), paths=dict(), - target=dict( - extensions=None, - mandatory=True, - ), + target=dict(extensions=None, mandatory=True,), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), + use_v8struct=dict(min_ver="8", usedefault=True,), ) inputs = CalcCoregAffine.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CalcCoregAffine_outputs(): - output_map = dict( - invmat=dict(extensions=None, ), - mat=dict(extensions=None, ), - ) + output_map = dict(invmat=dict(extensions=None,), mat=dict(extensions=None,),) outputs = CalcCoregAffine.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_Coregister.py b/nipype/interfaces/spm/tests/test_auto_Coregister.py index 276f02c47a..04e39ce23b 100644 --- a/nipype/interfaces/spm/tests/test_auto_Coregister.py +++ b/nipype/interfaces/spm/tests/test_auto_Coregister.py @@ -4,52 +4,38 @@ def test_Coregister_inputs(): input_map = dict( - apply_to_files=dict( - copyfile=True, - field='other', - ), - cost_function=dict(field='eoptions.cost_fun', ), - fwhm=dict(field='eoptions.fwhm', ), - jobtype=dict(usedefault=True, ), + apply_to_files=dict(copyfile=True, field="other",), + cost_function=dict(field="eoptions.cost_fun",), + fwhm=dict(field="eoptions.fwhm",), + jobtype=dict(usedefault=True,), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - out_prefix=dict( - field='roptions.prefix', - usedefault=True, - ), + mfile=dict(usedefault=True,), + out_prefix=dict(field="roptions.prefix", usedefault=True,), paths=dict(), - separation=dict(field='eoptions.sep', ), - source=dict( - copyfile=True, - field='source', - mandatory=True, - ), + separation=dict(field="eoptions.sep",), + source=dict(copyfile=True, field="source", mandatory=True,), target=dict( copyfile=False, - extensions=['.hdr', '.img', '.img.gz', '.nii'], - field='ref', + extensions=[".hdr", ".img", ".img.gz", ".nii"], + field="ref", mandatory=True, ), - tolerance=dict(field='eoptions.tol', ), + tolerance=dict(field="eoptions.tol",), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), - write_interp=dict(field='roptions.interp', ), - write_mask=dict(field='roptions.mask', ), - write_wrap=dict(field='roptions.wrap', ), + use_v8struct=dict(min_ver="8", usedefault=True,), + write_interp=dict(field="roptions.interp",), + write_mask=dict(field="roptions.mask",), + write_wrap=dict(field="roptions.wrap",), ) inputs = Coregister.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Coregister_outputs(): - output_map = dict( - coregistered_files=dict(), - coregistered_source=dict(), - ) + output_map = dict(coregistered_files=dict(), coregistered_source=dict(),) outputs = Coregister.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_CreateWarped.py 
b/nipype/interfaces/spm/tests/test_auto_CreateWarped.py index b25e377ad3..b172972fe7 100644 --- a/nipype/interfaces/spm/tests/test_auto_CreateWarped.py +++ b/nipype/interfaces/spm/tests/test_auto_CreateWarped.py @@ -5,34 +5,27 @@ def test_CreateWarped_inputs(): input_map = dict( flowfield_files=dict( - copyfile=False, - field='crt_warped.flowfields', - mandatory=True, + copyfile=False, field="crt_warped.flowfields", mandatory=True, ), - image_files=dict( - copyfile=False, - field='crt_warped.images', - mandatory=True, - ), - interp=dict(field='crt_warped.interp', ), - iterations=dict(field='crt_warped.K', ), + image_files=dict(copyfile=False, field="crt_warped.images", mandatory=True,), + interp=dict(field="crt_warped.interp",), + iterations=dict(field="crt_warped.K",), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - modulate=dict(field='crt_warped.jactransf', ), + mfile=dict(usedefault=True,), + modulate=dict(field="crt_warped.jactransf",), paths=dict(), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), + use_v8struct=dict(min_ver="8", usedefault=True,), ) inputs = CreateWarped.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CreateWarped_outputs(): - output_map = dict(warped_files=dict(), ) + output_map = dict(warped_files=dict(),) outputs = CreateWarped.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_DARTEL.py b/nipype/interfaces/spm/tests/test_auto_DARTEL.py index de7eb7e158..ca031dfd1e 100644 --- a/nipype/interfaces/spm/tests/test_auto_DARTEL.py +++ b/nipype/interfaces/spm/tests/test_auto_DARTEL.py @@ -4,36 +4,28 @@ def test_DARTEL_inputs(): input_map = dict( - image_files=dict( - copyfile=False, - field='warp.images', - mandatory=True, - ), - iteration_parameters=dict(field='warp.settings.param', ), + image_files=dict(copyfile=False, field="warp.images", mandatory=True,), + iteration_parameters=dict(field="warp.settings.param",), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - optimization_parameters=dict(field='warp.settings.optim', ), + mfile=dict(usedefault=True,), + optimization_parameters=dict(field="warp.settings.optim",), paths=dict(), - regularization_form=dict(field='warp.settings.rform', ), - template_prefix=dict( - field='warp.settings.template', - usedefault=True, - ), + regularization_form=dict(field="warp.settings.rform",), + template_prefix=dict(field="warp.settings.template", usedefault=True,), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), + use_v8struct=dict(min_ver="8", usedefault=True,), ) inputs = DARTEL.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DARTEL_outputs(): output_map = dict( dartel_flow_fields=dict(), - final_template_file=dict(extensions=None, ), + final_template_file=dict(extensions=None,), template_files=dict(), ) outputs = DARTEL.output_spec() diff --git a/nipype/interfaces/spm/tests/test_auto_DARTELNorm2MNI.py b/nipype/interfaces/spm/tests/test_auto_DARTELNorm2MNI.py index 9870b2b55b..3b406b3c27 100644 --- a/nipype/interfaces/spm/tests/test_auto_DARTELNorm2MNI.py +++ b/nipype/interfaces/spm/tests/test_auto_DARTELNorm2MNI.py @@ -5,42 +5,35 @@ def test_DARTELNorm2MNI_inputs(): input_map = dict( apply_to_files=dict( - copyfile=False, - field='mni_norm.data.subjs.images', - 
mandatory=True, - ), - bounding_box=dict(field='mni_norm.bb', ), - flowfield_files=dict( - field='mni_norm.data.subjs.flowfields', - mandatory=True, + copyfile=False, field="mni_norm.data.subjs.images", mandatory=True, ), - fwhm=dict(field='mni_norm.fwhm', ), + bounding_box=dict(field="mni_norm.bb",), + flowfield_files=dict(field="mni_norm.data.subjs.flowfields", mandatory=True,), + fwhm=dict(field="mni_norm.fwhm",), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - modulate=dict(field='mni_norm.preserve', ), + mfile=dict(usedefault=True,), + modulate=dict(field="mni_norm.preserve",), paths=dict(), template_file=dict( copyfile=False, - extensions=['.hdr', '.img', '.img.gz', '.nii'], - field='mni_norm.template', + extensions=[".hdr", ".img", ".img.gz", ".nii"], + field="mni_norm.template", mandatory=True, ), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), - voxel_size=dict(field='mni_norm.vox', ), + use_v8struct=dict(min_ver="8", usedefault=True,), + voxel_size=dict(field="mni_norm.vox",), ) inputs = DARTELNorm2MNI.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DARTELNorm2MNI_outputs(): output_map = dict( - normalization_parameter_file=dict(extensions=None, ), - normalized_files=dict(), + normalization_parameter_file=dict(extensions=None,), normalized_files=dict(), ) outputs = DARTELNorm2MNI.output_spec() diff --git a/nipype/interfaces/spm/tests/test_auto_DicomImport.py b/nipype/interfaces/spm/tests/test_auto_DicomImport.py index 833794b628..ddb3f81c78 100644 --- a/nipype/interfaces/spm/tests/test_auto_DicomImport.py +++ b/nipype/interfaces/spm/tests/test_auto_DicomImport.py @@ -4,42 +4,26 @@ def test_DicomImport_inputs(): input_map = dict( - format=dict( - field='convopts.format', - usedefault=True, - ), - icedims=dict( - field='convopts.icedims', - usedefault=True, - ), - in_files=dict( - field='data', - mandatory=True, - ), + format=dict(field="convopts.format", usedefault=True,), + icedims=dict(field="convopts.icedims", usedefault=True,), + in_files=dict(field="data", mandatory=True,), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - output_dir=dict( - field='outdir', - usedefault=True, - ), - output_dir_struct=dict( - field='root', - usedefault=True, - ), + mfile=dict(usedefault=True,), + output_dir=dict(field="outdir", usedefault=True,), + output_dir_struct=dict(field="root", usedefault=True,), paths=dict(), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), + use_v8struct=dict(min_ver="8", usedefault=True,), ) inputs = DicomImport.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DicomImport_outputs(): - output_map = dict(out_files=dict(), ) + output_map = dict(out_files=dict(),) outputs = DicomImport.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_EstimateContrast.py b/nipype/interfaces/spm/tests/test_auto_EstimateContrast.py index cc2d50f5a7..323cb3707c 100644 --- a/nipype/interfaces/spm/tests/test_auto_EstimateContrast.py +++ b/nipype/interfaces/spm/tests/test_auto_EstimateContrast.py @@ -4,45 +4,34 @@ def test_EstimateContrast_inputs(): input_map = dict( - beta_images=dict( - copyfile=False, - mandatory=True, - ), - contrasts=dict(mandatory=True, ), - group_contrast=dict(xor=['use_derivs'], ), + 
beta_images=dict(copyfile=False, mandatory=True,), + contrasts=dict(mandatory=True,), + group_contrast=dict(xor=["use_derivs"],), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict(usedefault=True,), paths=dict(), - residual_image=dict( - copyfile=False, - extensions=None, - mandatory=True, - ), + residual_image=dict(copyfile=False, extensions=None, mandatory=True,), spm_mat_file=dict( - copyfile=True, - extensions=None, - field='spmmat', - mandatory=True, + copyfile=True, extensions=None, field="spmmat", mandatory=True, ), - use_derivs=dict(xor=['group_contrast'], ), + use_derivs=dict(xor=["group_contrast"],), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), + use_v8struct=dict(min_ver="8", usedefault=True,), ) inputs = EstimateContrast.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EstimateContrast_outputs(): output_map = dict( con_images=dict(), ess_images=dict(), spmF_images=dict(), spmT_images=dict(), - spm_mat_file=dict(extensions=None, ), + spm_mat_file=dict(extensions=None,), ) outputs = EstimateContrast.output_spec() diff --git a/nipype/interfaces/spm/tests/test_auto_EstimateModel.py b/nipype/interfaces/spm/tests/test_auto_EstimateModel.py index 9893f2bfcb..5e2b25e0c7 100644 --- a/nipype/interfaces/spm/tests/test_auto_EstimateModel.py +++ b/nipype/interfaces/spm/tests/test_auto_EstimateModel.py @@ -4,45 +4,38 @@ def test_EstimateModel_inputs(): input_map = dict( - estimation_method=dict( - field='method', - mandatory=True, - ), + estimation_method=dict(field="method", mandatory=True,), flags=dict(), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict(usedefault=True,), paths=dict(), spm_mat_file=dict( - copyfile=True, - extensions=None, - field='spmmat', - mandatory=True, + copyfile=True, extensions=None, field="spmmat", mandatory=True, ), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), - write_residuals=dict(field='write_residuals', ), + use_v8struct=dict(min_ver="8", usedefault=True,), + write_residuals=dict(field="write_residuals",), ) inputs = EstimateModel.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_EstimateModel_outputs(): output_map = dict( ARcoef=dict(), Cbetas=dict(), - RPVimage=dict(extensions=['.hdr', '.img', '.img.gz', '.nii'], ), + RPVimage=dict(extensions=[".hdr", ".img", ".img.gz", ".nii"],), SDbetas=dict(), SDerror=dict(), beta_images=dict(), - labels=dict(extensions=['.hdr', '.img', '.img.gz', '.nii'], ), - mask_image=dict(extensions=['.hdr', '.img', '.img.gz', '.nii'], ), - residual_image=dict(extensions=['.hdr', '.img', '.img.gz', '.nii'], ), + labels=dict(extensions=[".hdr", ".img", ".img.gz", ".nii"],), + mask_image=dict(extensions=[".hdr", ".img", ".img.gz", ".nii"],), + residual_image=dict(extensions=[".hdr", ".img", ".img.gz", ".nii"],), residual_images=dict(), - spm_mat_file=dict(extensions=None, ), + spm_mat_file=dict(extensions=None,), ) outputs = EstimateModel.output_spec() diff --git a/nipype/interfaces/spm/tests/test_auto_FactorialDesign.py b/nipype/interfaces/spm/tests/test_auto_FactorialDesign.py index 814cb173fb..0fb35c010f 100644 --- a/nipype/interfaces/spm/tests/test_auto_FactorialDesign.py +++ b/nipype/interfaces/spm/tests/test_auto_FactorialDesign.py @@ -4,55 +4,49 @@ def test_FactorialDesign_inputs(): input_map = 
dict( - covariates=dict(field='cov', ), - explicit_mask_file=dict( - extensions=None, - field='masking.em', - ), + covariates=dict(field="cov",), + explicit_mask_file=dict(extensions=None, field="masking.em",), global_calc_mean=dict( - field='globalc.g_mean', - xor=['global_calc_omit', 'global_calc_values'], + field="globalc.g_mean", xor=["global_calc_omit", "global_calc_values"], ), global_calc_omit=dict( - field='globalc.g_omit', - xor=['global_calc_mean', 'global_calc_values'], + field="globalc.g_omit", xor=["global_calc_mean", "global_calc_values"], ), global_calc_values=dict( - field='globalc.g_user.global_uval', - xor=['global_calc_mean', 'global_calc_omit'], + field="globalc.g_user.global_uval", + xor=["global_calc_mean", "global_calc_omit"], ), - global_normalization=dict(field='globalm.glonorm', ), + global_normalization=dict(field="globalm.glonorm",), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - no_grand_mean_scaling=dict(field='globalm.gmsca.gmsca_no', ), + mfile=dict(usedefault=True,), + no_grand_mean_scaling=dict(field="globalm.gmsca.gmsca_no",), paths=dict(), - spm_mat_dir=dict(field='dir', ), + spm_mat_dir=dict(field="dir",), threshold_mask_absolute=dict( - field='masking.tm.tma.athresh', - xor=['threshold_mask_none', 'threshold_mask_relative'], + field="masking.tm.tma.athresh", + xor=["threshold_mask_none", "threshold_mask_relative"], ), threshold_mask_none=dict( - field='masking.tm.tm_none', - xor=['threshold_mask_absolute', 'threshold_mask_relative'], + field="masking.tm.tm_none", + xor=["threshold_mask_absolute", "threshold_mask_relative"], ), threshold_mask_relative=dict( - field='masking.tm.tmr.rthresh', - xor=['threshold_mask_absolute', 'threshold_mask_none'], + field="masking.tm.tmr.rthresh", + xor=["threshold_mask_absolute", "threshold_mask_none"], ), - use_implicit_threshold=dict(field='masking.im', ), + use_implicit_threshold=dict(field="masking.im",), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), + use_v8struct=dict(min_ver="8", usedefault=True,), ) inputs = FactorialDesign.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FactorialDesign_outputs(): - output_map = dict(spm_mat_file=dict(extensions=None, ), ) + output_map = dict(spm_mat_file=dict(extensions=None,),) outputs = FactorialDesign.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_FieldMap.py b/nipype/interfaces/spm/tests/test_auto_FieldMap.py index ad0375e7c8..19d0bc3e19 100644 --- a/nipype/interfaces/spm/tests/test_auto_FieldMap.py +++ b/nipype/interfaces/spm/tests/test_auto_FieldMap.py @@ -4,127 +4,70 @@ def test_FieldMap_inputs(): input_map = dict( - anat_file=dict( - copyfile=False, - extensions=None, - field='subj.anat', - ), - blip_direction=dict( - field='subj.defaults.defaultsval.blipdir', - mandatory=True, - ), - echo_times=dict( - field='subj.defaults.defaultsval.et', - mandatory=True, - ), + anat_file=dict(copyfile=False, extensions=None, field="subj.anat",), + blip_direction=dict(field="subj.defaults.defaultsval.blipdir", mandatory=True,), + echo_times=dict(field="subj.defaults.defaultsval.et", mandatory=True,), epi_file=dict( - copyfile=False, - extensions=None, - field='subj.session.epi', - mandatory=True, - ), - epifm=dict( - field='subj.defaults.defaultsval.epifm', - usedefault=True, + copyfile=False, extensions=None, field="subj.session.epi", mandatory=True, ), + 
epifm=dict(field="subj.defaults.defaultsval.epifm", usedefault=True,), jacobian_modulation=dict( - field='subj.defaults.defaultsval.ajm', - usedefault=True, + field="subj.defaults.defaultsval.ajm", usedefault=True, ), - jobtype=dict(usedefault=True, ), + jobtype=dict(usedefault=True,), magnitude_file=dict( copyfile=False, extensions=None, - field='subj.data.presubphasemag.magnitude', + field="subj.data.presubphasemag.magnitude", mandatory=True, ), - mask_fwhm=dict( - field='subj.defaults.defaultsval.mflags.fwhm', - usedefault=True, - ), - maskbrain=dict( - field='subj.defaults.defaultsval.maskbrain', - usedefault=True, - ), - matchanat=dict( - field='subj.matchanat', - usedefault=True, - ), - matchvdm=dict( - field='subj.matchvdm', - usedefault=True, - ), + mask_fwhm=dict(field="subj.defaults.defaultsval.mflags.fwhm", usedefault=True,), + maskbrain=dict(field="subj.defaults.defaultsval.maskbrain", usedefault=True,), + matchanat=dict(field="subj.matchanat", usedefault=True,), + matchvdm=dict(field="subj.matchvdm", usedefault=True,), matlab_cmd=dict(), - method=dict( - field='subj.defaults.defaultsval.uflags.method', - usedefault=True, - ), - mfile=dict(usedefault=True, ), + method=dict(field="subj.defaults.defaultsval.uflags.method", usedefault=True,), + mfile=dict(usedefault=True,), ndilate=dict( - field='subj.defaults.defaultsval.mflags.ndilate', - usedefault=True, - ), - nerode=dict( - field='subj.defaults.defaultsval.mflags.nerode', - usedefault=True, - ), - pad=dict( - field='subj.defaults.defaultsval.uflags.pad', - usedefault=True, + field="subj.defaults.defaultsval.mflags.ndilate", usedefault=True, ), + nerode=dict(field="subj.defaults.defaultsval.mflags.nerode", usedefault=True,), + pad=dict(field="subj.defaults.defaultsval.uflags.pad", usedefault=True,), paths=dict(), phase_file=dict( copyfile=False, extensions=None, - field='subj.data.presubphasemag.phase', + field="subj.data.presubphasemag.phase", mandatory=True, ), - reg=dict( - field='subj.defaults.defaultsval.mflags.reg', - usedefault=True, - ), - sessname=dict( - field='subj.sessname', - usedefault=True, - ), + reg=dict(field="subj.defaults.defaultsval.mflags.reg", usedefault=True,), + sessname=dict(field="subj.sessname", usedefault=True,), template=dict( copyfile=False, extensions=None, - field='subj.defaults.defaultsval.mflags.template', - ), - thresh=dict( - field='subj.defaults.defaultsval.mflags.thresh', - usedefault=True, + field="subj.defaults.defaultsval.mflags.template", ), + thresh=dict(field="subj.defaults.defaultsval.mflags.thresh", usedefault=True,), total_readout_time=dict( - field='subj.defaults.defaultsval.tert', - mandatory=True, + field="subj.defaults.defaultsval.tert", mandatory=True, ), unwarp_fwhm=dict( - field='subj.defaults.defaultsval.uflags.fwhm', - usedefault=True, + field="subj.defaults.defaultsval.uflags.fwhm", usedefault=True, ), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), - writeunwarped=dict( - field='subj.writeunwarped', - usedefault=True, - ), - ws=dict( - field='subj.defaults.defaultsval.uflags.ws', - usedefault=True, - ), + use_v8struct=dict(min_ver="8", usedefault=True,), + writeunwarped=dict(field="subj.writeunwarped", usedefault=True,), + ws=dict(field="subj.defaults.defaultsval.uflags.ws", usedefault=True,), ) inputs = FieldMap.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FieldMap_outputs(): - output_map = 
dict(vdm=dict(extensions=None, ), ) + output_map = dict(vdm=dict(extensions=None,),) outputs = FieldMap.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_Level1Design.py b/nipype/interfaces/spm/tests/test_auto_Level1Design.py index 6d941dbec9..8a57a7b86e 100644 --- a/nipype/interfaces/spm/tests/test_auto_Level1Design.py +++ b/nipype/interfaces/spm/tests/test_auto_Level1Design.py @@ -4,51 +4,35 @@ def test_Level1Design_inputs(): input_map = dict( - bases=dict( - field='bases', - mandatory=True, - ), - factor_info=dict(field='fact', ), + bases=dict(field="bases", mandatory=True,), + factor_info=dict(field="fact",), flags=dict(), - global_intensity_normalization=dict(field='global', ), - interscan_interval=dict( - field='timing.RT', - mandatory=True, - ), - mask_image=dict( - extensions=None, - field='mask', - ), - mask_threshold=dict(usedefault=True, ), + global_intensity_normalization=dict(field="global",), + interscan_interval=dict(field="timing.RT", mandatory=True,), + mask_image=dict(extensions=None, field="mask",), + mask_threshold=dict(usedefault=True,), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - microtime_onset=dict(field='timing.fmri_t0', ), - microtime_resolution=dict(field='timing.fmri_t', ), - model_serial_correlations=dict(field='cvi', ), + mfile=dict(usedefault=True,), + microtime_onset=dict(field="timing.fmri_t0",), + microtime_resolution=dict(field="timing.fmri_t",), + model_serial_correlations=dict(field="cvi",), paths=dict(), - session_info=dict( - field='sess', - mandatory=True, - ), - spm_mat_dir=dict(field='dir', ), - timing_units=dict( - field='timing.units', - mandatory=True, - ), + session_info=dict(field="sess", mandatory=True,), + spm_mat_dir=dict(field="dir",), + timing_units=dict(field="timing.units", mandatory=True,), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), - volterra_expansion_order=dict(field='volt', ), + use_v8struct=dict(min_ver="8", usedefault=True,), + volterra_expansion_order=dict(field="volt",), ) inputs = Level1Design.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Level1Design_outputs(): - output_map = dict(spm_mat_file=dict(extensions=None, ), ) + output_map = dict(spm_mat_file=dict(extensions=None,),) outputs = Level1Design.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_MultipleRegressionDesign.py b/nipype/interfaces/spm/tests/test_auto_MultipleRegressionDesign.py index 1fdc1d1ff1..478c869474 100644 --- a/nipype/interfaces/spm/tests/test_auto_MultipleRegressionDesign.py +++ b/nipype/interfaces/spm/tests/test_auto_MultipleRegressionDesign.py @@ -4,64 +4,52 @@ def test_MultipleRegressionDesign_inputs(): input_map = dict( - covariates=dict(field='cov', ), - explicit_mask_file=dict( - extensions=None, - field='masking.em', - ), + covariates=dict(field="cov",), + explicit_mask_file=dict(extensions=None, field="masking.em",), global_calc_mean=dict( - field='globalc.g_mean', - xor=['global_calc_omit', 'global_calc_values'], + field="globalc.g_mean", xor=["global_calc_omit", "global_calc_values"], ), global_calc_omit=dict( - field='globalc.g_omit', - xor=['global_calc_mean', 'global_calc_values'], + field="globalc.g_omit", xor=["global_calc_mean", "global_calc_values"], ), global_calc_values=dict( - field='globalc.g_user.global_uval', - xor=['global_calc_mean', 
'global_calc_omit'], - ), - global_normalization=dict(field='globalm.glonorm', ), - in_files=dict( - field='des.mreg.scans', - mandatory=True, - ), - include_intercept=dict( - field='des.mreg.incint', - usedefault=True, + field="globalc.g_user.global_uval", + xor=["global_calc_mean", "global_calc_omit"], ), + global_normalization=dict(field="globalm.glonorm",), + in_files=dict(field="des.mreg.scans", mandatory=True,), + include_intercept=dict(field="des.mreg.incint", usedefault=True,), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - no_grand_mean_scaling=dict(field='globalm.gmsca.gmsca_no', ), + mfile=dict(usedefault=True,), + no_grand_mean_scaling=dict(field="globalm.gmsca.gmsca_no",), paths=dict(), - spm_mat_dir=dict(field='dir', ), + spm_mat_dir=dict(field="dir",), threshold_mask_absolute=dict( - field='masking.tm.tma.athresh', - xor=['threshold_mask_none', 'threshold_mask_relative'], + field="masking.tm.tma.athresh", + xor=["threshold_mask_none", "threshold_mask_relative"], ), threshold_mask_none=dict( - field='masking.tm.tm_none', - xor=['threshold_mask_absolute', 'threshold_mask_relative'], + field="masking.tm.tm_none", + xor=["threshold_mask_absolute", "threshold_mask_relative"], ), threshold_mask_relative=dict( - field='masking.tm.tmr.rthresh', - xor=['threshold_mask_absolute', 'threshold_mask_none'], + field="masking.tm.tmr.rthresh", + xor=["threshold_mask_absolute", "threshold_mask_none"], ), - use_implicit_threshold=dict(field='masking.im', ), + use_implicit_threshold=dict(field="masking.im",), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), - user_covariates=dict(field='des.mreg.mcov', ), + use_v8struct=dict(min_ver="8", usedefault=True,), + user_covariates=dict(field="des.mreg.mcov",), ) inputs = MultipleRegressionDesign.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MultipleRegressionDesign_outputs(): - output_map = dict(spm_mat_file=dict(extensions=None, ), ) + output_map = dict(spm_mat_file=dict(extensions=None,),) outputs = MultipleRegressionDesign.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_NewSegment.py b/nipype/interfaces/spm/tests/test_auto_NewSegment.py index 505296bbf4..e05643b92e 100644 --- a/nipype/interfaces/spm/tests/test_auto_NewSegment.py +++ b/nipype/interfaces/spm/tests/test_auto_NewSegment.py @@ -4,31 +4,26 @@ def test_NewSegment_inputs(): input_map = dict( - affine_regularization=dict(field='warp.affreg', ), - channel_files=dict( - copyfile=False, - field='channel', - mandatory=True, - ), - channel_info=dict(field='channel', ), + affine_regularization=dict(field="warp.affreg",), + channel_files=dict(copyfile=False, field="channel", mandatory=True,), + channel_info=dict(field="channel",), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict(usedefault=True,), paths=dict(), - sampling_distance=dict(field='warp.samp', ), - tissues=dict(field='tissue', ), + sampling_distance=dict(field="warp.samp",), + tissues=dict(field="tissue",), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), - warping_regularization=dict(field='warp.reg', ), - write_deformation_fields=dict(field='warp.write', ), + use_v8struct=dict(min_ver="8", usedefault=True,), + warping_regularization=dict(field="warp.reg",), + write_deformation_fields=dict(field="warp.write",), ) inputs = NewSegment.input_spec() for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_NewSegment_outputs(): output_map = dict( bias_corrected_images=dict(), diff --git a/nipype/interfaces/spm/tests/test_auto_Normalize.py b/nipype/interfaces/spm/tests/test_auto_Normalize.py index dfa33b5106..e028c609c9 100644 --- a/nipype/interfaces/spm/tests/test_auto_Normalize.py +++ b/nipype/interfaces/spm/tests/test_auto_Normalize.py @@ -4,70 +4,52 @@ def test_Normalize_inputs(): input_map = dict( - DCT_period_cutoff=dict(field='eoptions.cutoff', ), - affine_regularization_type=dict(field='eoptions.regtype', ), - apply_to_files=dict( - copyfile=True, - field='subj.resample', - ), - jobtype=dict(usedefault=True, ), + DCT_period_cutoff=dict(field="eoptions.cutoff",), + affine_regularization_type=dict(field="eoptions.regtype",), + apply_to_files=dict(copyfile=True, field="subj.resample",), + jobtype=dict(usedefault=True,), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - nonlinear_iterations=dict(field='eoptions.nits', ), - nonlinear_regularization=dict(field='eoptions.reg', ), - out_prefix=dict( - field='roptions.prefix', - usedefault=True, - ), + mfile=dict(usedefault=True,), + nonlinear_iterations=dict(field="eoptions.nits",), + nonlinear_regularization=dict(field="eoptions.reg",), + out_prefix=dict(field="roptions.prefix", usedefault=True,), parameter_file=dict( copyfile=False, extensions=None, - field='subj.matname', + field="subj.matname", mandatory=True, - xor=['source', 'template'], + xor=["source", "template"], ), paths=dict(), source=dict( - copyfile=True, - field='subj.source', - mandatory=True, - xor=['parameter_file'], - ), - source_image_smoothing=dict(field='eoptions.smosrc', ), - source_weight=dict( - copyfile=False, - extensions=None, - field='subj.wtsrc', + copyfile=True, field="subj.source", mandatory=True, xor=["parameter_file"], ), + source_image_smoothing=dict(field="eoptions.smosrc",), + source_weight=dict(copyfile=False, extensions=None, field="subj.wtsrc",), template=dict( copyfile=False, extensions=None, - field='eoptions.template', + field="eoptions.template", mandatory=True, - xor=['parameter_file'], - ), - template_image_smoothing=dict(field='eoptions.smoref', ), - template_weight=dict( - copyfile=False, - extensions=None, - field='eoptions.weight', + xor=["parameter_file"], ), + template_image_smoothing=dict(field="eoptions.smoref",), + template_weight=dict(copyfile=False, extensions=None, field="eoptions.weight",), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), - write_bounding_box=dict(field='roptions.bb', ), - write_interp=dict(field='roptions.interp', ), - write_preserve=dict(field='roptions.preserve', ), - write_voxel_sizes=dict(field='roptions.vox', ), - write_wrap=dict(field='roptions.wrap', ), + use_v8struct=dict(min_ver="8", usedefault=True,), + write_bounding_box=dict(field="roptions.bb",), + write_interp=dict(field="roptions.interp",), + write_preserve=dict(field="roptions.preserve",), + write_voxel_sizes=dict(field="roptions.vox",), + write_wrap=dict(field="roptions.wrap",), ) inputs = Normalize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Normalize_outputs(): output_map = dict( normalization_parameters=dict(), diff --git a/nipype/interfaces/spm/tests/test_auto_Normalize12.py b/nipype/interfaces/spm/tests/test_auto_Normalize12.py index 
74abdb1d60..b64475eab3 100644 --- a/nipype/interfaces/spm/tests/test_auto_Normalize12.py +++ b/nipype/interfaces/spm/tests/test_auto_Normalize12.py @@ -4,63 +4,54 @@ def test_Normalize12_inputs(): input_map = dict( - affine_regularization_type=dict(field='eoptions.affreg', ), - apply_to_files=dict( - copyfile=True, - field='subj.resample', - ), - bias_fwhm=dict(field='eoptions.biasfwhm', ), - bias_regularization=dict(field='eoptions.biasreg', ), + affine_regularization_type=dict(field="eoptions.affreg",), + apply_to_files=dict(copyfile=True, field="subj.resample",), + bias_fwhm=dict(field="eoptions.biasfwhm",), + bias_regularization=dict(field="eoptions.biasreg",), deformation_file=dict( copyfile=False, - extensions=['.hdr', '.img', '.img.gz', '.nii'], - field='subj.def', + extensions=[".hdr", ".img", ".img.gz", ".nii"], + field="subj.def", mandatory=True, - xor=['image_to_align', 'tpm'], + xor=["image_to_align", "tpm"], ), image_to_align=dict( copyfile=True, - extensions=['.hdr', '.img', '.img.gz', '.nii'], - field='subj.vol', + extensions=[".hdr", ".img", ".img.gz", ".nii"], + field="subj.vol", mandatory=True, - xor=['deformation_file'], + xor=["deformation_file"], ), - jobtype=dict(usedefault=True, ), + jobtype=dict(usedefault=True,), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - out_prefix=dict( - field='woptions.prefix', - usedefault=True, - ), + mfile=dict(usedefault=True,), + out_prefix=dict(field="woptions.prefix", usedefault=True,), paths=dict(), - sampling_distance=dict(field='eoptions.samp', ), - smoothness=dict(field='eoptions.fwhm', ), + sampling_distance=dict(field="eoptions.samp",), + smoothness=dict(field="eoptions.fwhm",), tpm=dict( copyfile=False, extensions=None, - field='eoptions.tpm', - xor=['deformation_file'], + field="eoptions.tpm", + xor=["deformation_file"], ), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), - warping_regularization=dict(field='eoptions.reg', ), - write_bounding_box=dict(field='woptions.bb', ), - write_interp=dict(field='woptions.interp', ), - write_voxel_sizes=dict(field='woptions.vox', ), + use_v8struct=dict(min_ver="8", usedefault=True,), + warping_regularization=dict(field="eoptions.reg",), + write_bounding_box=dict(field="woptions.bb",), + write_interp=dict(field="woptions.interp",), + write_voxel_sizes=dict(field="woptions.vox",), ) inputs = Normalize12.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Normalize12_outputs(): output_map = dict( - deformation_field=dict(), - normalized_files=dict(), - normalized_image=dict(), + deformation_field=dict(), normalized_files=dict(), normalized_image=dict(), ) outputs = Normalize12.output_spec() diff --git a/nipype/interfaces/spm/tests/test_auto_OneSampleTTestDesign.py b/nipype/interfaces/spm/tests/test_auto_OneSampleTTestDesign.py index 552ca0e701..cd5197602c 100644 --- a/nipype/interfaces/spm/tests/test_auto_OneSampleTTestDesign.py +++ b/nipype/interfaces/spm/tests/test_auto_OneSampleTTestDesign.py @@ -4,59 +4,50 @@ def test_OneSampleTTestDesign_inputs(): input_map = dict( - covariates=dict(field='cov', ), - explicit_mask_file=dict( - extensions=None, - field='masking.em', - ), + covariates=dict(field="cov",), + explicit_mask_file=dict(extensions=None, field="masking.em",), global_calc_mean=dict( - field='globalc.g_mean', - xor=['global_calc_omit', 'global_calc_values'], + field="globalc.g_mean", xor=["global_calc_omit", 
"global_calc_values"], ), global_calc_omit=dict( - field='globalc.g_omit', - xor=['global_calc_mean', 'global_calc_values'], + field="globalc.g_omit", xor=["global_calc_mean", "global_calc_values"], ), global_calc_values=dict( - field='globalc.g_user.global_uval', - xor=['global_calc_mean', 'global_calc_omit'], - ), - global_normalization=dict(field='globalm.glonorm', ), - in_files=dict( - field='des.t1.scans', - mandatory=True, + field="globalc.g_user.global_uval", + xor=["global_calc_mean", "global_calc_omit"], ), + global_normalization=dict(field="globalm.glonorm",), + in_files=dict(field="des.t1.scans", mandatory=True,), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - no_grand_mean_scaling=dict(field='globalm.gmsca.gmsca_no', ), + mfile=dict(usedefault=True,), + no_grand_mean_scaling=dict(field="globalm.gmsca.gmsca_no",), paths=dict(), - spm_mat_dir=dict(field='dir', ), + spm_mat_dir=dict(field="dir",), threshold_mask_absolute=dict( - field='masking.tm.tma.athresh', - xor=['threshold_mask_none', 'threshold_mask_relative'], + field="masking.tm.tma.athresh", + xor=["threshold_mask_none", "threshold_mask_relative"], ), threshold_mask_none=dict( - field='masking.tm.tm_none', - xor=['threshold_mask_absolute', 'threshold_mask_relative'], + field="masking.tm.tm_none", + xor=["threshold_mask_absolute", "threshold_mask_relative"], ), threshold_mask_relative=dict( - field='masking.tm.tmr.rthresh', - xor=['threshold_mask_absolute', 'threshold_mask_none'], + field="masking.tm.tmr.rthresh", + xor=["threshold_mask_absolute", "threshold_mask_none"], ), - use_implicit_threshold=dict(field='masking.im', ), + use_implicit_threshold=dict(field="masking.im",), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), + use_v8struct=dict(min_ver="8", usedefault=True,), ) inputs = OneSampleTTestDesign.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_OneSampleTTestDesign_outputs(): - output_map = dict(spm_mat_file=dict(extensions=None, ), ) + output_map = dict(spm_mat_file=dict(extensions=None,),) outputs = OneSampleTTestDesign.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_PairedTTestDesign.py b/nipype/interfaces/spm/tests/test_auto_PairedTTestDesign.py index d8e9ef1615..bb516488ee 100644 --- a/nipype/interfaces/spm/tests/test_auto_PairedTTestDesign.py +++ b/nipype/interfaces/spm/tests/test_auto_PairedTTestDesign.py @@ -4,61 +4,52 @@ def test_PairedTTestDesign_inputs(): input_map = dict( - ancova=dict(field='des.pt.ancova', ), - covariates=dict(field='cov', ), - explicit_mask_file=dict( - extensions=None, - field='masking.em', - ), + ancova=dict(field="des.pt.ancova",), + covariates=dict(field="cov",), + explicit_mask_file=dict(extensions=None, field="masking.em",), global_calc_mean=dict( - field='globalc.g_mean', - xor=['global_calc_omit', 'global_calc_values'], + field="globalc.g_mean", xor=["global_calc_omit", "global_calc_values"], ), global_calc_omit=dict( - field='globalc.g_omit', - xor=['global_calc_mean', 'global_calc_values'], + field="globalc.g_omit", xor=["global_calc_mean", "global_calc_values"], ), global_calc_values=dict( - field='globalc.g_user.global_uval', - xor=['global_calc_mean', 'global_calc_omit'], + field="globalc.g_user.global_uval", + xor=["global_calc_mean", "global_calc_omit"], ), - global_normalization=dict(field='globalm.glonorm', ), - 
grand_mean_scaling=dict(field='des.pt.gmsca', ), + global_normalization=dict(field="globalm.glonorm",), + grand_mean_scaling=dict(field="des.pt.gmsca",), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - no_grand_mean_scaling=dict(field='globalm.gmsca.gmsca_no', ), - paired_files=dict( - field='des.pt.pair', - mandatory=True, - ), + mfile=dict(usedefault=True,), + no_grand_mean_scaling=dict(field="globalm.gmsca.gmsca_no",), + paired_files=dict(field="des.pt.pair", mandatory=True,), paths=dict(), - spm_mat_dir=dict(field='dir', ), + spm_mat_dir=dict(field="dir",), threshold_mask_absolute=dict( - field='masking.tm.tma.athresh', - xor=['threshold_mask_none', 'threshold_mask_relative'], + field="masking.tm.tma.athresh", + xor=["threshold_mask_none", "threshold_mask_relative"], ), threshold_mask_none=dict( - field='masking.tm.tm_none', - xor=['threshold_mask_absolute', 'threshold_mask_relative'], + field="masking.tm.tm_none", + xor=["threshold_mask_absolute", "threshold_mask_relative"], ), threshold_mask_relative=dict( - field='masking.tm.tmr.rthresh', - xor=['threshold_mask_absolute', 'threshold_mask_none'], + field="masking.tm.tmr.rthresh", + xor=["threshold_mask_absolute", "threshold_mask_none"], ), - use_implicit_threshold=dict(field='masking.im', ), + use_implicit_threshold=dict(field="masking.im",), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), + use_v8struct=dict(min_ver="8", usedefault=True,), ) inputs = PairedTTestDesign.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_PairedTTestDesign_outputs(): - output_map = dict(spm_mat_file=dict(extensions=None, ), ) + output_map = dict(spm_mat_file=dict(extensions=None,),) outputs = PairedTTestDesign.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_Realign.py b/nipype/interfaces/spm/tests/test_auto_Realign.py index c65181e277..ab76f6a82d 100644 --- a/nipype/interfaces/spm/tests/test_auto_Realign.py +++ b/nipype/interfaces/spm/tests/test_auto_Realign.py @@ -4,52 +4,36 @@ def test_Realign_inputs(): input_map = dict( - fwhm=dict(field='eoptions.fwhm', ), - in_files=dict( - copyfile=True, - field='data', - mandatory=True, - ), - interp=dict(field='eoptions.interp', ), - jobtype=dict(usedefault=True, ), + fwhm=dict(field="eoptions.fwhm",), + in_files=dict(copyfile=True, field="data", mandatory=True,), + interp=dict(field="eoptions.interp",), + jobtype=dict(usedefault=True,), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - out_prefix=dict( - field='roptions.prefix', - usedefault=True, - ), + mfile=dict(usedefault=True,), + out_prefix=dict(field="roptions.prefix", usedefault=True,), paths=dict(), - quality=dict(field='eoptions.quality', ), - register_to_mean=dict(field='eoptions.rtm', ), - separation=dict(field='eoptions.sep', ), + quality=dict(field="eoptions.quality",), + register_to_mean=dict(field="eoptions.rtm",), + separation=dict(field="eoptions.sep",), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), - weight_img=dict( - extensions=None, - field='eoptions.weight', - ), - wrap=dict(field='eoptions.wrap', ), - write_interp=dict(field='roptions.interp', ), - write_mask=dict(field='roptions.mask', ), - write_which=dict( - field='roptions.which', - maxlen=2, - minlen=2, - usedefault=True, - ), - write_wrap=dict(field='roptions.wrap', ), + use_v8struct=dict(min_ver="8", usedefault=True,), + 
weight_img=dict(extensions=None, field="eoptions.weight",), + wrap=dict(field="eoptions.wrap",), + write_interp=dict(field="roptions.interp",), + write_mask=dict(field="roptions.mask",), + write_which=dict(field="roptions.which", maxlen=2, minlen=2, usedefault=True,), + write_wrap=dict(field="roptions.wrap",), ) inputs = Realign.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Realign_outputs(): output_map = dict( - mean_image=dict(extensions=None, ), + mean_image=dict(extensions=None,), modified_in_files=dict(), realigned_files=dict(), realignment_parameters=dict(), diff --git a/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py b/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py index 85d0ba3d94..fd4e420423 100644 --- a/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py +++ b/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py @@ -4,79 +4,52 @@ def test_RealignUnwarp_inputs(): input_map = dict( - est_basis_func=dict(field='uweoptions.basfcn', ), - est_first_order_effects=dict(field='uweoptions.fot', ), - est_jacobian_deformations=dict(field='uweoptions.jm', ), + est_basis_func=dict(field="uweoptions.basfcn",), + est_first_order_effects=dict(field="uweoptions.fot",), + est_jacobian_deformations=dict(field="uweoptions.jm",), est_num_of_iterations=dict( - field='uweoptions.noi', - maxlen=1, - minlen=1, - usedefault=True, + field="uweoptions.noi", maxlen=1, minlen=1, usedefault=True, ), - est_re_est_mov_par=dict(field='uweoptions.rem', ), + est_re_est_mov_par=dict(field="uweoptions.rem",), est_reg_factor=dict( - field='uweoptions.lambda', - maxlen=1, - minlen=1, - usedefault=True, - ), - est_reg_order=dict(field='uweoptions.regorder', ), - est_second_order_effects=dict(field='uweoptions.sot', ), - est_taylor_expansion_point=dict( - field='uweoptions.expround', - usedefault=True, - ), - est_unwarp_fwhm=dict(field='uweoptions.uwfwhm', ), - fwhm=dict(field='eoptions.fwhm', ), - in_files=dict( - copyfile=True, - field='data.scans', - mandatory=True, - ), - interp=dict(field='eoptions.einterp', ), + field="uweoptions.lambda", maxlen=1, minlen=1, usedefault=True, + ), + est_reg_order=dict(field="uweoptions.regorder",), + est_second_order_effects=dict(field="uweoptions.sot",), + est_taylor_expansion_point=dict(field="uweoptions.expround", usedefault=True,), + est_unwarp_fwhm=dict(field="uweoptions.uwfwhm",), + fwhm=dict(field="eoptions.fwhm",), + in_files=dict(copyfile=True, field="data.scans", mandatory=True,), + interp=dict(field="eoptions.einterp",), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - out_prefix=dict( - field='uwroptions.prefix', - usedefault=True, - ), + mfile=dict(usedefault=True,), + out_prefix=dict(field="uwroptions.prefix", usedefault=True,), paths=dict(), - phase_map=dict( - copyfile=False, - extensions=None, - field='data.pmscan', - ), - quality=dict(field='eoptions.quality', ), - register_to_mean=dict(field='eoptions.rtm', ), - reslice_interp=dict(field='uwroptions.rinterp', ), - reslice_mask=dict(field='uwroptions.mask', ), + phase_map=dict(copyfile=False, extensions=None, field="data.pmscan",), + quality=dict(field="eoptions.quality",), + register_to_mean=dict(field="eoptions.rtm",), + reslice_interp=dict(field="uwroptions.rinterp",), + reslice_mask=dict(field="uwroptions.mask",), reslice_which=dict( - field='uwroptions.uwwhich', - maxlen=2, - minlen=2, - usedefault=True, + field="uwroptions.uwwhich", maxlen=2, minlen=2, 
usedefault=True, ), - reslice_wrap=dict(field='uwroptions.wrap', ), - separation=dict(field='eoptions.sep', ), + reslice_wrap=dict(field="uwroptions.wrap",), + separation=dict(field="eoptions.sep",), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), - weight_img=dict( - extensions=None, - field='eoptions.weight', - ), - wrap=dict(field='eoptions.ewrap', ), + use_v8struct=dict(min_ver="8", usedefault=True,), + weight_img=dict(extensions=None, field="eoptions.weight",), + wrap=dict(field="eoptions.ewrap",), ) inputs = RealignUnwarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_RealignUnwarp_outputs(): output_map = dict( - mean_image=dict(extensions=None, ), + mean_image=dict(extensions=None,), modified_in_files=dict(), realigned_unwarped_files=dict(), realignment_parameters=dict(), diff --git a/nipype/interfaces/spm/tests/test_auto_Reslice.py b/nipype/interfaces/spm/tests/test_auto_Reslice.py index ca6a89ff67..46083f8192 100644 --- a/nipype/interfaces/spm/tests/test_auto_Reslice.py +++ b/nipype/interfaces/spm/tests/test_auto_Reslice.py @@ -4,32 +4,25 @@ def test_Reslice_inputs(): input_map = dict( - in_file=dict( - extensions=None, - mandatory=True, - ), - interp=dict(usedefault=True, ), + in_file=dict(extensions=None, mandatory=True,), + interp=dict(usedefault=True,), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - out_file=dict(extensions=None, ), + mfile=dict(usedefault=True,), + out_file=dict(extensions=None,), paths=dict(), - space_defining=dict( - extensions=None, - mandatory=True, - ), + space_defining=dict(extensions=None, mandatory=True,), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), + use_v8struct=dict(min_ver="8", usedefault=True,), ) inputs = Reslice.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Reslice_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Reslice.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_ResliceToReference.py b/nipype/interfaces/spm/tests/test_auto_ResliceToReference.py index f1ec8393ff..ebea9a0bf4 100644 --- a/nipype/interfaces/spm/tests/test_auto_ResliceToReference.py +++ b/nipype/interfaces/spm/tests/test_auto_ResliceToReference.py @@ -4,33 +4,26 @@ def test_ResliceToReference_inputs(): input_map = dict( - bounding_box=dict(field='comp{2}.idbbvox.bb', ), - in_files=dict( - field='fnames', - mandatory=True, - ), - interpolation=dict(field='interp', ), + bounding_box=dict(field="comp{2}.idbbvox.bb",), + in_files=dict(field="fnames", mandatory=True,), + interpolation=dict(field="interp",), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict(usedefault=True,), paths=dict(), - target=dict( - extensions=None, - field='comp{1}.id.space', - ), + target=dict(extensions=None, field="comp{1}.id.space",), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), - voxel_sizes=dict(field='comp{2}.idbbvox.vox', ), + use_v8struct=dict(min_ver="8", usedefault=True,), + voxel_sizes=dict(field="comp{2}.idbbvox.vox",), ) inputs = ResliceToReference.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], 
metakey) == value + + def test_ResliceToReference_outputs(): - output_map = dict(out_files=dict(), ) + output_map = dict(out_files=dict(),) outputs = ResliceToReference.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_SPMCommand.py b/nipype/interfaces/spm/tests/test_auto_SPMCommand.py index 32a6b28b5c..bde05ad1be 100644 --- a/nipype/interfaces/spm/tests/test_auto_SPMCommand.py +++ b/nipype/interfaces/spm/tests/test_auto_SPMCommand.py @@ -5,13 +5,10 @@ def test_SPMCommand_inputs(): input_map = dict( matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict(usedefault=True,), paths=dict(), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), + use_v8struct=dict(min_ver="8", usedefault=True,), ) inputs = SPMCommand.input_spec() diff --git a/nipype/interfaces/spm/tests/test_auto_Segment.py b/nipype/interfaces/spm/tests/test_auto_Segment.py index c746cd258e..ea07881981 100644 --- a/nipype/interfaces/spm/tests/test_auto_Segment.py +++ b/nipype/interfaces/spm/tests/test_auto_Segment.py @@ -4,61 +4,51 @@ def test_Segment_inputs(): input_map = dict( - affine_regularization=dict(field='opts.regtype', ), - bias_fwhm=dict(field='opts.biasfwhm', ), - bias_regularization=dict(field='opts.biasreg', ), - clean_masks=dict(field='output.cleanup', ), - csf_output_type=dict(field='output.CSF', ), - data=dict( - copyfile=False, - field='data', - mandatory=True, - ), - gaussians_per_class=dict(field='opts.ngaus', ), - gm_output_type=dict(field='output.GM', ), - mask_image=dict( - extensions=None, - field='opts.msk', - ), + affine_regularization=dict(field="opts.regtype",), + bias_fwhm=dict(field="opts.biasfwhm",), + bias_regularization=dict(field="opts.biasreg",), + clean_masks=dict(field="output.cleanup",), + csf_output_type=dict(field="output.CSF",), + data=dict(copyfile=False, field="data", mandatory=True,), + gaussians_per_class=dict(field="opts.ngaus",), + gm_output_type=dict(field="output.GM",), + mask_image=dict(extensions=None, field="opts.msk",), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict(usedefault=True,), paths=dict(), - sampling_distance=dict(field='opts.samp', ), - save_bias_corrected=dict(field='output.biascor', ), - tissue_prob_maps=dict(field='opts.tpm', ), + sampling_distance=dict(field="opts.samp",), + save_bias_corrected=dict(field="output.biascor",), + tissue_prob_maps=dict(field="opts.tpm",), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), - warp_frequency_cutoff=dict(field='opts.warpco', ), - warping_regularization=dict(field='opts.warpreg', ), - wm_output_type=dict(field='output.WM', ), + use_v8struct=dict(min_ver="8", usedefault=True,), + warp_frequency_cutoff=dict(field="opts.warpco",), + warping_regularization=dict(field="opts.warpreg",), + wm_output_type=dict(field="output.WM",), ) inputs = Segment.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Segment_outputs(): output_map = dict( - bias_corrected_image=dict(extensions=None, ), - inverse_transformation_mat=dict(extensions=None, ), - modulated_csf_image=dict(extensions=None, ), - modulated_gm_image=dict(extensions=None, ), + bias_corrected_image=dict(extensions=None,), + inverse_transformation_mat=dict(extensions=None,), + modulated_csf_image=dict(extensions=None,), + modulated_gm_image=dict(extensions=None,), modulated_input_image=dict( - deprecated='0.10', - 
extensions=None, - new_name='bias_corrected_image', + deprecated="0.10", extensions=None, new_name="bias_corrected_image", ), - modulated_wm_image=dict(extensions=None, ), - native_csf_image=dict(extensions=None, ), - native_gm_image=dict(extensions=None, ), - native_wm_image=dict(extensions=None, ), - normalized_csf_image=dict(extensions=None, ), - normalized_gm_image=dict(extensions=None, ), - normalized_wm_image=dict(extensions=None, ), - transformation_mat=dict(extensions=None, ), + modulated_wm_image=dict(extensions=None,), + native_csf_image=dict(extensions=None,), + native_gm_image=dict(extensions=None,), + native_wm_image=dict(extensions=None,), + normalized_csf_image=dict(extensions=None,), + normalized_gm_image=dict(extensions=None,), + normalized_wm_image=dict(extensions=None,), + transformation_mat=dict(extensions=None,), ) outputs = Segment.output_spec() diff --git a/nipype/interfaces/spm/tests/test_auto_SliceTiming.py b/nipype/interfaces/spm/tests/test_auto_SliceTiming.py index 369392b5e9..8c99e4c4e3 100644 --- a/nipype/interfaces/spm/tests/test_auto_SliceTiming.py +++ b/nipype/interfaces/spm/tests/test_auto_SliceTiming.py @@ -4,51 +4,28 @@ def test_SliceTiming_inputs(): input_map = dict( - in_files=dict( - copyfile=False, - field='scans', - mandatory=True, - ), + in_files=dict(copyfile=False, field="scans", mandatory=True,), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - num_slices=dict( - field='nslices', - mandatory=True, - ), - out_prefix=dict( - field='prefix', - usedefault=True, - ), + mfile=dict(usedefault=True,), + num_slices=dict(field="nslices", mandatory=True,), + out_prefix=dict(field="prefix", usedefault=True,), paths=dict(), - ref_slice=dict( - field='refslice', - mandatory=True, - ), - slice_order=dict( - field='so', - mandatory=True, - ), - time_acquisition=dict( - field='ta', - mandatory=True, - ), - time_repetition=dict( - field='tr', - mandatory=True, - ), + ref_slice=dict(field="refslice", mandatory=True,), + slice_order=dict(field="so", mandatory=True,), + time_acquisition=dict(field="ta", mandatory=True,), + time_repetition=dict(field="tr", mandatory=True,), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), + use_v8struct=dict(min_ver="8", usedefault=True,), ) inputs = SliceTiming.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_SliceTiming_outputs(): - output_map = dict(timecorrected_files=dict(), ) + output_map = dict(timecorrected_files=dict(),) outputs = SliceTiming.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_Smooth.py b/nipype/interfaces/spm/tests/test_auto_Smooth.py index b4e2d42b0f..4e0025a292 100644 --- a/nipype/interfaces/spm/tests/test_auto_Smooth.py +++ b/nipype/interfaces/spm/tests/test_auto_Smooth.py @@ -4,34 +4,26 @@ def test_Smooth_inputs(): input_map = dict( - data_type=dict(field='dtype', ), - fwhm=dict(field='fwhm', ), - implicit_masking=dict(field='im', ), - in_files=dict( - copyfile=False, - field='data', - mandatory=True, - ), + data_type=dict(field="dtype",), + fwhm=dict(field="fwhm",), + implicit_masking=dict(field="im",), + in_files=dict(copyfile=False, field="data", mandatory=True,), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - out_prefix=dict( - field='prefix', - usedefault=True, - ), + mfile=dict(usedefault=True,), + out_prefix=dict(field="prefix", usedefault=True,), paths=dict(), use_mcr=dict(), - 
use_v8struct=dict( - min_ver='8', - usedefault=True, - ), + use_v8struct=dict(min_ver="8", usedefault=True,), ) inputs = Smooth.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Smooth_outputs(): - output_map = dict(smoothed_files=dict(), ) + output_map = dict(smoothed_files=dict(),) outputs = Smooth.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_Threshold.py b/nipype/interfaces/spm/tests/test_auto_Threshold.py index 3f73cf91e0..75fcbf06e2 100644 --- a/nipype/interfaces/spm/tests/test_auto_Threshold.py +++ b/nipype/interfaces/spm/tests/test_auto_Threshold.py @@ -4,46 +4,37 @@ def test_Threshold_inputs(): input_map = dict( - contrast_index=dict(mandatory=True, ), - extent_fdr_p_threshold=dict(usedefault=True, ), - extent_threshold=dict(usedefault=True, ), - force_activation=dict(usedefault=True, ), - height_threshold=dict(usedefault=True, ), - height_threshold_type=dict(usedefault=True, ), + contrast_index=dict(mandatory=True,), + extent_fdr_p_threshold=dict(usedefault=True,), + extent_threshold=dict(usedefault=True,), + force_activation=dict(usedefault=True,), + height_threshold=dict(usedefault=True,), + height_threshold_type=dict(usedefault=True,), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict(usedefault=True,), paths=dict(), - spm_mat_file=dict( - copyfile=True, - extensions=None, - mandatory=True, - ), - stat_image=dict( - copyfile=False, - extensions=None, - mandatory=True, - ), - use_fwe_correction=dict(usedefault=True, ), + spm_mat_file=dict(copyfile=True, extensions=None, mandatory=True,), + stat_image=dict(copyfile=False, extensions=None, mandatory=True,), + use_fwe_correction=dict(usedefault=True,), use_mcr=dict(), - use_topo_fdr=dict(usedefault=True, ), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), + use_topo_fdr=dict(usedefault=True,), + use_v8struct=dict(min_ver="8", usedefault=True,), ) inputs = Threshold.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Threshold_outputs(): output_map = dict( activation_forced=dict(), cluster_forming_thr=dict(), n_clusters=dict(), - pre_topo_fdr_map=dict(extensions=None, ), + pre_topo_fdr_map=dict(extensions=None,), pre_topo_n_clusters=dict(), - thresholded_map=dict(extensions=None, ), + thresholded_map=dict(extensions=None,), ) outputs = Threshold.output_spec() diff --git a/nipype/interfaces/spm/tests/test_auto_ThresholdStatistics.py b/nipype/interfaces/spm/tests/test_auto_ThresholdStatistics.py index 33c40f26ce..c654be7b3d 100644 --- a/nipype/interfaces/spm/tests/test_auto_ThresholdStatistics.py +++ b/nipype/interfaces/spm/tests/test_auto_ThresholdStatistics.py @@ -4,33 +4,24 @@ def test_ThresholdStatistics_inputs(): input_map = dict( - contrast_index=dict(mandatory=True, ), - extent_threshold=dict(usedefault=True, ), - height_threshold=dict(mandatory=True, ), + contrast_index=dict(mandatory=True,), + extent_threshold=dict(usedefault=True,), + height_threshold=dict(mandatory=True,), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), + mfile=dict(usedefault=True,), paths=dict(), - spm_mat_file=dict( - copyfile=True, - extensions=None, - mandatory=True, - ), - stat_image=dict( - copyfile=False, - extensions=None, - mandatory=True, - ), + spm_mat_file=dict(copyfile=True, extensions=None, mandatory=True,), 
+ stat_image=dict(copyfile=False, extensions=None, mandatory=True,), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), + use_v8struct=dict(min_ver="8", usedefault=True,), ) inputs = ThresholdStatistics.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ThresholdStatistics_outputs(): output_map = dict( clusterwise_P_FDR=dict(), diff --git a/nipype/interfaces/spm/tests/test_auto_TwoSampleTTestDesign.py b/nipype/interfaces/spm/tests/test_auto_TwoSampleTTestDesign.py index 30b99be080..fa0cc9e331 100644 --- a/nipype/interfaces/spm/tests/test_auto_TwoSampleTTestDesign.py +++ b/nipype/interfaces/spm/tests/test_auto_TwoSampleTTestDesign.py @@ -4,65 +4,53 @@ def test_TwoSampleTTestDesign_inputs(): input_map = dict( - covariates=dict(field='cov', ), - dependent=dict(field='des.t2.dept', ), - explicit_mask_file=dict( - extensions=None, - field='masking.em', - ), + covariates=dict(field="cov",), + dependent=dict(field="des.t2.dept",), + explicit_mask_file=dict(extensions=None, field="masking.em",), global_calc_mean=dict( - field='globalc.g_mean', - xor=['global_calc_omit', 'global_calc_values'], + field="globalc.g_mean", xor=["global_calc_omit", "global_calc_values"], ), global_calc_omit=dict( - field='globalc.g_omit', - xor=['global_calc_mean', 'global_calc_values'], + field="globalc.g_omit", xor=["global_calc_mean", "global_calc_values"], ), global_calc_values=dict( - field='globalc.g_user.global_uval', - xor=['global_calc_mean', 'global_calc_omit'], - ), - global_normalization=dict(field='globalm.glonorm', ), - group1_files=dict( - field='des.t2.scans1', - mandatory=True, - ), - group2_files=dict( - field='des.t2.scans2', - mandatory=True, + field="globalc.g_user.global_uval", + xor=["global_calc_mean", "global_calc_omit"], ), + global_normalization=dict(field="globalm.glonorm",), + group1_files=dict(field="des.t2.scans1", mandatory=True,), + group2_files=dict(field="des.t2.scans2", mandatory=True,), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - no_grand_mean_scaling=dict(field='globalm.gmsca.gmsca_no', ), + mfile=dict(usedefault=True,), + no_grand_mean_scaling=dict(field="globalm.gmsca.gmsca_no",), paths=dict(), - spm_mat_dir=dict(field='dir', ), + spm_mat_dir=dict(field="dir",), threshold_mask_absolute=dict( - field='masking.tm.tma.athresh', - xor=['threshold_mask_none', 'threshold_mask_relative'], + field="masking.tm.tma.athresh", + xor=["threshold_mask_none", "threshold_mask_relative"], ), threshold_mask_none=dict( - field='masking.tm.tm_none', - xor=['threshold_mask_absolute', 'threshold_mask_relative'], + field="masking.tm.tm_none", + xor=["threshold_mask_absolute", "threshold_mask_relative"], ), threshold_mask_relative=dict( - field='masking.tm.tmr.rthresh', - xor=['threshold_mask_absolute', 'threshold_mask_none'], + field="masking.tm.tmr.rthresh", + xor=["threshold_mask_absolute", "threshold_mask_none"], ), - unequal_variance=dict(field='des.t2.variance', ), - use_implicit_threshold=dict(field='masking.im', ), + unequal_variance=dict(field="des.t2.variance",), + use_implicit_threshold=dict(field="masking.im",), use_mcr=dict(), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), + use_v8struct=dict(min_ver="8", usedefault=True,), ) inputs = TwoSampleTTestDesign.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def 
test_TwoSampleTTestDesign_outputs(): - output_map = dict(spm_mat_file=dict(extensions=None, ), ) + output_map = dict(spm_mat_file=dict(extensions=None,),) outputs = TwoSampleTTestDesign.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/spm/tests/test_auto_VBMSegment.py b/nipype/interfaces/spm/tests/test_auto_VBMSegment.py index 586b7b53e1..626ce9893f 100644 --- a/nipype/interfaces/spm/tests/test_auto_VBMSegment.py +++ b/nipype/interfaces/spm/tests/test_auto_VBMSegment.py @@ -5,146 +5,72 @@ def test_VBMSegment_inputs(): input_map = dict( bias_corrected_affine=dict( - field='estwrite.output.bias.affine', - usedefault=True, + field="estwrite.output.bias.affine", usedefault=True, ), bias_corrected_native=dict( - field='estwrite.output.bias.native', - usedefault=True, + field="estwrite.output.bias.native", usedefault=True, ), bias_corrected_normalized=dict( - field='estwrite.output.bias.warped', - usedefault=True, - ), - bias_fwhm=dict( - field='estwrite.opts.biasfwhm', - usedefault=True, - ), - bias_regularization=dict( - field='estwrite.opts.biasreg', - usedefault=True, - ), - cleanup_partitions=dict( - field='estwrite.extopts.cleanup', - usedefault=True, - ), - csf_dartel=dict( - field='estwrite.output.CSF.dartel', - usedefault=True, + field="estwrite.output.bias.warped", usedefault=True, ), + bias_fwhm=dict(field="estwrite.opts.biasfwhm", usedefault=True,), + bias_regularization=dict(field="estwrite.opts.biasreg", usedefault=True,), + cleanup_partitions=dict(field="estwrite.extopts.cleanup", usedefault=True,), + csf_dartel=dict(field="estwrite.output.CSF.dartel", usedefault=True,), csf_modulated_normalized=dict( - field='estwrite.output.CSF.modulated', - usedefault=True, - ), - csf_native=dict( - field='estwrite.output.CSF.native', - usedefault=True, - ), - csf_normalized=dict( - field='estwrite.output.CSF.warped', - usedefault=True, + field="estwrite.output.CSF.modulated", usedefault=True, ), + csf_native=dict(field="estwrite.output.CSF.native", usedefault=True,), + csf_normalized=dict(field="estwrite.output.CSF.warped", usedefault=True,), dartel_template=dict( - extensions=['.hdr', '.img', '.img.gz', '.nii'], - field='estwrite.extopts.dartelwarp.normhigh.darteltpm', - ), - deformation_field=dict( - field='estwrite.output.warps', - usedefault=True, - ), - display_results=dict( - field='estwrite.extopts.print', - usedefault=True, - ), - gaussians_per_class=dict(usedefault=True, ), - gm_dartel=dict( - field='estwrite.output.GM.dartel', - usedefault=True, + extensions=[".hdr", ".img", ".img.gz", ".nii"], + field="estwrite.extopts.dartelwarp.normhigh.darteltpm", ), + deformation_field=dict(field="estwrite.output.warps", usedefault=True,), + display_results=dict(field="estwrite.extopts.print", usedefault=True,), + gaussians_per_class=dict(usedefault=True,), + gm_dartel=dict(field="estwrite.output.GM.dartel", usedefault=True,), gm_modulated_normalized=dict( - field='estwrite.output.GM.modulated', - usedefault=True, - ), - gm_native=dict( - field='estwrite.output.GM.native', - usedefault=True, - ), - gm_normalized=dict( - field='estwrite.output.GM.warped', - usedefault=True, - ), - in_files=dict( - copyfile=False, - field='estwrite.data', - mandatory=True, - ), - jacobian_determinant=dict( - field='estwrite.jacobian.warped', - usedefault=True, + field="estwrite.output.GM.modulated", usedefault=True, ), + gm_native=dict(field="estwrite.output.GM.native", usedefault=True,), + gm_normalized=dict(field="estwrite.output.GM.warped", usedefault=True,), + 
in_files=dict(copyfile=False, field="estwrite.data", mandatory=True,), + jacobian_determinant=dict(field="estwrite.jacobian.warped", usedefault=True,), matlab_cmd=dict(), - mfile=dict(usedefault=True, ), - mrf_weighting=dict( - field='estwrite.extopts.mrf', - usedefault=True, - ), + mfile=dict(usedefault=True,), + mrf_weighting=dict(field="estwrite.extopts.mrf", usedefault=True,), paths=dict(), - pve_label_dartel=dict( - field='estwrite.output.label.dartel', - usedefault=True, - ), - pve_label_native=dict( - field='estwrite.output.label.native', - usedefault=True, - ), + pve_label_dartel=dict(field="estwrite.output.label.dartel", usedefault=True,), + pve_label_native=dict(field="estwrite.output.label.native", usedefault=True,), pve_label_normalized=dict( - field='estwrite.output.label.warped', - usedefault=True, - ), - sampling_distance=dict( - field='estwrite.opts.samp', - usedefault=True, + field="estwrite.output.label.warped", usedefault=True, ), - spatial_normalization=dict(usedefault=True, ), + sampling_distance=dict(field="estwrite.opts.samp", usedefault=True,), + spatial_normalization=dict(usedefault=True,), tissues=dict( - extensions=['.hdr', '.img', '.img.gz', '.nii'], - field='estwrite.tpm', + extensions=[".hdr", ".img", ".img.gz", ".nii"], field="estwrite.tpm", ), use_mcr=dict(), use_sanlm_denoising_filter=dict( - field='estwrite.extopts.sanlm', - usedefault=True, - ), - use_v8struct=dict( - min_ver='8', - usedefault=True, - ), - warping_regularization=dict( - field='estwrite.opts.warpreg', - usedefault=True, - ), - wm_dartel=dict( - field='estwrite.output.WM.dartel', - usedefault=True, + field="estwrite.extopts.sanlm", usedefault=True, ), + use_v8struct=dict(min_ver="8", usedefault=True,), + warping_regularization=dict(field="estwrite.opts.warpreg", usedefault=True,), + wm_dartel=dict(field="estwrite.output.WM.dartel", usedefault=True,), wm_modulated_normalized=dict( - field='estwrite.output.WM.modulated', - usedefault=True, - ), - wm_native=dict( - field='estwrite.output.WM.native', - usedefault=True, - ), - wm_normalized=dict( - field='estwrite.output.WM.warped', - usedefault=True, + field="estwrite.output.WM.modulated", usedefault=True, ), + wm_native=dict(field="estwrite.output.WM.native", usedefault=True,), + wm_normalized=dict(field="estwrite.output.WM.warped", usedefault=True,), ) inputs = VBMSegment.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_VBMSegment_outputs(): output_map = dict( bias_corrected_images=dict(), diff --git a/nipype/interfaces/spm/tests/test_base.py b/nipype/interfaces/spm/tests/test_base.py index c59cd6b234..c2c991d742 100644 --- a/nipype/interfaces/spm/tests/test_base.py +++ b/nipype/interfaces/spm/tests/test_base.py @@ -13,7 +13,7 @@ from nipype.interfaces.spm.base import SPMCommandInputSpec from nipype.interfaces.base import traits -mlab.MatlabCommand.set_default_matlab_cmd(os.getenv('MATLABCMD', 'matlab')) +mlab.MatlabCommand.set_default_matlab_cmd(os.getenv("MATLABCMD", "matlab")) def test_scan_for_fnames(create_files_in_directory): @@ -31,7 +31,7 @@ def test_spm_path(): spm_path = spm.Info.path() if spm_path is not None: assert isinstance(spm_path, (str, bytes)) - assert 'spm' in spm_path.lower() + assert "spm" in spm_path.lower() def test_use_mfile(): @@ -49,7 +49,7 @@ class TestClass(spm.SPMCommand): pass # test without FORCE_SPMMCR, SPMMCRCMD set - for varname in ['FORCE_SPMMCR', 'SPMMCRCMD']: + for varname in 
["FORCE_SPMMCR", "SPMMCRCMD"]: try: del os.environ[varname] except KeyError: @@ -58,15 +58,15 @@ class TestClass(spm.SPMCommand): assert dc._use_mcr is None assert dc._matlab_cmd is None # test with only FORCE_SPMMCR set - os.environ['FORCE_SPMMCR'] = '1' + os.environ["FORCE_SPMMCR"] = "1" dc = TestClass() assert dc._use_mcr assert dc._matlab_cmd is None # test with both, FORCE_SPMMCR and SPMMCRCMD set - os.environ['SPMMCRCMD'] = 'spmcmd' + os.environ["SPMMCRCMD"] = "spmcmd" dc = TestClass() assert dc._use_mcr - assert dc._matlab_cmd == 'spmcmd' + assert dc._matlab_cmd == "spmcmd" # restore environment os.environ.clear() os.environ.update(saved_env) @@ -78,19 +78,19 @@ class TestClass(spm.SPMCommand): input_spec = spm.SPMCommandInputSpec dc = TestClass() # dc = derived_class - dc.inputs.matlab_cmd = 'foo' - assert dc.mlab._cmd == 'foo' + dc.inputs.matlab_cmd = "foo" + assert dc.mlab._cmd == "foo" def test_cmd_update2(): class TestClass(spm.SPMCommand): - _jobtype = 'jobtype' - _jobname = 'jobname' + _jobtype = "jobtype" + _jobname = "jobname" input_spec = spm.SPMCommandInputSpec dc = TestClass() # dc = derived_class - assert dc.jobtype == 'jobtype' - assert dc.jobname == 'jobname' + assert dc.jobtype == "jobtype" + assert dc.jobname == "jobname" def test_reformat_dict_for_savemat(): @@ -98,8 +98,8 @@ class TestClass(spm.SPMCommand): input_spec = spm.SPMCommandInputSpec dc = TestClass() # dc = derived_class - out = dc._reformat_dict_for_savemat({'a': {'b': {'c': []}}}) - assert out == [{'a': [{'b': [{'c': []}]}]}] + out = dc._reformat_dict_for_savemat({"a": {"b": {"c": []}}}) + assert out == [{"a": [{"b": [{"c": []}]}]}] def test_generate_job(create_files_in_directory): @@ -108,58 +108,60 @@ class TestClass(spm.SPMCommand): dc = TestClass() # dc = derived_class out = dc._generate_job() - assert out == '' + assert out == "" # struct array - contents = {'contents': [1, 2, 3, 4]} + contents = {"contents": [1, 2, 3, 4]} out = dc._generate_job(contents=contents) - assert out == ('.contents(1) = 1;\n.contents(2) = 2;' - '\n.contents(3) = 3;\n.contents(4) = 4;\n') + assert out == ( + ".contents(1) = 1;\n.contents(2) = 2;" + "\n.contents(3) = 3;\n.contents(4) = 4;\n" + ) # cell array of strings filelist, outdir = create_files_in_directory names = spm.scans_for_fnames(filelist, keep4d=True) - contents = {'files': names} - out = dc._generate_job(prefix='test', contents=contents) + contents = {"files": names} + out = dc._generate_job(prefix="test", contents=contents) assert out == "test.files = {...\n'a.nii';...\n'b.nii';...\n};\n" # string assignment - contents = 'foo' - out = dc._generate_job(prefix='test', contents=contents) + contents = "foo" + out = dc._generate_job(prefix="test", contents=contents) assert out == "test = 'foo';\n" # cell array of vectors - contents = {'onsets': np.array((1, ), dtype=object)} - contents['onsets'][0] = [1, 2, 3, 4] - out = dc._generate_job(prefix='test', contents=contents) - assert out == 'test.onsets = {...\n[1, 2, 3, 4];...\n};\n' + contents = {"onsets": np.array((1,), dtype=object)} + contents["onsets"][0] = [1, 2, 3, 4] + out = dc._generate_job(prefix="test", contents=contents) + assert out == "test.onsets = {...\n[1, 2, 3, 4];...\n};\n" def test_bool(): class TestClassInputSpec(SPMCommandInputSpec): - test_in = include_intercept = traits.Bool(field='testfield') + test_in = include_intercept = traits.Bool(field="testfield") class TestClass(spm.SPMCommand): input_spec = TestClassInputSpec - _jobtype = 'jobtype' - _jobname = 'jobname' + _jobtype = "jobtype" + 
_jobname = "jobname" dc = TestClass() # dc = derived_class dc.inputs.test_in = True out = dc._make_matlab_command(dc._parse_inputs()) - assert out.find('jobs{1}.spm.jobtype.jobname.testfield = 1;') > 0, 1 + assert out.find("jobs{1}.spm.jobtype.jobname.testfield = 1;") > 0, 1 dc.inputs.use_v8struct = False out = dc._make_matlab_command(dc._parse_inputs()) - assert out.find('jobs{1}.jobtype{1}.jobname{1}.testfield = 1;') > 0, 1 + assert out.find("jobs{1}.jobtype{1}.jobname{1}.testfield = 1;") > 0, 1 def test_make_matlab_command(create_files_in_directory): class TestClass(spm.SPMCommand): - _jobtype = 'jobtype' - _jobname = 'jobname' + _jobtype = "jobtype" + _jobname = "jobname" input_spec = spm.SPMCommandInputSpec dc = TestClass() # dc = derived_class filelist, outdir = create_files_in_directory - contents = {'contents': [1, 2, 3, 4]} + contents = {"contents": [1, 2, 3, 4]} script = dc._make_matlab_command([contents]) - assert 'jobs{1}.spm.jobtype.jobname.contents(3) = 3;' in script + assert "jobs{1}.spm.jobtype.jobname.contents(3) = 3;" in script dc.inputs.use_v8struct = False script = dc._make_matlab_command([contents]) - assert 'jobs{1}.jobtype{1}.jobname{1}.contents(3) = 3;' in script + assert "jobs{1}.jobtype{1}.jobname{1}.contents(3) = 3;" in script diff --git a/nipype/interfaces/spm/tests/test_model.py b/nipype/interfaces/spm/tests/test_model.py index a9cb957944..a960d06fb8 100644 --- a/nipype/interfaces/spm/tests/test_model.py +++ b/nipype/interfaces/spm/tests/test_model.py @@ -6,39 +6,39 @@ import nipype.interfaces.spm.model as spm import nipype.interfaces.matlab as mlab -mlab.MatlabCommand.set_default_matlab_cmd(os.getenv('MATLABCMD', 'matlab')) +mlab.MatlabCommand.set_default_matlab_cmd(os.getenv("MATLABCMD", "matlab")) def test_level1design(): - assert spm.Level1Design._jobtype == 'stats' - assert spm.Level1Design._jobname == 'fmri_spec' + assert spm.Level1Design._jobtype == "stats" + assert spm.Level1Design._jobname == "fmri_spec" def test_estimatemodel(): - assert spm.EstimateModel._jobtype == 'stats' - assert spm.EstimateModel._jobname == 'fmri_est' + assert spm.EstimateModel._jobtype == "stats" + assert spm.EstimateModel._jobname == "fmri_est" def test_estimatecontrast(): - assert spm.EstimateContrast._jobtype == 'stats' - assert spm.EstimateContrast._jobname == 'con' + assert spm.EstimateContrast._jobtype == "stats" + assert spm.EstimateContrast._jobname == "con" def test_threshold(): - assert spm.Threshold._jobtype == 'basetype' - assert spm.Threshold._jobname == 'basename' + assert spm.Threshold._jobtype == "basetype" + assert spm.Threshold._jobname == "basename" def test_factorialdesign(): - assert spm.FactorialDesign._jobtype == 'stats' - assert spm.FactorialDesign._jobname == 'factorial_design' + assert spm.FactorialDesign._jobtype == "stats" + assert spm.FactorialDesign._jobname == "factorial_design" def test_onesamplettestdesign(): - assert spm.OneSampleTTestDesign._jobtype == 'stats' - assert spm.OneSampleTTestDesign._jobname == 'factorial_design' + assert spm.OneSampleTTestDesign._jobtype == "stats" + assert spm.OneSampleTTestDesign._jobname == "factorial_design" def test_twosamplettestdesign(): - assert spm.TwoSampleTTestDesign._jobtype == 'stats' - assert spm.TwoSampleTTestDesign._jobname == 'factorial_design' + assert spm.TwoSampleTTestDesign._jobtype == "stats" + assert spm.TwoSampleTTestDesign._jobname == "factorial_design" diff --git a/nipype/interfaces/spm/tests/test_preprocess.py b/nipype/interfaces/spm/tests/test_preprocess.py index 2b70b7bb54..de5c79caba 
100644 --- a/nipype/interfaces/spm/tests/test_preprocess.py +++ b/nipype/interfaces/spm/tests/test_preprocess.py @@ -10,107 +10,106 @@ from nipype.interfaces.spm import no_spm import nipype.interfaces.matlab as mlab -mlab.MatlabCommand.set_default_matlab_cmd(os.getenv('MATLABCMD', 'matlab')) +mlab.MatlabCommand.set_default_matlab_cmd(os.getenv("MATLABCMD", "matlab")) def test_slicetiming(): - assert spm.SliceTiming._jobtype == 'temporal' - assert spm.SliceTiming._jobname == 'st' + assert spm.SliceTiming._jobtype == "temporal" + assert spm.SliceTiming._jobname == "st" def test_slicetiming_list_outputs(create_files_in_directory): filelist, outdir = create_files_in_directory st = spm.SliceTiming(in_files=filelist[0]) - assert st._list_outputs()['timecorrected_files'][0][0] == 'a' + assert st._list_outputs()["timecorrected_files"][0][0] == "a" def test_realign(): - assert spm.Realign._jobtype == 'spatial' - assert spm.Realign._jobname == 'realign' - assert spm.Realign().inputs.jobtype == 'estwrite' + assert spm.Realign._jobtype == "spatial" + assert spm.Realign._jobname == "realign" + assert spm.Realign().inputs.jobtype == "estwrite" def test_realign_list_outputs(create_files_in_directory): filelist, outdir = create_files_in_directory rlgn = spm.Realign(in_files=filelist[0]) - assert rlgn._list_outputs()['realignment_parameters'][0].startswith('rp_') - assert rlgn._list_outputs()['realigned_files'][0].startswith('r') - assert rlgn._list_outputs()['mean_image'].startswith('mean') + assert rlgn._list_outputs()["realignment_parameters"][0].startswith("rp_") + assert rlgn._list_outputs()["realigned_files"][0].startswith("r") + assert rlgn._list_outputs()["mean_image"].startswith("mean") def test_coregister(): - assert spm.Coregister._jobtype == 'spatial' - assert spm.Coregister._jobname == 'coreg' - assert spm.Coregister().inputs.jobtype == 'estwrite' + assert spm.Coregister._jobtype == "spatial" + assert spm.Coregister._jobname == "coreg" + assert spm.Coregister().inputs.jobtype == "estwrite" def test_coregister_list_outputs(create_files_in_directory): filelist, outdir = create_files_in_directory coreg = spm.Coregister(source=filelist[0]) - assert coreg._list_outputs()['coregistered_source'][0].startswith('r') + assert coreg._list_outputs()["coregistered_source"][0].startswith("r") coreg = spm.Coregister(source=filelist[0], apply_to_files=filelist[1]) - assert coreg._list_outputs()['coregistered_files'][0].startswith('r') + assert coreg._list_outputs()["coregistered_files"][0].startswith("r") def test_normalize(): - assert spm.Normalize._jobtype == 'spatial' - assert spm.Normalize._jobname == 'normalise' - assert spm.Normalize().inputs.jobtype == 'estwrite' + assert spm.Normalize._jobtype == "spatial" + assert spm.Normalize._jobname == "normalise" + assert spm.Normalize().inputs.jobtype == "estwrite" def test_normalize_list_outputs(create_files_in_directory): filelist, outdir = create_files_in_directory norm = spm.Normalize(source=filelist[0]) - assert norm._list_outputs()['normalized_source'][0].startswith('w') + assert norm._list_outputs()["normalized_source"][0].startswith("w") norm = spm.Normalize(source=filelist[0], apply_to_files=filelist[1]) - assert norm._list_outputs()['normalized_files'][0].startswith('w') + assert norm._list_outputs()["normalized_files"][0].startswith("w") def test_normalize12(): - assert spm.Normalize12._jobtype == 'spatial' - assert spm.Normalize12._jobname == 'normalise' - assert spm.Normalize12().inputs.jobtype == 'estwrite' + assert spm.Normalize12._jobtype == 
"spatial" + assert spm.Normalize12._jobname == "normalise" + assert spm.Normalize12().inputs.jobtype == "estwrite" def test_normalize12_list_outputs(create_files_in_directory): filelist, outdir = create_files_in_directory norm12 = spm.Normalize12(image_to_align=filelist[0]) - assert norm12._list_outputs()['normalized_image'][0].startswith('w') - norm12 = spm.Normalize12( - image_to_align=filelist[0], apply_to_files=filelist[1]) - assert norm12._list_outputs()['normalized_files'][0].startswith('w') + assert norm12._list_outputs()["normalized_image"][0].startswith("w") + norm12 = spm.Normalize12(image_to_align=filelist[0], apply_to_files=filelist[1]) + assert norm12._list_outputs()["normalized_files"][0].startswith("w") @pytest.mark.skipif(no_spm(), reason="spm is not installed") def test_segment(): if spm.Info.name() == "SPM12": - assert spm.Segment()._jobtype == 'tools' - assert spm.Segment()._jobname == 'oldseg' + assert spm.Segment()._jobtype == "tools" + assert spm.Segment()._jobname == "oldseg" else: - assert spm.Segment()._jobtype == 'spatial' - assert spm.Segment()._jobname == 'preproc' + assert spm.Segment()._jobtype == "spatial" + assert spm.Segment()._jobname == "preproc" @pytest.mark.skipif(no_spm(), reason="spm is not installed") def test_newsegment(): if spm.Info.name() == "SPM12": - assert spm.NewSegment()._jobtype == 'spatial' - assert spm.NewSegment()._jobname == 'preproc' + assert spm.NewSegment()._jobtype == "spatial" + assert spm.NewSegment()._jobname == "preproc" else: - assert spm.NewSegment()._jobtype == 'tools' - assert spm.NewSegment()._jobname == 'preproc8' + assert spm.NewSegment()._jobtype == "tools" + assert spm.NewSegment()._jobname == "preproc8" def test_smooth(): - assert spm.Smooth._jobtype == 'spatial' - assert spm.Smooth._jobname == 'smooth' + assert spm.Smooth._jobtype == "spatial" + assert spm.Smooth._jobname == "smooth" def test_dartel(): - assert spm.DARTEL._jobtype == 'tools' - assert spm.DARTEL._jobname == 'dartel' + assert spm.DARTEL._jobtype == "tools" + assert spm.DARTEL._jobname == "dartel" def test_dartelnorm2mni(): - assert spm.DARTELNorm2MNI._jobtype == 'tools' - assert spm.DARTELNorm2MNI._jobname == 'dartel' + assert spm.DARTELNorm2MNI._jobtype == "tools" + assert spm.DARTELNorm2MNI._jobname == "dartel" diff --git a/nipype/interfaces/spm/tests/test_utils.py b/nipype/interfaces/spm/tests/test_utils.py index a574fb90a7..1afc887b06 100644 --- a/nipype/interfaces/spm/tests/test_utils.py +++ b/nipype/interfaces/spm/tests/test_utils.py @@ -11,73 +11,73 @@ def test_coreg(): - moving = example_data(infile='functional.nii') - target = example_data(infile='T1.nii') - mat = example_data(infile='trans.mat') - coreg = spmu.CalcCoregAffine(matlab_cmd='mymatlab') + moving = example_data(infile="functional.nii") + target = example_data(infile="T1.nii") + mat = example_data(infile="trans.mat") + coreg = spmu.CalcCoregAffine(matlab_cmd="mymatlab") coreg.inputs.target = target - assert coreg.inputs.matlab_cmd == 'mymatlab' + assert coreg.inputs.matlab_cmd == "mymatlab" coreg.inputs.moving = moving assert not isdefined(coreg.inputs.mat) pth, mov, _ = split_filename(moving) _, tgt, _ = split_filename(target) - mat = os.path.join(pth, '%s_to_%s.mat' % (mov, tgt)) - invmat = fname_presuffix(mat, prefix='inverse_') + mat = os.path.join(pth, "%s_to_%s.mat" % (mov, tgt)) + invmat = fname_presuffix(mat, prefix="inverse_") scrpt = coreg._make_matlab_command(None) assert coreg.inputs.mat == mat assert coreg.inputs.invmat == invmat def test_apply_transform(): - moving = 
example_data(infile='functional.nii') - mat = example_data(infile='trans.mat') - applymat = spmu.ApplyTransform(matlab_cmd='mymatlab') - assert applymat.inputs.matlab_cmd == 'mymatlab' + moving = example_data(infile="functional.nii") + mat = example_data(infile="trans.mat") + applymat = spmu.ApplyTransform(matlab_cmd="mymatlab") + assert applymat.inputs.matlab_cmd == "mymatlab" applymat.inputs.in_file = moving applymat.inputs.mat = mat scrpt = applymat._make_matlab_command(None) - expected = '[p n e v] = spm_fileparts(V.fname);' + expected = "[p n e v] = spm_fileparts(V.fname);" assert expected in scrpt - expected = 'V.mat = transform.M * V.mat;' + expected = "V.mat = transform.M * V.mat;" assert expected in scrpt def test_reslice(): - moving = example_data(infile='functional.nii') - space_defining = example_data(infile='T1.nii') - reslice = spmu.Reslice(matlab_cmd='mymatlab_version') - assert reslice.inputs.matlab_cmd == 'mymatlab_version' + moving = example_data(infile="functional.nii") + space_defining = example_data(infile="T1.nii") + reslice = spmu.Reslice(matlab_cmd="mymatlab_version") + assert reslice.inputs.matlab_cmd == "mymatlab_version" reslice.inputs.in_file = moving reslice.inputs.space_defining = space_defining assert reslice.inputs.interp == 0 with pytest.raises(TraitError): - reslice.inputs.trait_set(interp='nearest') + reslice.inputs.trait_set(interp="nearest") with pytest.raises(TraitError): reslice.inputs.trait_set(interp=10) reslice.inputs.interp = 1 script = reslice._make_matlab_command(None) - outfile = fname_presuffix(moving, prefix='r') + outfile = fname_presuffix(moving, prefix="r") assert reslice.inputs.out_file == outfile - expected = '\nflags.mean=0;\nflags.which=1;\nflags.mask=0;' - assert expected in script.replace(' ', '') - expected_interp = 'flags.interp = 1;\n' + expected = "\nflags.mean=0;\nflags.which=1;\nflags.mask=0;" + assert expected in script.replace(" ", "") + expected_interp = "flags.interp = 1;\n" assert expected_interp in script - assert 'spm_reslice(invols, flags);' in script + assert "spm_reslice(invols, flags);" in script def test_dicom_import(): - dicom = example_data(infile='dicomdir/123456-1-1.dcm') - di = spmu.DicomImport(matlab_cmd='mymatlab') - assert di.inputs.matlab_cmd == 'mymatlab' - assert di.inputs.output_dir_struct == 'flat' - assert di.inputs.output_dir == './converted_dicom' - assert di.inputs.format == 'nii' + dicom = example_data(infile="dicomdir/123456-1-1.dcm") + di = spmu.DicomImport(matlab_cmd="mymatlab") + assert di.inputs.matlab_cmd == "mymatlab" + assert di.inputs.output_dir_struct == "flat" + assert di.inputs.output_dir == "./converted_dicom" + assert di.inputs.format == "nii" assert not di.inputs.icedims with pytest.raises(TraitError): - di.inputs.trait_set(output_dir_struct='wrong') + di.inputs.trait_set(output_dir_struct="wrong") with pytest.raises(TraitError): - di.inputs.trait_set(format='FAT') + di.inputs.trait_set(format="FAT") with pytest.raises(TraitError): - di.inputs.trait_set(in_files=['does_sfd_not_32fn_exist.dcm']) + di.inputs.trait_set(in_files=["does_sfd_not_32fn_exist.dcm"]) di.inputs.in_files = [dicom] assert di.inputs.in_files == [dicom] diff --git a/nipype/interfaces/spm/utils.py b/nipype/interfaces/spm/utils.py index 919b5853ab..99e3d57d3b 100644 --- a/nipype/interfaces/spm/utils.py +++ b/nipype/interfaces/spm/utils.py @@ -5,12 +5,14 @@ import os import numpy as np -from ...utils.filemanip import (split_filename, fname_presuffix, - ensure_list, simplify_list) -from ..base import (TraitedSpec, 
isdefined, File, traits, OutputMultiPath, - InputMultiPath) -from .base import (SPMCommandInputSpec, SPMCommand, scans_for_fnames, - scans_for_fname) +from ...utils.filemanip import ( + split_filename, + fname_presuffix, + ensure_list, + simplify_list, +) +from ..base import TraitedSpec, isdefined, File, traits, OutputMultiPath, InputMultiPath +from .base import SPMCommandInputSpec, SPMCommand, scans_for_fnames, scans_for_fname class Analyze2niiInputSpec(SPMCommandInputSpec): @@ -38,28 +40,27 @@ def _make_matlab_command(self, _): def _list_outputs(self): outputs = self._outputs().get() - outputs['nifti_file'] = self.output_name + outputs["nifti_file"] = self.output_name return outputs class CalcCoregAffineInputSpec(SPMCommandInputSpec): target = File( - exists=True, - mandatory=True, - desc='target for generating affine transform') + exists=True, mandatory=True, desc="target for generating affine transform" + ) moving = File( exists=True, mandatory=True, copyfile=False, - desc=('volume transform can be applied to register with ' - 'target')) - mat = File(desc='Filename used to store affine matrix') - invmat = File(desc='Filename used to store inverse affine matrix') + desc=("volume transform can be applied to register with " "target"), + ) + mat = File(desc="Filename used to store affine matrix") + invmat = File(desc="Filename used to store inverse affine matrix") class CalcCoregAffineOutputSpec(TraitedSpec): - mat = File(exists=True, desc='Matlab file holding transform') - invmat = File(desc='Matlab file holding inverse transform') + mat = File(exists=True, desc="Matlab file holding transform") + invmat = File(desc="Matlab file holding inverse transform") class CalcCoregAffine(SPMCommand): @@ -90,14 +91,14 @@ class CalcCoregAffine(SPMCommand): def _make_inv_file(self): """ makes filename to hold inverse transform if not specified""" - invmat = fname_presuffix(self.inputs.mat, prefix='inverse_') + invmat = fname_presuffix(self.inputs.mat, prefix="inverse_") return invmat def _make_mat_file(self): """ makes name for matfile if it doesn't exist""" pth, mv, _ = split_filename(self.inputs.moving) _, tgt, _ = split_filename(self.inputs.target) - mat = os.path.join(pth, '%s_to_%s.mat' % (mv, tgt)) + mat = os.path.join(pth, "%s_to_%s.mat" % (mv, tgt)) return mat def _make_matlab_command(self, _): @@ -116,14 +117,18 @@ def _make_matlab_command(self, _): save('%s' , 'M' ); M = inv(M); save('%s','M') - """ % (self.inputs.target, self.inputs.moving, self.inputs.mat, - self.inputs.invmat) + """ % ( + self.inputs.target, + self.inputs.moving, + self.inputs.mat, + self.inputs.invmat, + ) return script def _list_outputs(self): outputs = self._outputs().get() - outputs['mat'] = os.path.abspath(self.inputs.mat) - outputs['invmat'] = os.path.abspath(self.inputs.invmat) + outputs["mat"] = os.path.abspath(self.inputs.mat) + outputs["invmat"] = os.path.abspath(self.inputs.invmat) return outputs @@ -132,14 +137,14 @@ class ApplyTransformInputSpec(SPMCommandInputSpec): exists=True, mandatory=True, copyfile=True, - desc='file to apply transform to, (only updates header)') - mat = File( - exists=True, mandatory=True, desc='file holding transform to apply') + desc="file to apply transform to, (only updates header)", + ) + mat = File(exists=True, mandatory=True, desc="file holding transform to apply") out_file = File(desc="output file name for transformed data", genfile=True) class ApplyTransformOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='Transformed image file') + out_file = File(exists=True,
desc="Transformed image file") class ApplyTransform(SPMCommand): @@ -155,13 +160,14 @@ class ApplyTransform(SPMCommand): >>> applymat.run() # doctest: +SKIP """ + input_spec = ApplyTransformInputSpec output_spec = ApplyTransformOutputSpec def _make_matlab_command(self, _): """checks for SPM, generates script""" outputs = self._list_outputs() - self.inputs.out_file = outputs['out_file'] + self.inputs.out_file = outputs["out_file"] script = """ infile = '%s'; outfile = '%s' @@ -174,7 +180,11 @@ def _make_matlab_command(self, _): V.fname = fullfile(outfile); spm_write_vol(V,X); - """ % (self.inputs.in_file, self.inputs.out_file, self.inputs.mat) + """ % ( + self.inputs.in_file, + self.inputs.out_file, + self.inputs.mat, + ) # img_space = spm_get_space(infile); # spm_get_space(infile, transform.M * img_space); return script @@ -182,38 +192,39 @@ def _make_matlab_command(self, _): def _list_outputs(self): outputs = self.output_spec().get() if not isdefined(self.inputs.out_file): - outputs['out_file'] = os.path.abspath(self._gen_outfilename()) + outputs["out_file"] = os.path.abspath(self._gen_outfilename()) else: - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) - return name + '_trans.nii' + return name + "_trans.nii" class ResliceInputSpec(SPMCommandInputSpec): in_file = File( exists=True, mandatory=True, - desc='file to apply transform to, (only updates header)') + desc="file to apply transform to, (only updates header)", + ) space_defining = File( - exists=True, - mandatory=True, - desc='Volume defining space to slice in_file into') + exists=True, mandatory=True, desc="Volume defining space to slice in_file into" + ) interp = traits.Range( low=0, high=7, usedefault=True, - desc='degree of b-spline used for interpolation' - '0 is nearest neighbor (default)') + desc="degree of b-spline used for interpolation" + "0 is nearest neighbor (default)", + ) - out_file = File(desc='Optional file to save resliced volume') + out_file = File(desc="Optional file to save resliced volume") class ResliceOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='resliced volume') + out_file = File(exists=True, desc="resliced volume") class Reslice(SPMCommand): @@ -225,8 +236,7 @@ class Reslice(SPMCommand): def _make_matlab_command(self, _): """ generates script""" if not isdefined(self.inputs.out_file): - self.inputs.out_file = fname_presuffix( - self.inputs.in_file, prefix='r') + self.inputs.out_file = fname_presuffix(self.inputs.in_file, prefix="r") script = """ flags.mean = 0; flags.which = 1; @@ -235,13 +245,16 @@ def _make_matlab_command(self, _): infiles = strvcat(\'%s\', \'%s\'); invols = spm_vol(infiles); spm_reslice(invols, flags); - """ % (self.inputs.interp, self.inputs.space_defining, - self.inputs.in_file) + """ % ( + self.inputs.interp, + self.inputs.space_defining, + self.inputs.in_file, + ) return script def _list_outputs(self): outputs = self._outputs().get() - outputs['out_file'] = os.path.abspath(self.inputs.out_file) + outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs @@ -249,44 +262,46 @@ class ApplyInverseDeformationInput(SPMCommandInputSpec): in_files = InputMultiPath( File(exists=True), mandatory=True, - field='fnames', - desc='Files on which deformation is applied') + field="fnames", + desc="Files on which deformation is applied", + ) target = File( - exists=True, - field='comp{1}.inv.space', - 
desc='File defining target space') + exists=True, field="comp{1}.inv.space", desc="File defining target space" + ) deformation = File( exists=True, - field='comp{1}.inv.comp{1}.sn2def.matname', - desc='SN SPM deformation file', - xor=['deformation_field']) + field="comp{1}.inv.comp{1}.sn2def.matname", + desc="SN SPM deformation file", + xor=["deformation_field"], + ) deformation_field = File( exists=True, - field='comp{1}.inv.comp{1}.def', - desc='SN SPM deformation file', - xor=['deformation']) + field="comp{1}.inv.comp{1}.def", + desc="SN SPM deformation file", + xor=["deformation"], + ) interpolation = traits.Range( - low=0, - high=7, - field='interp', - desc='degree of b-spline used for interpolation') + low=0, high=7, field="interp", desc="degree of b-spline used for interpolation" + ) bounding_box = traits.List( traits.Float(), - field='comp{1}.inv.comp{1}.sn2def.bb', + field="comp{1}.inv.comp{1}.sn2def.bb", minlen=6, maxlen=6, - desc='6-element list (opt)') + desc="6-element list (opt)", + ) voxel_sizes = traits.List( traits.Float(), - field='comp{1}.inv.comp{1}.sn2def.vox', + field="comp{1}.inv.comp{1}.sn2def.vox", minlen=3, maxlen=3, - desc='3-element list (opt)') + desc="3-element list (opt)", + ) class ApplyInverseDeformationOutput(TraitedSpec): - out_files = OutputMultiPath(File(exists=True), desc='Transformed files') + out_files = OutputMultiPath(File(exists=True), desc="Transformed files") class ApplyInverseDeformation(SPMCommand): @@ -307,28 +322,28 @@ class ApplyInverseDeformation(SPMCommand): input_spec = ApplyInverseDeformationInput output_spec = ApplyInverseDeformationOutput - _jobtype = 'util' - _jobname = 'defs' + _jobtype = "util" + _jobname = "defs" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if opt == 'in_files': + if opt == "in_files": return scans_for_fnames(ensure_list(val)) - if opt == 'target': + if opt == "target": return scans_for_fname(ensure_list(val)) - if opt == 'deformation': + if opt == "deformation": return np.array([simplify_list(val)], dtype=object) - if opt == 'deformation_field': + if opt == "deformation_field": return np.array([simplify_list(val)], dtype=object) return val def _list_outputs(self): outputs = self._outputs().get() - outputs['out_files'] = [] + outputs["out_files"] = [] for filename in self.inputs.in_files: _, fname = os.path.split(filename) - outputs['out_files'].append(os.path.realpath('w%s' % fname)) + outputs["out_files"].append(os.path.realpath("w%s" % fname)) return outputs @@ -336,34 +351,34 @@ class ResliceToReferenceInput(SPMCommandInputSpec): in_files = InputMultiPath( File(exists=True), mandatory=True, - field='fnames', - desc='Files on which deformation is applied') + field="fnames", + desc="Files on which deformation is applied", + ) target = File( - exists=True, - field='comp{1}.id.space', - desc='File defining target space') + exists=True, field="comp{1}.id.space", desc="File defining target space" + ) interpolation = traits.Range( - low=0, - high=7, - field='interp', - desc='degree of b-spline used for interpolation') + low=0, high=7, field="interp", desc="degree of b-spline used for interpolation" + ) bounding_box = traits.List( traits.Float(), - field='comp{2}.idbbvox.bb', + field="comp{2}.idbbvox.bb", minlen=6, maxlen=6, - desc='6-element list (opt)') + desc="6-element list (opt)", + ) voxel_sizes = traits.List( traits.Float(), - field='comp{2}.idbbvox.vox', + field="comp{2}.idbbvox.vox", minlen=3, maxlen=3, - desc='3-element list (opt)') + desc="3-element list 
(opt)", + ) class ResliceToReferenceOutput(TraitedSpec): - out_files = OutputMultiPath(File(exists=True), desc='Transformed files') + out_files = OutputMultiPath(File(exists=True), desc="Transformed files") class ResliceToReference(SPMCommand): @@ -383,28 +398,28 @@ class ResliceToReference(SPMCommand): input_spec = ResliceToReferenceInput output_spec = ResliceToReferenceOutput - _jobtype = 'util' - _jobname = 'defs' + _jobtype = "util" + _jobname = "defs" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if opt == 'in_files': + if opt == "in_files": return scans_for_fnames(ensure_list(val)) - if opt == 'target': + if opt == "target": return scans_for_fname(ensure_list(val)) - if opt == 'deformation': + if opt == "deformation": return np.array([simplify_list(val)], dtype=object) - if opt == 'deformation_field': + if opt == "deformation_field": return np.array([simplify_list(val)], dtype=object) return val def _list_outputs(self): outputs = self._outputs().get() - outputs['out_files'] = [] + outputs["out_files"] = [] for filename in self.inputs.in_files: _, fname = os.path.split(filename) - outputs['out_files'].append(os.path.realpath('w%s' % fname)) + outputs["out_files"].append(os.path.realpath("w%s" % fname)) return outputs @@ -412,42 +427,42 @@ class DicomImportInputSpec(SPMCommandInputSpec): in_files = InputMultiPath( File(exists=True), mandatory=True, - field='data', - desc='dicom files to be converted') + field="data", + desc="dicom files to be converted", + ) output_dir_struct = traits.Enum( - 'flat', - 'series', - 'patname', - 'patid_date', - 'patid', - 'date_time', - field='root', + "flat", + "series", + "patname", + "patid_date", + "patid", + "date_time", + field="root", usedefault=True, - desc='directory structure for the output.') + desc="directory structure for the output.", + ) output_dir = traits.Str( - './converted_dicom', - field='outdir', - usedefault=True, - desc='output directory.') + "./converted_dicom", field="outdir", usedefault=True, desc="output directory." + ) format = traits.Enum( - 'nii', - 'img', - field='convopts.format', - usedefault=True, - desc='output format.') + "nii", "img", field="convopts.format", usedefault=True, desc="output format." + ) icedims = traits.Bool( False, - field='convopts.icedims', + field="convopts.icedims", usedefault=True, - desc=('If image sorting fails, one can try using ' - 'the additional SIEMENS ICEDims information ' - 'to create unique filenames. Use this only if ' - 'there would be multiple volumes with exactly ' - 'the same file names.')) + desc=( + "If image sorting fails, one can try using " + "the additional SIEMENS ICEDims information " + "to create unique filenames. Use this only if " + "there would be multiple volumes with exactly " + "the same file names." 
+ ), + ) class DicomImportOutputSpec(TraitedSpec): - out_files = OutputMultiPath(File(exists=True), desc='converted files') + out_files = OutputMultiPath(File(exists=True), desc="converted files") class DicomImport(SPMCommand): @@ -465,19 +480,19 @@ input_spec = DicomImportInputSpec output_spec = DicomImportOutputSpec - _jobtype = 'util' - _jobname = 'dicom' + _jobtype = "util" + _jobname = "dicom" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm """ - if opt == 'in_files': + if opt == "in_files": return np.array(val, dtype=object) - if opt == 'output_dir': + if opt == "output_dir": return np.array([val], dtype=object) - if opt == 'output_dir': - return os.path.abspath(val) + # NOTE: a second "output_dir" branch (returning os.path.abspath(val)) + # followed here; it was unreachable dead code and is dropped here. - if opt == 'icedims': + if opt == "icedims": if val: return 1 return 0 @@ -491,21 +506,23 @@ def _run_interface(self, runtime): def _list_outputs(self): from glob import glob + outputs = self._outputs().get() od = os.path.abspath(self.inputs.output_dir) ext = self.inputs.format if self.inputs.output_dir_struct == "flat": - outputs['out_files'] = glob(os.path.join(od, '*.%s' % ext)) - elif self.inputs.output_dir_struct == 'series': - outputs['out_files'] = glob( - os.path.join(od, os.path.join('*', '*.%s' % ext))) - elif (self.inputs.output_dir_struct in [ - 'patid', 'date_time', 'patname' - ]): - outputs['out_files'] = glob( - os.path.join(od, os.path.join('*', '*', '*.%s' % ext))) - elif self.inputs.output_dir_struct == 'patid_date': - outputs['out_files'] = glob( - os.path.join(od, os.path.join('*', '*', '*', '*.%s' % ext))) + outputs["out_files"] = glob(os.path.join(od, "*.%s" % ext)) + elif self.inputs.output_dir_struct == "series": + outputs["out_files"] = glob( + os.path.join(od, os.path.join("*", "*.%s" % ext)) + ) + elif self.inputs.output_dir_struct in ["patid", "date_time", "patname"]: + outputs["out_files"] = glob( + os.path.join(od, os.path.join("*", "*", "*.%s" % ext)) + ) + elif self.inputs.output_dir_struct == "patid_date": + outputs["out_files"] = glob( + os.path.join(od, os.path.join("*", "*", "*", "*.%s" % ext)) + ) return outputs diff --git a/nipype/interfaces/tests/test_auto_BIDSDataGrabber.py b/nipype/interfaces/tests/test_auto_BIDSDataGrabber.py index 65cd8ea0c4..0263177c5e 100644 --- a/nipype/interfaces/tests/test_auto_BIDSDataGrabber.py +++ b/nipype/interfaces/tests/test_auto_BIDSDataGrabber.py @@ -4,20 +4,19 @@ def test_BIDSDataGrabber_inputs(): input_map = dict( - base_dir=dict(mandatory=True, ), + base_dir=dict(mandatory=True,), extra_derivatives=dict(), - index_derivatives=dict( - mandatory=True, - usedefault=True, - ), + index_derivatives=dict(mandatory=True, usedefault=True,), output_query=dict(), - raise_on_empty=dict(usedefault=True, ), + raise_on_empty=dict(usedefault=True,), ) inputs = BIDSDataGrabber.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_BIDSDataGrabber_outputs(): output_map = dict() outputs = BIDSDataGrabber.output_spec() diff --git a/nipype/interfaces/tests/test_auto_Bru2.py b/nipype/interfaces/tests/test_auto_Bru2.py index ae9aa850a8..5256b2732f 100644 --- a/nipype/interfaces/tests/test_auto_Bru2.py +++ b/nipype/interfaces/tests/test_auto_Bru2.py @@ -4,32 +4,24 @@ def test_Bru2_inputs(): input_map = dict( - actual_size=dict(argstr='-a', ), - append_protocol_name=dict(argstr='-p', ), - args=dict(argstr='%s', ), - compress=dict(argstr='-z', ), - environ=dict(
nohash=True, - usedefault=True, - ), - force_conversion=dict(argstr='-f', ), - input_dir=dict( - argstr='%s', - mandatory=True, - position=-1, - ), - output_filename=dict( - argstr='-o %s', - genfile=True, - ), + actual_size=dict(argstr="-a",), + append_protocol_name=dict(argstr="-p",), + args=dict(argstr="%s",), + compress=dict(argstr="-z",), + environ=dict(nohash=True, usedefault=True,), + force_conversion=dict(argstr="-f",), + input_dir=dict(argstr="%s", mandatory=True, position=-1,), + output_filename=dict(argstr="-o %s", genfile=True,), ) inputs = Bru2.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Bru2_outputs(): - output_map = dict(nii_file=dict(extensions=None, ), ) + output_map = dict(nii_file=dict(extensions=None,),) outputs = Bru2.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_C3d.py b/nipype/interfaces/tests/test_auto_C3d.py index daf1077cf4..d55216b454 100644 --- a/nipype/interfaces/tests/test_auto_C3d.py +++ b/nipype/interfaces/tests/test_auto_C3d.py @@ -4,47 +4,29 @@ def test_C3d_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - mandatory=True, - position=1, - ), - interp=dict(argstr='-interpolation %s', ), - is_4d=dict(usedefault=True, ), - multicomp_split=dict( - argstr='-mcr', - position=0, - usedefault=True, - ), - out_file=dict( - argstr='-o %s', - extensions=None, - position=-1, - xor=['out_files'], - ), - out_files=dict( - argstr='-oo %s', - position=-1, - xor=['out_file'], - ), - pix_type=dict(argstr='-type %s', ), - resample=dict(argstr='-resample %s', ), - scale=dict(argstr='-scale %s', ), - shift=dict(argstr='-shift %s', ), - smooth=dict(argstr='-smooth %s', ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", mandatory=True, position=1,), + interp=dict(argstr="-interpolation %s",), + is_4d=dict(usedefault=True,), + multicomp_split=dict(argstr="-mcr", position=0, usedefault=True,), + out_file=dict(argstr="-o %s", extensions=None, position=-1, xor=["out_files"],), + out_files=dict(argstr="-oo %s", position=-1, xor=["out_file"],), + pix_type=dict(argstr="-type %s",), + resample=dict(argstr="-resample %s",), + scale=dict(argstr="-scale %s",), + shift=dict(argstr="-shift %s",), + smooth=dict(argstr="-smooth %s",), ) inputs = C3d.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_C3d_outputs(): - output_map = dict(out_files=dict(), ) + output_map = dict(out_files=dict(),) outputs = C3d.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_C3dAffineTool.py b/nipype/interfaces/tests/test_auto_C3dAffineTool.py index dd4884557a..963d2f7931 100644 --- a/nipype/interfaces/tests/test_auto_C3dAffineTool.py +++ b/nipype/interfaces/tests/test_auto_C3dAffineTool.py @@ -4,43 +4,23 @@ def test_C3dAffineTool_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - fsl2ras=dict( - argstr='-fsl2ras', - position=4, - ), - itk_transform=dict( - argstr='-oitk %s', - hash_files=False, - position=5, - ), - reference_file=dict( - argstr='-ref %s', - extensions=None, - position=1, - ), - source_file=dict( - argstr='-src %s', - 
extensions=None, - position=2, - ), - transform_file=dict( - argstr='%s', - extensions=None, - position=3, - ), + args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + fsl2ras=dict(argstr="-fsl2ras", position=4,), + itk_transform=dict(argstr="-oitk %s", hash_files=False, position=5,), + reference_file=dict(argstr="-ref %s", extensions=None, position=1,), + source_file=dict(argstr="-src %s", extensions=None, position=2,), + transform_file=dict(argstr="%s", extensions=None, position=3,), ) inputs = C3dAffineTool.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_C3dAffineTool_outputs(): - output_map = dict(itk_transform=dict(extensions=None, ), ) + output_map = dict(itk_transform=dict(extensions=None,),) outputs = C3dAffineTool.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_CopyMeta.py b/nipype/interfaces/tests/test_auto_CopyMeta.py index 7f13dfc927..6415514c3d 100644 --- a/nipype/interfaces/tests/test_auto_CopyMeta.py +++ b/nipype/interfaces/tests/test_auto_CopyMeta.py @@ -4,24 +4,20 @@ def test_CopyMeta_inputs(): input_map = dict( - dest_file=dict( - extensions=None, - mandatory=True, - ), + dest_file=dict(extensions=None, mandatory=True,), exclude_classes=dict(), include_classes=dict(), - src_file=dict( - extensions=None, - mandatory=True, - ), + src_file=dict(extensions=None, mandatory=True,), ) inputs = CopyMeta.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CopyMeta_outputs(): - output_map = dict(dest_file=dict(extensions=None, ), ) + output_map = dict(dest_file=dict(extensions=None,),) outputs = CopyMeta.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_DataFinder.py b/nipype/interfaces/tests/test_auto_DataFinder.py index 6ee0258218..81cdc92ac8 100644 --- a/nipype/interfaces/tests/test_auto_DataFinder.py +++ b/nipype/interfaces/tests/test_auto_DataFinder.py @@ -5,17 +5,19 @@ def test_DataFinder_inputs(): input_map = dict( ignore_regexes=dict(), - match_regex=dict(usedefault=True, ), + match_regex=dict(usedefault=True,), max_depth=dict(), min_depth=dict(), - root_paths=dict(mandatory=True, ), - unpack_single=dict(usedefault=True, ), + root_paths=dict(mandatory=True,), + unpack_single=dict(usedefault=True,), ) inputs = DataFinder.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DataFinder_outputs(): output_map = dict() outputs = DataFinder.output_spec() diff --git a/nipype/interfaces/tests/test_auto_DataGrabber.py b/nipype/interfaces/tests/test_auto_DataGrabber.py index c395eadd1a..c3817e43e7 100644 --- a/nipype/interfaces/tests/test_auto_DataGrabber.py +++ b/nipype/interfaces/tests/test_auto_DataGrabber.py @@ -5,10 +5,10 @@ def test_DataGrabber_inputs(): input_map = dict( base_directory=dict(), - drop_blank_outputs=dict(usedefault=True, ), - raise_on_empty=dict(usedefault=True, ), - sort_filelist=dict(mandatory=True, ), - template=dict(mandatory=True, ), + drop_blank_outputs=dict(usedefault=True,), + raise_on_empty=dict(usedefault=True,), + sort_filelist=dict(mandatory=True,), + template=dict(mandatory=True,), template_args=dict(), ) inputs = DataGrabber.input_spec() @@ -16,6 +16,8 @@ 
def test_DataGrabber_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DataGrabber_outputs(): output_map = dict() outputs = DataGrabber.output_spec() diff --git a/nipype/interfaces/tests/test_auto_DataSink.py b/nipype/interfaces/tests/test_auto_DataSink.py index 15af11755c..870dbc3c85 100644 --- a/nipype/interfaces/tests/test_auto_DataSink.py +++ b/nipype/interfaces/tests/test_auto_DataSink.py @@ -4,16 +4,16 @@ def test_DataSink_inputs(): input_map = dict( - _outputs=dict(usedefault=True, ), + _outputs=dict(usedefault=True,), base_directory=dict(), bucket=dict(), container=dict(), creds_path=dict(), encrypt_bucket_keys=dict(), local_copy=dict(), - parameterization=dict(usedefault=True, ), + parameterization=dict(usedefault=True,), regexp_substitutions=dict(), - remove_dest_dir=dict(usedefault=True, ), + remove_dest_dir=dict(usedefault=True,), strip_dir=dict(), substitutions=dict(), ) @@ -22,8 +22,10 @@ def test_DataSink_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DataSink_outputs(): - output_map = dict(out_file=dict(), ) + output_map = dict(out_file=dict(),) outputs = DataSink.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_Dcm2nii.py b/nipype/interfaces/tests/test_auto_Dcm2nii.py index 92ca835242..9aca885a64 100644 --- a/nipype/interfaces/tests/test_auto_Dcm2nii.py +++ b/nipype/interfaces/tests/test_auto_Dcm2nii.py @@ -4,88 +4,41 @@ def test_Dcm2nii_inputs(): input_map = dict( - anonymize=dict( - argstr='-a', - usedefault=True, - ), - args=dict(argstr='%s', ), - collapse_folders=dict( - argstr='-c', - usedefault=True, - ), - config_file=dict( - argstr='-b %s', - extensions=None, - genfile=True, - ), - convert_all_pars=dict( - argstr='-v', - usedefault=True, - ), - date_in_filename=dict( - argstr='-d', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - events_in_filename=dict( - argstr='-e', - usedefault=True, - ), - gzip_output=dict( - argstr='-g', - usedefault=True, - ), - id_in_filename=dict( - argstr='-i', - usedefault=True, - ), - nii_output=dict( - argstr='-n', - usedefault=True, - ), - output_dir=dict( - argstr='-o %s', - genfile=True, - ), - protocol_in_filename=dict( - argstr='-p', - usedefault=True, - ), - reorient=dict(argstr='-r', ), - reorient_and_crop=dict( - argstr='-x', - usedefault=True, - ), + anonymize=dict(argstr="-a", usedefault=True,), + args=dict(argstr="%s",), + collapse_folders=dict(argstr="-c", usedefault=True,), + config_file=dict(argstr="-b %s", extensions=None, genfile=True,), + convert_all_pars=dict(argstr="-v", usedefault=True,), + date_in_filename=dict(argstr="-d", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + events_in_filename=dict(argstr="-e", usedefault=True,), + gzip_output=dict(argstr="-g", usedefault=True,), + id_in_filename=dict(argstr="-i", usedefault=True,), + nii_output=dict(argstr="-n", usedefault=True,), + output_dir=dict(argstr="-o %s", genfile=True,), + protocol_in_filename=dict(argstr="-p", usedefault=True,), + reorient=dict(argstr="-r",), + reorient_and_crop=dict(argstr="-x", usedefault=True,), source_dir=dict( - argstr='%s', - mandatory=True, - position=-1, - xor=['source_names'], - ), - source_in_filename=dict( - argstr='-f', - usedefault=True, + argstr="%s", mandatory=True, position=-1, 
xor=["source_names"], ), + source_in_filename=dict(argstr="-f", usedefault=True,), source_names=dict( - argstr='%s', + argstr="%s", copyfile=False, mandatory=True, position=-1, - xor=['source_dir'], - ), - spm_analyze=dict( - argstr='-s', - xor=['nii_output'], + xor=["source_dir"], ), + spm_analyze=dict(argstr="-s", xor=["nii_output"],), ) inputs = Dcm2nii.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Dcm2nii_outputs(): output_map = dict( bvals=dict(), diff --git a/nipype/interfaces/tests/test_auto_Dcm2niix.py b/nipype/interfaces/tests/test_auto_Dcm2niix.py index 04ca6015ea..dfaa46d36a 100644 --- a/nipype/interfaces/tests/test_auto_Dcm2niix.py +++ b/nipype/interfaces/tests/test_auto_Dcm2niix.py @@ -4,80 +4,44 @@ def test_Dcm2niix_inputs(): input_map = dict( - anon_bids=dict( - argstr='-ba', - requires=['bids_format'], - ), - args=dict(argstr='%s', ), - bids_format=dict( - argstr='-b', - usedefault=True, - ), - comment=dict(argstr='-c %s', ), - compress=dict( - argstr='-z %s', - usedefault=True, - ), - compression=dict(argstr='-%d', ), - crop=dict( - argstr='-x', - usedefault=True, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - has_private=dict( - argstr='-t', - usedefault=True, - ), - ignore_deriv=dict(argstr='-i', ), - merge_imgs=dict( - argstr='-m', - usedefault=True, - ), - out_filename=dict(argstr='-f %s', ), - output_dir=dict( - argstr='-o %s', - usedefault=True, - ), - philips_float=dict(argstr='-p', ), - series_numbers=dict(argstr='-n %s...', ), - single_file=dict( - argstr='-s', - usedefault=True, - ), + anon_bids=dict(argstr="-ba", requires=["bids_format"],), + args=dict(argstr="%s",), + bids_format=dict(argstr="-b", usedefault=True,), + comment=dict(argstr="-c %s",), + compress=dict(argstr="-z %s", usedefault=True,), + compression=dict(argstr="-%d",), + crop=dict(argstr="-x", usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + has_private=dict(argstr="-t", usedefault=True,), + ignore_deriv=dict(argstr="-i",), + merge_imgs=dict(argstr="-m", usedefault=True,), + out_filename=dict(argstr="-f %s",), + output_dir=dict(argstr="-o %s", usedefault=True,), + philips_float=dict(argstr="-p",), + series_numbers=dict(argstr="-n %s...",), + single_file=dict(argstr="-s", usedefault=True,), source_dir=dict( - argstr='%s', - mandatory=True, - position=-1, - xor=['source_names'], + argstr="%s", mandatory=True, position=-1, xor=["source_names"], ), source_names=dict( - argstr='%s', + argstr="%s", copyfile=False, mandatory=True, position=-1, - xor=['source_dir'], - ), - to_nrrd=dict(argstr='-e', ), - verbose=dict( - argstr='-v', - usedefault=True, + xor=["source_dir"], ), + to_nrrd=dict(argstr="-e",), + verbose=dict(argstr="-v", usedefault=True,), ) inputs = Dcm2niix.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Dcm2niix_outputs(): - output_map = dict( - bids=dict(), - bvals=dict(), - bvecs=dict(), - converted_files=dict(), - ) + output_map = dict(bids=dict(), bvals=dict(), bvecs=dict(), converted_files=dict(),) outputs = Dcm2niix.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_DcmStack.py b/nipype/interfaces/tests/test_auto_DcmStack.py index bffc1706ad..af0ff91495 100644 --- a/nipype/interfaces/tests/test_auto_DcmStack.py +++ 
b/nipype/interfaces/tests/test_auto_DcmStack.py @@ -4,12 +4,12 @@ def test_DcmStack_inputs(): input_map = dict( - dicom_files=dict(mandatory=True, ), + dicom_files=dict(mandatory=True,), embed_meta=dict(), exclude_regexes=dict(), - force_read=dict(usedefault=True, ), + force_read=dict(usedefault=True,), include_regexes=dict(), - out_ext=dict(usedefault=True, ), + out_ext=dict(usedefault=True,), out_format=dict(), out_path=dict(), ) @@ -18,8 +18,10 @@ def test_DcmStack_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_DcmStack_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = DcmStack.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_ExportFile.py b/nipype/interfaces/tests/test_auto_ExportFile.py index d7aa25ddd9..8dd84b29b9 100644 --- a/nipype/interfaces/tests/test_auto_ExportFile.py +++ b/nipype/interfaces/tests/test_auto_ExportFile.py @@ -6,22 +6,18 @@ def test_ExportFile_inputs(): input_map = dict( check_extension=dict(), clobber=dict(), - in_file=dict( - extensions=None, - mandatory=True, - ), - out_file=dict( - extensions=None, - mandatory=True, - ), + in_file=dict(extensions=None, mandatory=True,), + out_file=dict(extensions=None, mandatory=True,), ) inputs = ExportFile.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_ExportFile_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = ExportFile.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/tests/test_auto_FreeSurferSource.py b/nipype/interfaces/tests/test_auto_FreeSurferSource.py index 26511575ae..c7102512ae 100644 --- a/nipype/interfaces/tests/test_auto_FreeSurferSource.py +++ b/nipype/interfaces/tests/test_auto_FreeSurferSource.py @@ -4,132 +4,56 @@ def test_FreeSurferSource_inputs(): input_map = dict( - hemi=dict(usedefault=True, ), - subject_id=dict(mandatory=True, ), - subjects_dir=dict(mandatory=True, ), + hemi=dict(usedefault=True,), + subject_id=dict(mandatory=True,), + subjects_dir=dict(mandatory=True,), ) inputs = FreeSurferSource.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_FreeSurferSource_outputs(): output_map = dict( - BA_stats=dict( - altkey='BA', - loc='stats', - ), - T1=dict( - extensions=None, - loc='mri', - ), - annot=dict( - altkey='*annot', - loc='label', - ), - aparc_a2009s_stats=dict( - altkey='aparc.a2009s', - loc='stats', - ), - aparc_aseg=dict( - altkey='aparc*aseg', - loc='mri', - ), - aparc_stats=dict( - altkey='aparc', - loc='stats', - ), - area_pial=dict( - altkey='area.pial', - loc='surf', - ), - aseg=dict( - extensions=None, - loc='mri', - ), - aseg_stats=dict( - altkey='aseg', - loc='stats', - ), - avg_curv=dict(loc='surf', ), - brain=dict( - extensions=None, - loc='mri', - ), - brainmask=dict( - extensions=None, - loc='mri', - ), - curv=dict(loc='surf', ), - curv_pial=dict( - altkey='curv.pial', - loc='surf', - ), - curv_stats=dict( - altkey='curv', - loc='stats', - ), - entorhinal_exvivo_stats=dict( - altkey='entorhinal_exvivo', - loc='stats', - ), - filled=dict( - 
extensions=None, - loc='mri', - ), - graymid=dict( - altkey=['graymid', 'midthickness'], - loc='surf', - ), - inflated=dict(loc='surf', ), - jacobian_white=dict(loc='surf', ), - label=dict( - altkey='*label', - loc='label', - ), - norm=dict( - extensions=None, - loc='mri', - ), - nu=dict( - extensions=None, - loc='mri', - ), - orig=dict( - extensions=None, - loc='mri', - ), - pial=dict(loc='surf', ), - rawavg=dict( - extensions=None, - loc='mri', - ), - ribbon=dict( - altkey='*ribbon', - loc='mri', - ), - smoothwm=dict(loc='surf', ), - sphere=dict(loc='surf', ), - sphere_reg=dict( - altkey='sphere.reg', - loc='surf', - ), - sulc=dict(loc='surf', ), - thickness=dict(loc='surf', ), - volume=dict(loc='surf', ), - white=dict(loc='surf', ), - wm=dict( - extensions=None, - loc='mri', - ), - wmparc=dict( - extensions=None, - loc='mri', - ), - wmparc_stats=dict( - altkey='wmparc', - loc='stats', - ), + BA_stats=dict(altkey="BA", loc="stats",), + T1=dict(extensions=None, loc="mri",), + annot=dict(altkey="*annot", loc="label",), + aparc_a2009s_stats=dict(altkey="aparc.a2009s", loc="stats",), + aparc_aseg=dict(altkey="aparc*aseg", loc="mri",), + aparc_stats=dict(altkey="aparc", loc="stats",), + area_pial=dict(altkey="area.pial", loc="surf",), + aseg=dict(extensions=None, loc="mri",), + aseg_stats=dict(altkey="aseg", loc="stats",), + avg_curv=dict(loc="surf",), + brain=dict(extensions=None, loc="mri",), + brainmask=dict(extensions=None, loc="mri",), + curv=dict(loc="surf",), + curv_pial=dict(altkey="curv.pial", loc="surf",), + curv_stats=dict(altkey="curv", loc="stats",), + entorhinal_exvivo_stats=dict(altkey="entorhinal_exvivo", loc="stats",), + filled=dict(extensions=None, loc="mri",), + graymid=dict(altkey=["graymid", "midthickness"], loc="surf",), + inflated=dict(loc="surf",), + jacobian_white=dict(loc="surf",), + label=dict(altkey="*label", loc="label",), + norm=dict(extensions=None, loc="mri",), + nu=dict(extensions=None, loc="mri",), + orig=dict(extensions=None, loc="mri",), + pial=dict(loc="surf",), + rawavg=dict(extensions=None, loc="mri",), + ribbon=dict(altkey="*ribbon", loc="mri",), + smoothwm=dict(loc="surf",), + sphere=dict(loc="surf",), + sphere_reg=dict(altkey="sphere.reg", loc="surf",), + sulc=dict(loc="surf",), + thickness=dict(loc="surf",), + volume=dict(loc="surf",), + white=dict(loc="surf",), + wm=dict(extensions=None, loc="mri",), + wmparc=dict(extensions=None, loc="mri",), + wmparc_stats=dict(altkey="wmparc", loc="stats",), ) outputs = FreeSurferSource.output_spec() diff --git a/nipype/interfaces/tests/test_auto_GroupAndStack.py b/nipype/interfaces/tests/test_auto_GroupAndStack.py index a566467007..f330efde20 100644 --- a/nipype/interfaces/tests/test_auto_GroupAndStack.py +++ b/nipype/interfaces/tests/test_auto_GroupAndStack.py @@ -4,12 +4,12 @@ def test_GroupAndStack_inputs(): input_map = dict( - dicom_files=dict(mandatory=True, ), + dicom_files=dict(mandatory=True,), embed_meta=dict(), exclude_regexes=dict(), - force_read=dict(usedefault=True, ), + force_read=dict(usedefault=True,), include_regexes=dict(), - out_ext=dict(usedefault=True, ), + out_ext=dict(usedefault=True,), out_format=dict(), out_path=dict(), ) @@ -18,8 +18,10 @@ def test_GroupAndStack_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_GroupAndStack_outputs(): - output_map = dict(out_list=dict(), ) + output_map = dict(out_list=dict(),) outputs = GroupAndStack.output_spec() for key, metadata in 
diff --git a/nipype/interfaces/tests/test_auto_JSONFileGrabber.py b/nipype/interfaces/tests/test_auto_JSONFileGrabber.py
index 61aea81044..c01a584949 100644
--- a/nipype/interfaces/tests/test_auto_JSONFileGrabber.py
+++ b/nipype/interfaces/tests/test_auto_JSONFileGrabber.py
@@ -3,15 +3,14 @@
 def test_JSONFileGrabber_inputs():
-    input_map = dict(
-        defaults=dict(),
-        in_file=dict(extensions=None, ),
-    )
+    input_map = dict(defaults=dict(), in_file=dict(extensions=None,),)
     inputs = JSONFileGrabber.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_JSONFileGrabber_outputs():
     output_map = dict()
     outputs = JSONFileGrabber.output_spec()
diff --git a/nipype/interfaces/tests/test_auto_JSONFileSink.py b/nipype/interfaces/tests/test_auto_JSONFileSink.py
index d28df1cb66..c88faba852 100644
--- a/nipype/interfaces/tests/test_auto_JSONFileSink.py
+++ b/nipype/interfaces/tests/test_auto_JSONFileSink.py
@@ -4,17 +4,19 @@
 def test_JSONFileSink_inputs():
     input_map = dict(
-        _outputs=dict(usedefault=True, ),
-        in_dict=dict(usedefault=True, ),
-        out_file=dict(extensions=None, ),
+        _outputs=dict(usedefault=True,),
+        in_dict=dict(usedefault=True,),
+        out_file=dict(extensions=None,),
     )
     inputs = JSONFileSink.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_JSONFileSink_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = JSONFileSink.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/tests/test_auto_LookupMeta.py b/nipype/interfaces/tests/test_auto_LookupMeta.py
index 8b5424ab6a..fa0129798e 100644
--- a/nipype/interfaces/tests/test_auto_LookupMeta.py
+++ b/nipype/interfaces/tests/test_auto_LookupMeta.py
@@ -4,17 +4,15 @@
 def test_LookupMeta_inputs():
     input_map = dict(
-        in_file=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        meta_keys=dict(mandatory=True, ),
+        in_file=dict(extensions=None, mandatory=True,), meta_keys=dict(mandatory=True,),
     )
     inputs = LookupMeta.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_LookupMeta_outputs():
     output_map = dict()
     outputs = LookupMeta.output_spec()
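
The test_auto_* modules in this hunk series are generated from the interface specs rather than written by hand, which is why the reformatter touches so many of them in exactly the same way. Each one replays an interface's trait keywords as a plain dict and checks that every keyword round-trips through the trait's metadata. A minimal sketch of that pattern, using a hypothetical ExampleInputSpec (the spec name and traits below are illustrative, not taken from this diff):

    # Sketch of the auto-test pattern; ExampleInputSpec is hypothetical.
    from nipype.interfaces.base import BaseInterfaceInputSpec, File, traits


    class ExampleInputSpec(BaseInterfaceInputSpec):
        in_file = File(extensions=None, mandatory=True)
        force_read = traits.Bool(True, usedefault=True)


    input_map = dict(
        in_file=dict(extensions=None, mandatory=True),
        force_read=dict(usedefault=True),
    )
    inputs = ExampleInputSpec()
    for key, metadata in list(input_map.items()):
        for metakey, value in list(metadata.items()):
            # Every keyword given to a trait must be recoverable from the
            # trait's metadata; this is the same loop the generated tests run.
            assert getattr(inputs.traits()[key], metakey) == value
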
diff --git a/nipype/interfaces/tests/test_auto_MatlabCommand.py b/nipype/interfaces/tests/test_auto_MatlabCommand.py
index 7e04090c78..0dea244ec8 100644
--- a/nipype/interfaces/tests/test_auto_MatlabCommand.py
+++ b/nipype/interfaces/tests/test_auto_MatlabCommand.py
@@ -4,45 +4,20 @@
 def test_MatlabCommand_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        logfile=dict(
-            argstr='-logfile %s',
-            extensions=None,
-        ),
-        mfile=dict(usedefault=True, ),
-        nodesktop=dict(
-            argstr='-nodesktop',
-            nohash=True,
-            usedefault=True,
-        ),
-        nosplash=dict(
-            argstr='-nosplash',
-            nohash=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
+        logfile=dict(argstr="-logfile %s", extensions=None,),
+        mfile=dict(usedefault=True,),
+        nodesktop=dict(argstr="-nodesktop", nohash=True, usedefault=True,),
+        nosplash=dict(argstr="-nosplash", nohash=True, usedefault=True,),
         paths=dict(),
-        postscript=dict(usedefault=True, ),
-        prescript=dict(usedefault=True, ),
-        script=dict(
-            argstr='-r "%s;exit"',
-            mandatory=True,
-            position=-1,
-        ),
-        script_file=dict(
-            extensions=None,
-            usedefault=True,
-        ),
-        single_comp_thread=dict(
-            argstr='-singleCompThread',
-            nohash=True,
-        ),
+        postscript=dict(usedefault=True,),
+        prescript=dict(usedefault=True,),
+        script=dict(argstr='-r "%s;exit"', mandatory=True, position=-1,),
+        script_file=dict(extensions=None, usedefault=True,),
+        single_comp_thread=dict(argstr="-singleCompThread", nohash=True,),
         uses_mcr=dict(
-            nohash=True,
-            xor=['nodesktop', 'nosplash', 'single_comp_thread'],
+            nohash=True, xor=["nodesktop", "nosplash", "single_comp_thread"],
         ),
     )
     inputs = MatlabCommand.input_spec()
diff --git a/nipype/interfaces/tests/test_auto_MergeNifti.py b/nipype/interfaces/tests/test_auto_MergeNifti.py
index 4898fc7fe7..38c0f39f72 100644
--- a/nipype/interfaces/tests/test_auto_MergeNifti.py
+++ b/nipype/interfaces/tests/test_auto_MergeNifti.py
@@ -4,9 +4,9 @@
 def test_MergeNifti_inputs():
     input_map = dict(
-        in_files=dict(mandatory=True, ),
+        in_files=dict(mandatory=True,),
         merge_dim=dict(),
-        out_ext=dict(usedefault=True, ),
+        out_ext=dict(usedefault=True,),
         out_format=dict(),
         out_path=dict(),
         sort_order=dict(),
@@ -16,8 +16,10 @@
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MergeNifti_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = MergeNifti.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/tests/test_auto_MeshFix.py b/nipype/interfaces/tests/test_auto_MeshFix.py
index 557396f2fd..9a69fc0859 100644
--- a/nipype/interfaces/tests/test_auto_MeshFix.py
+++ b/nipype/interfaces/tests/test_auto_MeshFix.py
@@ -4,99 +4,72 @@
 def test_MeshFix_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        cut_inner=dict(argstr='--cut-inner %d', ),
-        cut_outer=dict(argstr='--cut-outer %d', ),
-        decouple_inin=dict(argstr='--decouple-inin %d', ),
-        decouple_outin=dict(argstr='--decouple-outin %d', ),
-        decouple_outout=dict(argstr='--decouple-outout %d', ),
-        dilation=dict(argstr='--dilate %d', ),
-        dont_clean=dict(argstr='--no-clean', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        epsilon_angle=dict(argstr='-a %f', ),
+        args=dict(argstr="%s",),
+        cut_inner=dict(argstr="--cut-inner %d",),
+        cut_outer=dict(argstr="--cut-outer %d",),
+        decouple_inin=dict(argstr="--decouple-inin %d",),
+        decouple_outin=dict(argstr="--decouple-outin %d",),
+        decouple_outout=dict(argstr="--decouple-outout %d",),
+        dilation=dict(argstr="--dilate %d",),
+        dont_clean=dict(argstr="--no-clean",),
+        environ=dict(nohash=True, usedefault=True,),
+        epsilon_angle=dict(argstr="-a %f",),
         finetuning_distance=dict(
-            argstr='%f',
-            position=-2,
-            requires=['finetuning_substeps'],
+            argstr="%f", position=-2, requires=["finetuning_substeps"],
         ),
         finetuning_inwards=dict(
-            argstr='--fineTuneIn ',
+            argstr="--fineTuneIn ",
             position=-3,
-            requires=['finetuning_distance', 'finetuning_substeps'],
+            requires=["finetuning_distance", "finetuning_substeps"],
         ),
         finetuning_outwards=dict(
-            argstr='--fineTuneOut ',
+            argstr="--fineTuneOut ",
             position=-3,
-            requires=['finetuning_distance', 'finetuning_substeps'],
-            xor=['finetuning_inwards'],
+            requires=["finetuning_distance", "finetuning_substeps"],
+            xor=["finetuning_inwards"],
         ),
         finetuning_substeps=dict(
-            argstr='%d',
-            position=-1,
-            requires=['finetuning_distance'],
-        ),
-        in_file1=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=1,
-        ),
-        in_file2=dict(
-            argstr='%s',
-            extensions=None,
-            position=2,
-        ),
-        join_closest_components=dict(
-            argstr='-jc',
-            xor=['join_closest_components'],
+            argstr="%d", position=-1, requires=["finetuning_distance"],
         ),
+        in_file1=dict(argstr="%s", extensions=None, mandatory=True, position=1,),
+        in_file2=dict(argstr="%s", extensions=None, position=2,),
+        join_closest_components=dict(argstr="-jc", xor=["join_closest_components"],),
         join_overlapping_largest_components=dict(
-            argstr='-j',
-            xor=['join_closest_components'],
+            argstr="-j", xor=["join_closest_components"],
         ),
-        laplacian_smoothing_steps=dict(argstr='--smooth %d', ),
-        number_of_biggest_shells=dict(argstr='--shells %d', ),
-        out_filename=dict(
-            argstr='-o %s',
-            extensions=None,
-            genfile=True,
-        ),
-        output_type=dict(usedefault=True, ),
-        quiet_mode=dict(argstr='-q', ),
-        remove_handles=dict(argstr='--remove-handles', ),
+        laplacian_smoothing_steps=dict(argstr="--smooth %d",),
+        number_of_biggest_shells=dict(argstr="--shells %d",),
+        out_filename=dict(argstr="-o %s", extensions=None, genfile=True,),
+        output_type=dict(usedefault=True,),
+        quiet_mode=dict(argstr="-q",),
+        remove_handles=dict(argstr="--remove-handles",),
         save_as_freesurfer_mesh=dict(
-            argstr='--fsmesh',
-            xor=['save_as_vrml', 'save_as_stl'],
+            argstr="--fsmesh", xor=["save_as_vrml", "save_as_stl"],
         ),
         save_as_stl=dict(
-            argstr='--stl',
-            xor=['save_as_vrml', 'save_as_freesurfer_mesh'],
+            argstr="--stl", xor=["save_as_vrml", "save_as_freesurfer_mesh"],
         ),
         save_as_vrml=dict(
-            argstr='--wrl',
-            xor=['save_as_stl', 'save_as_freesurfer_mesh'],
+            argstr="--wrl", xor=["save_as_stl", "save_as_freesurfer_mesh"],
         ),
-        set_intersections_to_one=dict(argstr='--intersect', ),
+        set_intersections_to_one=dict(argstr="--intersect",),
         uniform_remeshing_steps=dict(
-            argstr='-u %d',
-            requires=['uniform_remeshing_vertices'],
+            argstr="-u %d", requires=["uniform_remeshing_vertices"],
         ),
         uniform_remeshing_vertices=dict(
-            argstr='--vertices %d',
-            requires=['uniform_remeshing_steps'],
+            argstr="--vertices %d", requires=["uniform_remeshing_steps"],
         ),
-        x_shift=dict(argstr='--smooth %d', ),
+        x_shift=dict(argstr="--smooth %d",),
     )
     inputs = MeshFix.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_MeshFix_outputs():
-    output_map = dict(mesh_file=dict(extensions=None, ), )
+    output_map = dict(mesh_file=dict(extensions=None,),)
     outputs = MeshFix.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/tests/test_auto_MySQLSink.py b/nipype/interfaces/tests/test_auto_MySQLSink.py
index 30899607a8..30bf18ef26 100644
--- a/nipype/interfaces/tests/test_auto_MySQLSink.py
+++ b/nipype/interfaces/tests/test_auto_MySQLSink.py
@@ -4,20 +4,16 @@
 def test_MySQLSink_inputs():
     input_map = dict(
-        config=dict(
-            extensions=None,
-            mandatory=True,
-            xor=['host'],
-        ),
-        database_name=dict(mandatory=True, ),
+        config=dict(extensions=None, mandatory=True, xor=["host"],),
+        database_name=dict(mandatory=True,),
         host=dict(
             mandatory=True,
-            requires=['username', 'password'],
+            requires=["username", "password"],
             usedefault=True,
-            xor=['config'],
+            xor=["config"],
         ),
         password=dict(),
-        table_name=dict(mandatory=True, ),
+        table_name=dict(mandatory=True,),
         username=dict(),
     )
     inputs = MySQLSink.input_spec()
diff --git a/nipype/interfaces/tests/test_auto_PETPVC.py b/nipype/interfaces/tests/test_auto_PETPVC.py
index e5b116d932..49ba858e4a 100644
--- a/nipype/interfaces/tests/test_auto_PETPVC.py
+++ b/nipype/interfaces/tests/test_auto_PETPVC.py
@@ -4,71 +4,30 @@
 def test_PETPVC_inputs():
     input_map = dict(
-        alpha=dict(
-            argstr='-a %.4f',
-            usedefault=True,
-        ),
-        args=dict(argstr='%s', ),
-        debug=dict(
-            argstr='-d',
-            usedefault=True,
-        ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        fwhm_x=dict(
-            argstr='-x %.4f',
-            mandatory=True,
-        ),
-        fwhm_y=dict(
-            argstr='-y %.4f',
-            mandatory=True,
-        ),
-        fwhm_z=dict(
-            argstr='-z %.4f',
-            mandatory=True,
-        ),
-        in_file=dict(
-            argstr='-i %s',
-            extensions=None,
-            mandatory=True,
-        ),
-        mask_file=dict(
-            argstr='-m %s',
-            extensions=None,
-            mandatory=True,
-        ),
-        n_deconv=dict(
-            argstr='-k %d',
-            usedefault=True,
-        ),
-        n_iter=dict(
-            argstr='-n %d',
-            usedefault=True,
-        ),
-        out_file=dict(
-            argstr='-o %s',
-            extensions=None,
-            genfile=True,
-            hash_files=False,
-        ),
-        pvc=dict(
-            argstr='-p %s',
-            mandatory=True,
-        ),
-        stop_crit=dict(
-            argstr='-s %.4f',
-            usedefault=True,
-        ),
+        alpha=dict(argstr="-a %.4f", usedefault=True,),
+        args=dict(argstr="%s",),
+        debug=dict(argstr="-d", usedefault=True,),
+        environ=dict(nohash=True, usedefault=True,),
+        fwhm_x=dict(argstr="-x %.4f", mandatory=True,),
+        fwhm_y=dict(argstr="-y %.4f", mandatory=True,),
+        fwhm_z=dict(argstr="-z %.4f", mandatory=True,),
+        in_file=dict(argstr="-i %s", extensions=None, mandatory=True,),
+        mask_file=dict(argstr="-m %s", extensions=None, mandatory=True,),
+        n_deconv=dict(argstr="-k %d", usedefault=True,),
+        n_iter=dict(argstr="-n %d", usedefault=True,),
+        out_file=dict(argstr="-o %s", extensions=None, genfile=True, hash_files=False,),
+        pvc=dict(argstr="-p %s", mandatory=True,),
+        stop_crit=dict(argstr="-s %.4f", usedefault=True,),
     )
     inputs = PETPVC.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_PETPVC_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = PETPVC.output_spec()

     for key, metadata in list(output_map.items()):
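
A note on the style of these changes: single quotes become double quotes, and short calls such as dict(out_file=dict(extensions=None, ), ) collapse onto one line while keeping the pre-existing trailing comma. That is the behavior of the 2019-era black releases; the exact version used for this reformat is an assumption here, since later releases treat a trailing comma as "magic" and would explode these calls one argument per line instead. A sketch of reproducing the formatting (the version pin is a guess, not taken from this diff):

    # Sketch: run the formatter over the package tree.
    # Assumes a 19.x black is installed, e.g. `pip install black==19.10b0`.
    import subprocess

    subprocess.run(["black", "nipype"], check=True)  # rewrites files in place
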
diff --git a/nipype/interfaces/tests/test_auto_Quickshear.py b/nipype/interfaces/tests/test_auto_Quickshear.py
index 43ccd757c1..06b829af9a 100644
--- a/nipype/interfaces/tests/test_auto_Quickshear.py
+++ b/nipype/interfaces/tests/test_auto_Quickshear.py
@@ -4,33 +4,17 @@
 def test_Quickshear_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        buff=dict(
-            argstr='%d',
-            position=4,
-        ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
-        in_file=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=1,
-        ),
-        mask_file=dict(
-            argstr='%s',
-            extensions=None,
-            mandatory=True,
-            position=2,
-        ),
+        args=dict(argstr="%s",),
+        buff=dict(argstr="%d", position=4,),
+        environ=dict(nohash=True, usedefault=True,),
+        in_file=dict(argstr="%s", extensions=None, mandatory=True, position=1,),
+        mask_file=dict(argstr="%s", extensions=None, mandatory=True, position=2,),
         out_file=dict(
-            argstr='%s',
+            argstr="%s",
             extensions=None,
             keep_extension=True,
-            name_source='in_file',
-            name_template='%s_defaced',
+            name_source="in_file",
+            name_template="%s_defaced",
             position=3,
         ),
     )
@@ -39,8 +23,10 @@
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Quickshear_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = Quickshear.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/tests/test_auto_Reorient.py b/nipype/interfaces/tests/test_auto_Reorient.py
index d6774a483c..bb4660a05c 100644
--- a/nipype/interfaces/tests/test_auto_Reorient.py
+++ b/nipype/interfaces/tests/test_auto_Reorient.py
@@ -4,21 +4,19 @@
 def test_Reorient_inputs():
     input_map = dict(
-        in_file=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        orientation=dict(usedefault=True, ),
+        in_file=dict(extensions=None, mandatory=True,),
+        orientation=dict(usedefault=True,),
     )
     inputs = Reorient.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Reorient_outputs():
     output_map = dict(
-        out_file=dict(extensions=None, ),
-        transform=dict(extensions=None, ),
+        out_file=dict(extensions=None,), transform=dict(extensions=None,),
     )
     outputs = Reorient.output_spec()
diff --git a/nipype/interfaces/tests/test_auto_Rescale.py b/nipype/interfaces/tests/test_auto_Rescale.py
index e3e2bf151b..d8c83c24ca 100644
--- a/nipype/interfaces/tests/test_auto_Rescale.py
+++ b/nipype/interfaces/tests/test_auto_Rescale.py
@@ -4,24 +4,20 @@
 def test_Rescale_inputs():
     input_map = dict(
-        in_file=dict(
-            extensions=None,
-            mandatory=True,
-        ),
+        in_file=dict(extensions=None, mandatory=True,),
         invert=dict(),
-        percentile=dict(usedefault=True, ),
-        ref_file=dict(
-            extensions=None,
-            mandatory=True,
-        ),
+        percentile=dict(usedefault=True,),
+        ref_file=dict(extensions=None, mandatory=True,),
     )
     inputs = Rescale.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_Rescale_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = Rescale.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/tests/test_auto_S3DataGrabber.py b/nipype/interfaces/tests/test_auto_S3DataGrabber.py
index ff13619248..96b32701e2 100644
--- a/nipype/interfaces/tests/test_auto_S3DataGrabber.py
+++ b/nipype/interfaces/tests/test_auto_S3DataGrabber.py
@@ -4,14 +4,14 @@
 def test_S3DataGrabber_inputs():
     input_map = dict(
-        anon=dict(usedefault=True, ),
-        bucket=dict(mandatory=True, ),
-        bucket_path=dict(usedefault=True, ),
+        anon=dict(usedefault=True,),
+        bucket=dict(mandatory=True,),
+        bucket_path=dict(usedefault=True,),
         local_directory=dict(),
-        raise_on_empty=dict(usedefault=True, ),
-        region=dict(usedefault=True, ),
-        sort_filelist=dict(mandatory=True, ),
-        template=dict(mandatory=True, ),
+        raise_on_empty=dict(usedefault=True,),
+        region=dict(usedefault=True,),
+        sort_filelist=dict(mandatory=True,),
+        template=dict(mandatory=True,),
         template_args=dict(),
     )
     inputs = S3DataGrabber.input_spec()
@@ -19,6 +19,8 @@
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_S3DataGrabber_outputs():
     output_map = dict()
     outputs = S3DataGrabber.output_spec()
diff --git a/nipype/interfaces/tests/test_auto_SQLiteSink.py b/nipype/interfaces/tests/test_auto_SQLiteSink.py
index 353e01e985..f1a62ead15 100644
--- a/nipype/interfaces/tests/test_auto_SQLiteSink.py
+++ b/nipype/interfaces/tests/test_auto_SQLiteSink.py
@@ -4,11 +4,8 @@
 def test_SQLiteSink_inputs():
     input_map = dict(
-        database_file=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        table_name=dict(mandatory=True, ),
+        database_file=dict(extensions=None, mandatory=True,),
+        table_name=dict(mandatory=True,),
     )
     inputs = SQLiteSink.input_spec()
diff --git a/nipype/interfaces/tests/test_auto_SSHDataGrabber.py b/nipype/interfaces/tests/test_auto_SSHDataGrabber.py
index e3755c8a75..7a759cb07f 100644
--- a/nipype/interfaces/tests/test_auto_SSHDataGrabber.py
+++ b/nipype/interfaces/tests/test_auto_SSHDataGrabber.py
@@ -4,17 +4,17 @@
 def test_SSHDataGrabber_inputs():
     input_map = dict(
-        base_directory=dict(mandatory=True, ),
-        download_files=dict(usedefault=True, ),
-        drop_blank_outputs=dict(usedefault=True, ),
-        hostname=dict(mandatory=True, ),
+        base_directory=dict(mandatory=True,),
+        download_files=dict(usedefault=True,),
+        drop_blank_outputs=dict(usedefault=True,),
+        hostname=dict(mandatory=True,),
         password=dict(),
-        raise_on_empty=dict(usedefault=True, ),
-        sort_filelist=dict(mandatory=True, ),
-        ssh_log_to_file=dict(usedefault=True, ),
-        template=dict(mandatory=True, ),
+        raise_on_empty=dict(usedefault=True,),
+        sort_filelist=dict(mandatory=True,),
+        ssh_log_to_file=dict(usedefault=True,),
+        template=dict(mandatory=True,),
         template_args=dict(),
-        template_expression=dict(usedefault=True, ),
+        template_expression=dict(usedefault=True,),
         username=dict(),
     )
     inputs = SSHDataGrabber.input_spec()
@@ -22,6 +22,8 @@
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_SSHDataGrabber_outputs():
     output_map = dict()
     outputs = SSHDataGrabber.output_spec()
diff --git a/nipype/interfaces/tests/test_auto_SelectFiles.py b/nipype/interfaces/tests/test_auto_SelectFiles.py
index ed6c276ddf..722c431b8f 100644
--- a/nipype/interfaces/tests/test_auto_SelectFiles.py
+++ b/nipype/interfaces/tests/test_auto_SelectFiles.py
@@ -5,15 +5,17 @@
 def test_SelectFiles_inputs():
     input_map = dict(
         base_directory=dict(),
-        force_lists=dict(usedefault=True, ),
-        raise_on_empty=dict(usedefault=True, ),
-        sort_filelist=dict(usedefault=True, ),
+        force_lists=dict(usedefault=True,),
+        raise_on_empty=dict(usedefault=True,),
+        sort_filelist=dict(usedefault=True,),
     )
     inputs = SelectFiles.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_SelectFiles_outputs():
     output_map = dict()
     outputs = SelectFiles.output_spec()
diff --git a/nipype/interfaces/tests/test_auto_SignalExtraction.py b/nipype/interfaces/tests/test_auto_SignalExtraction.py
index 3ca02656b1..2af759b50a 100644
--- a/nipype/interfaces/tests/test_auto_SignalExtraction.py
+++ b/nipype/interfaces/tests/test_auto_SignalExtraction.py
@@ -4,27 +4,23 @@
 def test_SignalExtraction_inputs():
     input_map = dict(
-        class_labels=dict(mandatory=True, ),
-        detrend=dict(usedefault=True, ),
-        in_file=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        incl_shared_variance=dict(usedefault=True, ),
-        include_global=dict(usedefault=True, ),
-        label_files=dict(mandatory=True, ),
-        out_file=dict(
-            extensions=None,
-            usedefault=True,
-        ),
+        class_labels=dict(mandatory=True,),
+        detrend=dict(usedefault=True,),
+        in_file=dict(extensions=None, mandatory=True,),
+        incl_shared_variance=dict(usedefault=True,),
+        include_global=dict(usedefault=True,),
+        label_files=dict(mandatory=True,),
+        out_file=dict(extensions=None, usedefault=True,),
     )
     inputs = SignalExtraction.input_spec()

     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_SignalExtraction_outputs():
-    output_map = dict(out_file=dict(extensions=None, ), )
+    output_map = dict(out_file=dict(extensions=None,),)
     outputs = SignalExtraction.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/tests/test_auto_SlicerCommandLine.py b/nipype/interfaces/tests/test_auto_SlicerCommandLine.py
index 43ae697eec..99bd82a8f3 100644
--- a/nipype/interfaces/tests/test_auto_SlicerCommandLine.py
+++ b/nipype/interfaces/tests/test_auto_SlicerCommandLine.py
@@ -4,11 +4,8 @@
 def test_SlicerCommandLine_inputs():
     input_map = dict(
-        args=dict(argstr='%s', ),
-        environ=dict(
-            nohash=True,
-            usedefault=True,
-        ),
+        args=dict(argstr="%s",),
+        environ=dict(nohash=True, usedefault=True,),
         module=dict(),
     )
     inputs = SlicerCommandLine.input_spec()
@@ -16,6 +13,8 @@
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_SlicerCommandLine_outputs():
     output_map = dict()
     outputs = SlicerCommandLine.output_spec()
diff --git a/nipype/interfaces/tests/test_auto_SplitNifti.py b/nipype/interfaces/tests/test_auto_SplitNifti.py
index 06dd71feba..219f378a82 100644
--- a/nipype/interfaces/tests/test_auto_SplitNifti.py
+++ b/nipype/interfaces/tests/test_auto_SplitNifti.py
@@ -4,11 +4,8 @@
 def test_SplitNifti_inputs():
     input_map = dict(
-        in_file=dict(
-            extensions=None,
-            mandatory=True,
-        ),
-        out_ext=dict(usedefault=True, ),
+        in_file=dict(extensions=None, mandatory=True,),
+        out_ext=dict(usedefault=True,),
         out_format=dict(),
         out_path=dict(),
         split_dim=dict(),
@@ -18,8 +15,10 @@
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_SplitNifti_outputs():
-    output_map = dict(out_list=dict(), )
+    output_map = dict(out_list=dict(),)
     outputs = SplitNifti.output_spec()

     for key, metadata in list(output_map.items()):
diff --git a/nipype/interfaces/tests/test_auto_XNATSink.py b/nipype/interfaces/tests/test_auto_XNATSink.py
index 0206281382..d22e2a1a63 100644
--- a/nipype/interfaces/tests/test_auto_XNATSink.py
+++ b/nipype/interfaces/tests/test_auto_XNATSink.py
@@ -4,25 +4,17 @@
 def test_XNATSink_inputs():
     input_map = dict(
-        _outputs=dict(usedefault=True, ),
-        assessor_id=dict(xor=['reconstruction_id'], ),
+        _outputs=dict(usedefault=True,),
+        assessor_id=dict(xor=["reconstruction_id"],),
         cache_dir=dict(),
-        config=dict(
-            extensions=None,
-            mandatory=True,
-            xor=['server'],
-        ),
-        experiment_id=dict(mandatory=True, ),
-        project_id=dict(mandatory=True, ),
+        config=dict(extensions=None, mandatory=True, xor=["server"],),
+        experiment_id=dict(mandatory=True,),
+        project_id=dict(mandatory=True,),
         pwd=dict(),
-        reconstruction_id=dict(xor=['assessor_id'], ),
-        server=dict(
-            mandatory=True,
-            requires=['user', 'pwd'],
-            xor=['config'],
-        ),
-        share=dict(usedefault=True, ),
-        subject_id=dict(mandatory=True, ),
+        reconstruction_id=dict(xor=["assessor_id"],),
+        server=dict(mandatory=True, requires=["user", "pwd"], xor=["config"],),
+        share=dict(usedefault=True,),
+        subject_id=dict(mandatory=True,),
         user=dict(),
     )
     inputs = XNATSink.input_spec()
diff --git a/nipype/interfaces/tests/test_auto_XNATSource.py b/nipype/interfaces/tests/test_auto_XNATSource.py
index c26553e390..a60af06b6a 100644
--- a/nipype/interfaces/tests/test_auto_XNATSource.py
+++ b/nipype/interfaces/tests/test_auto_XNATSource.py
@@ -5,19 +5,11 @@
 def test_XNATSource_inputs():
     input_map = dict(
         cache_dir=dict(),
-        config=dict(
-            extensions=None,
-            mandatory=True,
-            xor=['server'],
-        ),
+        config=dict(extensions=None, mandatory=True, xor=["server"],),
         pwd=dict(),
-        query_template=dict(mandatory=True, ),
-        query_template_args=dict(usedefault=True, ),
-        server=dict(
-            mandatory=True,
-            requires=['user', 'pwd'],
-            xor=['config'],
-        ),
+        query_template=dict(mandatory=True,),
+        query_template_args=dict(usedefault=True,),
+        server=dict(mandatory=True, requires=["user", "pwd"], xor=["config"],),
         user=dict(),
     )
     inputs = XNATSource.input_spec()
@@ -25,6 +17,8 @@
     for key, metadata in list(input_map.items()):
         for metakey, value in list(metadata.items()):
             assert getattr(inputs.traits()[key], metakey) == value
+
+
 def test_XNATSource_outputs():
     output_map = dict()
     outputs = XNATSource.output_spec()
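
The XNAT and SQL sink specs above lean on two pieces of trait metadata that these tests pin down: xor declares mutually exclusive inputs (config versus server), and requires declares co-dependent ones (server is only usable together with user and pwd). A minimal sketch of such a spec, with a hypothetical class name (the pattern itself is taken from the specs in this diff):

    from nipype.interfaces.base import BaseInterfaceInputSpec, File, traits


    class RemoteSinkInputSpec(BaseInterfaceInputSpec):
        # Setting `config` rules out `server`, and vice versa; one of the
        # two mandatory-but-xor'd inputs must be provided.
        config = File(mandatory=True, xor=["server"])
        # `server` additionally needs credentials when it is used.
        server = traits.Str(mandatory=True, requires=["user", "pwd"], xor=["config"])
        user = traits.Str()
        pwd = traits.Str()
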
diff --git a/nipype/interfaces/tests/test_extra_dcm2nii.py b/nipype/interfaces/tests/test_extra_dcm2nii.py
index 68fb626f75..e76f300ec6 100644
--- a/nipype/interfaces/tests/test_extra_dcm2nii.py
+++ b/nipype/interfaces/tests/test_extra_dcm2nii.py
@@ -3,15 +3,16 @@
 import shutil
 from nipype.interfaces.dcm2nii import Dcm2niix
+
 no_dcm2niix = not bool(Dcm2niix().version)
 no_datalad = False
 try:
-    from datalad import api # to pull and grab data
+    from datalad import api  # to pull and grab data
     from datalad.support.exceptions import IncompleteResultsError
 except ImportError:
     no_datalad = True

-DICOM_DIR = 'http://datasets-tests.datalad.org/dicoms/dcm2niix-tests'
+DICOM_DIR = "http://datasets-tests.datalad.org/dicoms/dcm2niix-tests"


 @pytest.fixture
@@ -25,23 +26,25 @@
         except IncompleteResultsError as exc:
             pytest.skip("Failed to fetch test data: %s" % str(exc))
         return data
+
     return _fetch_data

+
 @pytest.mark.skipif(no_datalad, reason="Datalad required")
 @pytest.mark.skipif(no_dcm2niix, reason="Dcm2niix required")
 @pytest.mark.xfail(reason="Intermittent failures. Let's come back to this later.")
 def test_dcm2niix_dti(fetch_data, tmpdir):
     tmpdir.chdir()
-    datadir = tmpdir.mkdir('data').strpath
-    dicoms = fetch_data(datadir, 'Siemens_Sag_DTI_20160825_145811')
+    datadir = tmpdir.mkdir("data").strpath
+    dicoms = fetch_data(datadir, "Siemens_Sag_DTI_20160825_145811")

     def assert_dti(res):
         "Some assertions we will make"
         assert res.outputs.converted_files
         assert res.outputs.bvals
         assert res.outputs.bvecs
-        outputs = [y for x,y in res.outputs.get().items()]
-        if res.inputs.get('bids_format'):
+        outputs = [y for x, y in res.outputs.get().items()]
+        if res.inputs.get("bids_format"):
             # ensure all outputs are of equal lengths
             assert len(set(map(len, outputs))) == 1
         else:
@@ -49,11 +52,11 @@
     dcm = Dcm2niix()
     dcm.inputs.source_dir = dicoms
-    dcm.inputs.out_filename = '%u%z'
+    dcm.inputs.out_filename = "%u%z"
     assert_dti(dcm.run())

     # now run specifying output directory and removing BIDS option
-    outdir = tmpdir.mkdir('conversion').strpath
+    outdir = tmpdir.mkdir("conversion").strpath
     dcm.inputs.output_dir = outdir
     dcm.inputs.bids_format = False
     assert_dti(dcm.run())
diff --git a/nipype/interfaces/tests/test_image.py b/nipype/interfaces/tests/test_image.py
index 43030ff1b4..b05d55b547 100644
--- a/nipype/interfaces/tests/test_image.py
+++ b/nipype/interfaces/tests/test_image.py
@@ -9,11 +9,10 @@
 from ..image import _as_reoriented_backport, _orientations
 from ... import LooseVersion

-nibabel24 = LooseVersion(nb.__version__) >= LooseVersion('2.4.0')
+nibabel24 = LooseVersion(nb.__version__) >= LooseVersion("2.4.0")


-@pytest.mark.skipif(not nibabel24,
-                    reason="Old nibabel - can't directly compare")
+@pytest.mark.skipif(not nibabel24, reason="Old nibabel - can't directly compare")
 def test_reorientation_backport():
     pixdims = ((1, 1, 1), (2, 2, 3))
     data = np.random.normal(size=(17, 18, 19, 2))
@@ -28,7 +27,7 @@
     # Create image
     img = nb.Nifti1Image(data, affine)
-    dim_info = {'freq': 0, 'phase': 1, 'slice': 2}
+    dim_info = {"freq": 0, "phase": 1, "slice": 2}
     img.header.set_dim_info(**dim_info)

     # Find a random, non-identity transform
@@ -51,14 +50,15 @@
     # Reorientation changes affine and data array
     assert not np.allclose(img.affine, reoriented_a.affine)
-    assert not (flips_only and
-                np.allclose(img.get_data(), reoriented_a.get_data()))
+    assert not (flips_only and np.allclose(img.get_data(), reoriented_a.get_data()))

     # Dimension info changes iff axes are reordered
-    assert flips_only == np.array_equal(img.header.get_dim_info(),
-                                        reoriented_a.header.get_dim_info())
+    assert flips_only == np.array_equal(
+        img.header.get_dim_info(), reoriented_a.header.get_dim_info()
+    )

     # Both approaches produce equivalent images
     assert np.allclose(reoriented_a.affine, reoriented_b.affine)
     assert np.array_equal(reoriented_a.get_data(), reoriented_b.get_data())
-    assert np.array_equal(reoriented_a.header.get_dim_info(),
-                          reoriented_b.header.get_dim_info())
+    assert np.array_equal(
+        reoriented_a.header.get_dim_info(), reoriented_b.header.get_dim_info()
+    )
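
For context on test_image.py: it compares nipype's _as_reoriented_backport against the img.as_reoriented method that nibabel gained in 2.4.0, requiring both to agree on the affine, the data array, and the header's dim_info. A small sketch of the nibabel call being backported (the orientation array below is an arbitrary example, not from the test):

    import numpy as np
    import nibabel as nb

    img = nb.Nifti1Image(np.zeros((4, 5, 6)), np.eye(4))
    # One (position, flip-sign) row per axis: here axes 0 and 1 trade
    # places and one of them is flipped.
    ornt = np.array([[1, -1], [0, 1], [2, 1]])
    reoriented = img.as_reoriented(ornt)  # requires nibabel >= 2.4.0
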
diff --git a/nipype/interfaces/tests/test_io.py b/nipype/interfaces/tests/test_io.py
index 1b718f0533..ef210de030 100644
--- a/nipype/interfaces/tests/test_io.py
+++ b/nipype/interfaces/tests/test_io.py
@@ -36,6 +36,7 @@
 # Check for paramiko
 try:
     import paramiko
+
     no_paramiko = False

     # Check for localhost SSH Server
@@ -45,14 +46,15 @@
             client = paramiko.SSHClient()
             client.load_system_host_keys()
             client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-            client.connect('127.0.0.1', username=os.getenv('USER'), sock=proxy,
-                           timeout=10)
+            client.connect("127.0.0.1", username=os.getenv("USER"), sock=proxy, timeout=10)

             no_local_ssh = False

-        except (paramiko.SSHException,
-                paramiko.ssh_exception.NoValidConnectionsError,
-                OSError):
+        except (
+            paramiko.SSHException,
+            paramiko.ssh_exception.NoValidConnectionsError,
+            OSError,
+        ):
             no_local_ssh = True

 except ImportError:
@@ -61,9 +63,10 @@

 # Check for fakes3
 from subprocess import check_call, CalledProcessError
+
 try:
-    ret_code = check_call(['which', 'fakes3'], stdout=open(os.devnull, 'wb'))
-    fakes3 = (ret_code == 0)
+    ret_code = check_call(["which", "fakes3"], stdout=open(os.devnull, "wb"))
+    fakes3 = ret_code == 0
 except CalledProcessError:
     fakes3 = False

@@ -71,8 +74,9 @@
 have_pybids = True
 try:
     import bids
+
     filepath = os.path.realpath(os.path.dirname(bids.__file__))
-    datadir = os.path.realpath(os.path.join(filepath, 'tests/data/'))
+    datadir = os.path.realpath(os.path.join(filepath, "tests/data/"))
 except ImportError:
     have_pybids = False

@@ -81,7 +85,7 @@
 def test_datagrabber():
     dg = nio.DataGrabber()
     assert dg.inputs.template == Undefined
     assert dg.inputs.base_directory == Undefined
-    assert dg.inputs.template_args == {'outfiles': []}
+    assert dg.inputs.template_args == {"outfiles": []}


 @pytest.mark.skipif(noboto, reason="boto library is not available")
@@ -89,97 +93,103 @@
 def test_s3datagrabber():
     dg = nio.S3DataGrabber()
     assert dg.inputs.template == Undefined
     assert dg.inputs.local_directory == Undefined
-    assert dg.inputs.template_args == {'outfiles': []}
+    assert dg.inputs.template_args == {"outfiles": []}


 templates1 = {
     "model": "interfaces/{package}/model.py",
-    "preprocess": "interfaces/{package}/pre*.py"
+    "preprocess": "interfaces/{package}/pre*.py",
 }
 templates2 = {"converter": "interfaces/dcm{to!s}nii.py"}
 templates3 = {"model": "interfaces/{package.name}/model.py"}


-@pytest.mark.parametrize("SF_args, inputs_att, expected", [
-    ({
-        "templates": templates1
-    }, {
-        "package": "fsl"
-    }, {
-        "infields": ["package"],
-        "outfields": ["model", "preprocess"],
-        "run_output": {
-            "model":
-            op.join(op.dirname(nipype.__file__), "interfaces/fsl/model.py"),
-            "preprocess":
-            op.join(
-                op.dirname(nipype.__file__), "interfaces/fsl/preprocess.py")
-        },
-        "node_output": ["model", "preprocess"]
-    }),
-    ({
-        "templates": templates1,
-        "force_lists": True
-    }, {
-        "package": "spm"
-    }, {
-        "infields": ["package"],
-        "outfields": ["model", "preprocess"],
-        "run_output": {
-            "model":
-            [op.join(op.dirname(nipype.__file__), "interfaces/spm/model.py")],
-            "preprocess": [
-                op.join(
-                    op.dirname(nipype.__file__),
-                    "interfaces/spm/preprocess.py")
-            ]
-        },
-        "node_output": ["model", "preprocess"]
-    }),
-    ({
-        "templates": templates1
-    }, {
-        "package": "fsl",
-        "force_lists": ["model"]
-    }, {
-        "infields": ["package"],
-        "outfields": ["model", "preprocess"],
-        "run_output": {
-            "model":
-            [op.join(op.dirname(nipype.__file__), "interfaces/fsl/model.py")],
-            "preprocess":
-            op.join(
-                op.dirname(nipype.__file__), "interfaces/fsl/preprocess.py")
-        },
-        "node_output": ["model", "preprocess"]
-    }),
-    ({
-        "templates": templates2
-    }, {
-        "to": 2
-    }, {
-        "infields": ["to"],
-        "outfields": ["converter"],
-        "run_output": {
-            "converter":
-            op.join(op.dirname(nipype.__file__), "interfaces/dcm2nii.py")
-        },
-        "node_output": ["converter"]
-    }),
-    ({
-        "templates": templates3
-    }, {
-        "package": namedtuple("package", ["name"])("fsl")
-    }, {
-        "infields": ["package"],
-        "outfields": ["model"],
-        "run_output": {
-            "model":
-            op.join(op.dirname(nipype.__file__), "interfaces/fsl/model.py")
-        },
-        "node_output": ["model"]
-    }),
-])
- "infields": ["package"], - "outfields": ["model"], - "run_output": { - "model": - op.join(op.dirname(nipype.__file__), "interfaces/fsl/model.py") - }, - "node_output": ["model"] - }), -]) +@pytest.mark.parametrize( + "SF_args, inputs_att, expected", + [ + ( + {"templates": templates1}, + {"package": "fsl"}, + { + "infields": ["package"], + "outfields": ["model", "preprocess"], + "run_output": { + "model": op.join( + op.dirname(nipype.__file__), "interfaces/fsl/model.py" + ), + "preprocess": op.join( + op.dirname(nipype.__file__), "interfaces/fsl/preprocess.py" + ), + }, + "node_output": ["model", "preprocess"], + }, + ), + ( + {"templates": templates1, "force_lists": True}, + {"package": "spm"}, + { + "infields": ["package"], + "outfields": ["model", "preprocess"], + "run_output": { + "model": [ + op.join(op.dirname(nipype.__file__), "interfaces/spm/model.py") + ], + "preprocess": [ + op.join( + op.dirname(nipype.__file__), "interfaces/spm/preprocess.py" + ) + ], + }, + "node_output": ["model", "preprocess"], + }, + ), + ( + {"templates": templates1}, + {"package": "fsl", "force_lists": ["model"]}, + { + "infields": ["package"], + "outfields": ["model", "preprocess"], + "run_output": { + "model": [ + op.join(op.dirname(nipype.__file__), "interfaces/fsl/model.py") + ], + "preprocess": op.join( + op.dirname(nipype.__file__), "interfaces/fsl/preprocess.py" + ), + }, + "node_output": ["model", "preprocess"], + }, + ), + ( + {"templates": templates2}, + {"to": 2}, + { + "infields": ["to"], + "outfields": ["converter"], + "run_output": { + "converter": op.join( + op.dirname(nipype.__file__), "interfaces/dcm2nii.py" + ) + }, + "node_output": ["converter"], + }, + ), + ( + {"templates": templates3}, + {"package": namedtuple("package", ["name"])("fsl")}, + { + "infields": ["package"], + "outfields": ["model"], + "run_output": { + "model": op.join( + op.dirname(nipype.__file__), "interfaces/fsl/model.py" + ) + }, + "node_output": ["model"], + }, + ), + ], +) def test_selectfiles(tmpdir, SF_args, inputs_att, expected): tmpdir.chdir() base_dir = op.dirname(nipype.__file__) @@ -201,11 +211,10 @@ def test_selectfiles_valueerror(): base_dir = op.dirname(nipype.__file__) templates = { "model": "interfaces/{package}/model.py", - "preprocess": "interfaces/{package}/pre*.py" + "preprocess": "interfaces/{package}/pre*.py", } force_lists = ["model", "preprocess", "registration"] - sf = nio.SelectFiles( - templates, base_directory=base_dir, force_lists=force_lists) + sf = nio.SelectFiles(templates, base_directory=base_dir, force_lists=force_lists) with pytest.raises(ValueError): sf.run() @@ -213,67 +222,82 @@ def test_selectfiles_valueerror(): @pytest.mark.skipif(noboto, reason="boto library is not available") def test_s3datagrabber_communication(tmpdir): dg = nio.S3DataGrabber( - infields=['subj_id', 'run_num'], outfields=['func', 'struct']) + infields=["subj_id", "run_num"], outfields=["func", "struct"] + ) dg.inputs.anon = True - dg.inputs.bucket = 'openfmri' - dg.inputs.bucket_path = 'ds001/' + dg.inputs.bucket = "openfmri" + dg.inputs.bucket_path = "ds001/" dg.inputs.local_directory = tmpdir.strpath dg.inputs.sort_filelist = True - dg.inputs.template = '*' + dg.inputs.template = "*" dg.inputs.field_template = dict( - func='%s/BOLD/task001_%s/bold.nii.gz', - struct='%s/anatomy/highres001_brain.nii.gz') - dg.inputs.subj_id = ['sub001', 'sub002'] - dg.inputs.run_num = ['run001', 'run003'] - dg.inputs.template_args = dict( - func=[['subj_id', 'run_num']], struct=[['subj_id']]) + 
func="%s/BOLD/task001_%s/bold.nii.gz", + struct="%s/anatomy/highres001_brain.nii.gz", + ) + dg.inputs.subj_id = ["sub001", "sub002"] + dg.inputs.run_num = ["run001", "run003"] + dg.inputs.template_args = dict(func=[["subj_id", "run_num"]], struct=[["subj_id"]]) res = dg.run() func_outfiles = res.outputs.func struct_outfiles = res.outputs.struct # check for all files - assert os.path.join( - dg.inputs.local_directory, - '/sub001/BOLD/task001_run001/bold.nii.gz') in func_outfiles[0] + assert ( + os.path.join( + dg.inputs.local_directory, "/sub001/BOLD/task001_run001/bold.nii.gz" + ) + in func_outfiles[0] + ) assert os.path.exists(func_outfiles[0]) - assert os.path.join( - dg.inputs.local_directory, - '/sub001/anatomy/highres001_brain.nii.gz') in struct_outfiles[0] + assert ( + os.path.join( + dg.inputs.local_directory, "/sub001/anatomy/highres001_brain.nii.gz" + ) + in struct_outfiles[0] + ) assert os.path.exists(struct_outfiles[0]) - assert os.path.join( - dg.inputs.local_directory, - '/sub002/BOLD/task001_run003/bold.nii.gz') in func_outfiles[1] + assert ( + os.path.join( + dg.inputs.local_directory, "/sub002/BOLD/task001_run003/bold.nii.gz" + ) + in func_outfiles[1] + ) assert os.path.exists(func_outfiles[1]) - assert os.path.join( - dg.inputs.local_directory, - '/sub002/anatomy/highres001_brain.nii.gz') in struct_outfiles[1] + assert ( + os.path.join( + dg.inputs.local_directory, "/sub002/anatomy/highres001_brain.nii.gz" + ) + in struct_outfiles[1] + ) assert os.path.exists(struct_outfiles[1]) def test_datagrabber_order(tmpdir): for file_name in [ - 'sub002_L1_R1.q', 'sub002_L1_R2.q', 'sub002_L2_R1.q', - 'sub002_L2_R2.qd', 'sub002_L3_R10.q', 'sub002_L3_R2.q' + "sub002_L1_R1.q", + "sub002_L1_R2.q", + "sub002_L2_R1.q", + "sub002_L2_R2.qd", + "sub002_L3_R10.q", + "sub002_L3_R2.q", ]: - tmpdir.join(file_name).open('a').close() + tmpdir.join(file_name).open("a").close() - dg = nio.DataGrabber(infields=['sid']) + dg = nio.DataGrabber(infields=["sid"]) dg.inputs.base_directory = tmpdir.strpath - dg.inputs.template = '%s_L%d_R*.q*' - dg.inputs.template_args = { - 'outfiles': [['sid', 1], ['sid', 2], ['sid', 3]] - } - dg.inputs.sid = 'sub002' + dg.inputs.template = "%s_L%d_R*.q*" + dg.inputs.template_args = {"outfiles": [["sid", 1], ["sid", 2], ["sid", 3]]} + dg.inputs.sid = "sub002" dg.inputs.sort_filelist = True res = dg.run() outfiles = res.outputs.outfiles - assert 'sub002_L1_R1' in outfiles[0][0] - assert 'sub002_L1_R2' in outfiles[0][1] - assert 'sub002_L2_R1' in outfiles[1][0] - assert 'sub002_L2_R2' in outfiles[1][1] - assert 'sub002_L3_R2' in outfiles[2][0] - assert 'sub002_L3_R10' in outfiles[2][1] + assert "sub002_L1_R1" in outfiles[0][0] + assert "sub002_L1_R2" in outfiles[0][1] + assert "sub002_L2_R1" in outfiles[1][0] + assert "sub002_L2_R2" in outfiles[1][1] + assert "sub002_L3_R2" in outfiles[2][0] + assert "sub002_L3_R10" in outfiles[2][1] def test_datasink(): @@ -283,26 +307,25 @@ def test_datasink(): assert ds.inputs.strip_dir == Undefined assert ds.inputs._outputs == {} - ds = nio.DataSink(base_directory='foo') - assert ds.inputs.base_directory == 'foo' + ds = nio.DataSink(base_directory="foo") + assert ds.inputs.base_directory == "foo" - ds = nio.DataSink(infields=['test']) - assert 'test' in ds.inputs.copyable_trait_names() + ds = nio.DataSink(infields=["test"]) + assert "test" in ds.inputs.copyable_trait_names() # Make dummy input file @pytest.fixture(scope="module") def dummy_input(request, tmpdir_factory): - ''' + """ Function to create a dummy file - ''' + """ # Init 
-    input_path = tmpdir_factory.mktemp('input_data').join(
-        'datasink_test_s3.txt')
+    input_path = tmpdir_factory.mktemp("input_data").join("datasink_test_s3.txt")

     # Create input file
-    input_path.write_binary(b'ABCD1234')
+    input_path.write_binary(b"ABCD1234")

     # Return path
     return str(input_path)
@@ -310,35 +333,37 @@

 # Test datasink writes to s3 properly
 @pytest.mark.skipif(
-    noboto3 or not fakes3, reason="boto3 or fakes3 library is not available")
+    noboto3 or not fakes3, reason="boto3 or fakes3 library is not available"
+)
 def test_datasink_to_s3(dummy_input, tmpdir):
-    '''
+    """
     This function tests to see if the S3 functionality of a DataSink
     works properly
-    '''
+    """
     # Init variables
     ds = nio.DataSink()
-    bucket_name = 'test'
-    container = 'outputs'
-    attr_folder = 'text_file'
-    output_dir = 's3://' + bucket_name
+    bucket_name = "test"
+    container = "outputs"
+    attr_folder = "text_file"
+    output_dir = "s3://" + bucket_name

     # Local temporary filepaths for testing
     fakes3_dir = tmpdir.strpath
     input_path = dummy_input

     # Start up fake-S3 server
     proc = Popen(
-        ['fakes3', '-r', fakes3_dir, '-p', '4567'],
-        stdout=open(os.devnull, 'wb'))
+        ["fakes3", "-r", fakes3_dir, "-p", "4567"], stdout=open(os.devnull, "wb")
+    )

     # Init boto3 s3 resource to talk with fakes3
     resource = boto3.resource(
-        aws_access_key_id='mykey',
-        aws_secret_access_key='mysecret',
-        service_name='s3',
-        endpoint_url='http://127.0.0.1:4567',
-        use_ssl=False)
-    resource.meta.client.meta.events.unregister('before-sign.s3', fix_s3_host)
+        aws_access_key_id="mykey",
+        aws_secret_access_key="mysecret",
+        service_name="s3",
+        endpoint_url="http://127.0.0.1:4567",
+        use_ssl=False,
+    )
+    resource.meta.client.meta.events.unregister("before-sign.s3", fix_s3_host)

     # Create bucket
     bucket = resource.create_bucket(Bucket=bucket_name)
@@ -353,10 +378,10 @@
     ds.run()

     # Get MD5sums and compare
-    key = '/'.join([container, attr_folder, os.path.basename(input_path)])
+    key = "/".join([container, attr_folder, os.path.basename(input_path)])
     obj = bucket.Object(key=key)
-    dst_md5 = obj.e_tag.replace('"', '')
-    src_md5 = hashlib.md5(open(input_path, 'rb').read()).hexdigest()
+    dst_md5 = obj.e_tag.replace('"', "")
+    src_md5 = hashlib.md5(open(input_path, "rb").read()).hexdigest()

     # Kill fakes3
     proc.kill()
@@ -367,21 +392,22 @@

 # Test AWS creds read from env vars
 @pytest.mark.skipif(
-    noboto3 or not fakes3, reason="boto3 or fakes3 library is not available")
+    noboto3 or not fakes3, reason="boto3 or fakes3 library is not available"
+)
 def test_aws_keys_from_env():
-    '''
+    """
     Function to ensure the DataSink can successfully read in AWS
     credentials from the environment variables
-    '''
+    """
     # Init variables
     ds = nio.DataSink()
-    aws_access_key_id = 'ABCDACCESS'
-    aws_secret_access_key = 'DEFGSECRET'
+    aws_access_key_id = "ABCDACCESS"
+    aws_secret_access_key = "DEFGSECRET"

     # Set env vars
-    os.environ['AWS_ACCESS_KEY_ID'] = aws_access_key_id
-    os.environ['AWS_SECRET_ACCESS_KEY'] = aws_secret_access_key
+    os.environ["AWS_ACCESS_KEY_ID"] = aws_access_key_id
+    os.environ["AWS_SECRET_ACCESS_KEY"] = aws_secret_access_key

     # Call function to return creds
     access_key_test, secret_key_test = ds._return_aws_keys()
@@ -393,15 +419,15 @@

 # Test the local copy attribute
 def test_datasink_localcopy(dummy_input, tmpdir):
-    '''
+    """
     Function to validate DataSink will make local copy via local_copy
     attribute
-    '''
+    """

     # Init variables
     local_dir = tmpdir.strpath
-    container = 'outputs'
-    attr_folder = 'text_file'
+    container = "outputs"
+    attr_folder = "text_file"

     # Make dummy input file and datasink
     input_path = dummy_input
@@ -415,44 +441,50 @@
     setattr(ds.inputs, attr_folder, input_path)

     # Expected local copy path
-    local_copy = os.path.join(local_dir, container, attr_folder,
-                              os.path.basename(input_path))
+    local_copy = os.path.join(
+        local_dir, container, attr_folder, os.path.basename(input_path)
+    )

     # Run the datasink
     ds.run()

     # Check md5sums of both
-    src_md5 = hashlib.md5(open(input_path, 'rb').read()).hexdigest()
-    dst_md5 = hashlib.md5(open(local_copy, 'rb').read()).hexdigest()
+    src_md5 = hashlib.md5(open(input_path, "rb").read()).hexdigest()
+    dst_md5 = hashlib.md5(open(local_copy, "rb").read()).hexdigest()

     # Perform test
     assert src_md5 == dst_md5


 def test_datasink_substitutions(tmpdir):
-    indir = tmpdir.mkdir('-Tmp-nipype_ds_subs_in')
-    outdir = tmpdir.mkdir('-Tmp-nipype_ds_subs_out')
+    indir = tmpdir.mkdir("-Tmp-nipype_ds_subs_in")
+    outdir = tmpdir.mkdir("-Tmp-nipype_ds_subs_out")
     files = []
-    for n in ['ababab.n', 'xabababyz.n']:
+    for n in ["ababab.n", "xabababyz.n"]:
         f = str(indir.join(n))
         files.append(f)
-        open(f, 'w')
+        open(f, "w")
     ds = nio.DataSink(
         parametrization=False,
         base_directory=str(outdir),
-        substitutions=[('ababab', 'ABABAB')],
+        substitutions=[("ababab", "ABABAB")],
         # end anchoring ($) is used to ensure operation on the filename
         # instead of matching possible temporary directory names
         # Patterns should be more comprehensible in real-world usage,
        # since the paths there would be more sensible
-        regexp_substitutions=[(r'xABABAB(\w*)\.n$', r'a-\1-b.n'),
-                              ('(.*%s)[-a]([^%s]*)$' % ((os.path.sep, ) * 2),
-                               r'\1!\2')])
-    setattr(ds.inputs, '@outdir', files)
+        regexp_substitutions=[
+            (r"xABABAB(\w*)\.n$", r"a-\1-b.n"),
+            ("(.*%s)[-a]([^%s]*)$" % ((os.path.sep,) * 2), r"\1!\2"),
+        ],
+    )
+    setattr(ds.inputs, "@outdir", files)
     ds.run()
-    assert sorted([os.path.basename(x) for
-                   x in glob.glob(os.path.join(str(outdir), '*'))]) \
-        == ['!-yz-b.n', 'ABABAB.n']  # so we got re used 2nd and both patterns
+    assert sorted(
+        [os.path.basename(x) for x in glob.glob(os.path.join(str(outdir), "*"))]
+    ) == [
+        "!-yz-b.n",
+        "ABABAB.n",
+    ]  # so we got re used 2nd and both patterns
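
The substitutions test is easier to follow once the two passes are separated: plain substitutions run first as literal string replacements on each output path, then every regexp_substitutions pattern runs as a re.sub pass, so a regexp can match text a plain rule just produced. That ordering is implied by the expected result above (the regexp pattern only matches after 'ababab' has become 'ABABAB'); the standalone snippet below merely mimics it on the basename:

    import re

    fname = "xabababyz.n"
    for a, b in [("ababab", "ABABAB")]:  # plain pass: literal replacement
        fname = fname.replace(a, b)
    for pat, repl in [(r"xABABAB(\w*)\.n$", r"a-\1-b.n")]:  # regexp pass
        fname = re.sub(pat, repl, fname)
    print(fname)  # a-yz-b.n  (the test's second regexp then edits this basename)
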
 @pytest.fixture()
 def _temp_analyze_files(tmpdir):
@@ -461,8 +493,8 @@
     img_dir = tmpdir.mkdir("img")
     orig_img = img_dir.join("orig.img")
     orig_hdr = img_dir.join("orig.hdr")
-    orig_img.open('w')
-    orig_hdr.open('w')
+    orig_img.open("w")
+    orig_hdr.open("w")
     return orig_img.strpath, orig_hdr.strpath
@@ -471,40 +503,40 @@
 def test_datasink_copydir_1(_temp_analyze_files, tmpdir):
     outdir = tmpdir
     pth, fname = os.path.split(orig_img)
     ds = nio.DataSink(
-        base_directory=outdir.mkdir("basedir").strpath, parameterization=False)
-    setattr(ds.inputs, '@outdir', pth)
+        base_directory=outdir.mkdir("basedir").strpath, parameterization=False
+    )
+    setattr(ds.inputs, "@outdir", pth)
     ds.run()
     sep = os.path.sep
-    assert tmpdir.join('basedir', pth.split(sep)[-1], fname).check()
+    assert tmpdir.join("basedir", pth.split(sep)[-1], fname).check()


 def test_datasink_copydir_2(_temp_analyze_files, tmpdir):
     orig_img, orig_hdr = _temp_analyze_files
     pth, fname = os.path.split(orig_img)
     ds = nio.DataSink(
-        base_directory=tmpdir.mkdir("basedir").strpath, parameterization=False)
+        base_directory=tmpdir.mkdir("basedir").strpath, parameterization=False
+    )
     ds.inputs.remove_dest_dir = True
-    setattr(ds.inputs, 'outdir', pth)
+    setattr(ds.inputs, "outdir", pth)
     ds.run()
     sep = os.path.sep
-    assert not tmpdir.join('basedir', pth.split(sep)[-1], fname).check()
-    assert tmpdir.join('basedir', 'outdir', pth.split(sep)[-1], fname).check()
+    assert not tmpdir.join("basedir", pth.split(sep)[-1], fname).check()
+    assert tmpdir.join("basedir", "outdir", pth.split(sep)[-1], fname).check()


 def test_datafinder_depth(tmpdir):
     outdir = tmpdir.strpath
-    os.makedirs(os.path.join(outdir, '0', '1', '2', '3'))
+    os.makedirs(os.path.join(outdir, "0", "1", "2", "3"))

     df = nio.DataFinder()
-    df.inputs.root_paths = os.path.join(outdir, '0')
+    df.inputs.root_paths = os.path.join(outdir, "0")
     for min_depth in range(4):
         for max_depth in range(min_depth, 4):
             df.inputs.min_depth = min_depth
             df.inputs.max_depth = max_depth
             result = df.run()
-            expected = [
-                '{}'.format(x) for x in range(min_depth, max_depth + 1)
-            ]
+            expected = ["{}".format(x) for x in range(min_depth, max_depth + 1)]
             for path, exp_fname in zip(result.outputs.out_paths, expected):
                 _, fname = os.path.split(path)
                 assert fname == exp_fname
@@ -513,12 +545,12 @@
 def test_datafinder_unpack(tmpdir):
     outdir = tmpdir.strpath
     single_res = os.path.join(outdir, "findme.txt")
-    open(single_res, 'a').close()
-    open(os.path.join(outdir, "dontfindme"), 'a').close()
+    open(single_res, "a").close()
+    open(os.path.join(outdir, "dontfindme"), "a").close()

     df = nio.DataFinder()
     df.inputs.root_paths = outdir
-    df.inputs.match_regex = r'.+/(?P<basename>.+)\.txt'
+    df.inputs.match_regex = r".+/(?P<basename>.+)\.txt"
     df.inputs.unpack_single = True
     result = df.run()
     print(result.outputs.out_paths)
@@ -527,7 +559,7 @@
 def test_freesurfersource():
     fss = nio.FreeSurferSource()
-    assert fss.inputs.hemi == 'both'
+    assert fss.inputs.hemi == "both"
     assert fss.inputs.subject_id == Undefined
     assert fss.inputs.subjects_dir == Undefined

@@ -535,7 +567,7 @@
 def test_freesurfersource_incorrectdir():
     fss = nio.FreeSurferSource()
     with pytest.raises(TraitError) as err:
-        fss.inputs.subjects_dir = 'path/to/no/existing/directory'
+        fss.inputs.subjects_dir = "path/to/no/existing/directory"


 def test_jsonsink_input():
@@ -543,87 +575,86 @@
     ds = nio.JSONFileSink()
     assert ds.inputs._outputs == {}

-    ds = nio.JSONFileSink(in_dict={'foo': 'var'})
-    assert ds.inputs.in_dict == {'foo': 'var'}
+    ds = nio.JSONFileSink(in_dict={"foo": "var"})
+    assert ds.inputs.in_dict == {"foo": "var"}

-    ds = nio.JSONFileSink(infields=['test'])
-    assert 'test' in ds.inputs.copyable_trait_names()
+    ds = nio.JSONFileSink(infields=["test"])
+    assert "test" in ds.inputs.copyable_trait_names()


-@pytest.mark.parametrize("inputs_attributes", [{
-    'new_entry': 'someValue'
-}, {
-    'new_entry': 'someValue',
-    'test': 'testInfields'
-}])
+@pytest.mark.parametrize(
+    "inputs_attributes",
+    [{"new_entry": "someValue"}, {"new_entry": "someValue", "test": "testInfields"}],
+)
 def test_jsonsink(tmpdir, inputs_attributes):
     tmpdir.chdir()
-    js = nio.JSONFileSink(infields=['test'], in_dict={'foo': 'var'})
-    setattr(js.inputs, 'contrasts.alt', 'someNestedValue')
+    js = nio.JSONFileSink(infields=["test"], in_dict={"foo": "var"})
+    setattr(js.inputs, "contrasts.alt", "someNestedValue")
     expected_data = {"contrasts": {"alt": "someNestedValue"}, "foo": "var"}
     for key, val in inputs_attributes.items():
         setattr(js.inputs, key, val)
         expected_data[key] = val

     res = js.run()
-    with open(res.outputs.out_file, 'r') as f:
+    with open(res.outputs.out_file, "r") as f:
         data = simplejson.load(f)
     assert data == expected_data


 # There are three reasons these tests will be skipped:
-@pytest.mark.skipif(not have_pybids,
-                    reason="Pybids is not installed")
-@pytest.mark.skipif(not dist_is_editable('pybids'),
-                    reason="Pybids is not installed in editable mode")
+@pytest.mark.skipif(not have_pybids, reason="Pybids is not installed")
+@pytest.mark.skipif(
+    not dist_is_editable("pybids"), reason="Pybids is not installed in editable mode"
+)
 def test_bids_grabber(tmpdir):
     tmpdir.chdir()
     bg = nio.BIDSDataGrabber()
-    bg.inputs.base_dir = os.path.join(datadir, 'ds005')
-    bg.inputs.subject = '01'
+    bg.inputs.base_dir = os.path.join(datadir, "ds005")
+    bg.inputs.subject = "01"
     results = bg.run()
-    assert 'sub-01_T1w.nii.gz' in map(os.path.basename, results.outputs.T1w)
-    assert 'sub-01_task-mixedgamblestask_run-01_bold.nii.gz' in \
-        map(os.path.basename, results.outputs.bold)
+    assert "sub-01_T1w.nii.gz" in map(os.path.basename, results.outputs.T1w)
+    assert "sub-01_task-mixedgamblestask_run-01_bold.nii.gz" in map(
+        os.path.basename, results.outputs.bold
+    )


-@pytest.mark.skipif(not have_pybids,
-                    reason="Pybids is not installed")
-@pytest.mark.skipif(not dist_is_editable('pybids'),
-                    reason="Pybids is not installed in editable mode")
+@pytest.mark.skipif(not have_pybids, reason="Pybids is not installed")
+@pytest.mark.skipif(
+    not dist_is_editable("pybids"), reason="Pybids is not installed in editable mode"
+)
 def test_bids_fields(tmpdir):
     tmpdir.chdir()
-    bg = nio.BIDSDataGrabber(infields = ['subject'], outfields = ['dwi'])
-    bg.inputs.base_dir = os.path.join(datadir, 'ds005')
-    bg.inputs.subject = '01'
-    bg.inputs.output_query['dwi'] = dict(datatype='dwi')
+    bg = nio.BIDSDataGrabber(infields=["subject"], outfields=["dwi"])
+    bg.inputs.base_dir = os.path.join(datadir, "ds005")
+    bg.inputs.subject = "01"
+    bg.inputs.output_query["dwi"] = dict(datatype="dwi")
     results = bg.run()
-    assert 'sub-01_dwi.nii.gz' in map(os.path.basename, results.outputs.dwi)
+    assert "sub-01_dwi.nii.gz" in map(os.path.basename, results.outputs.dwi)


-@pytest.mark.skipif(not have_pybids,
-                    reason="Pybids is not installed")
-@pytest.mark.skipif(not dist_is_editable('pybids'),
-                    reason="Pybids is not installed in editable mode")
+@pytest.mark.skipif(not have_pybids, reason="Pybids is not installed")
+@pytest.mark.skipif(
+    not dist_is_editable("pybids"), reason="Pybids is not installed in editable mode"
+)
 def test_bids_infields_outfields(tmpdir):
     tmpdir.chdir()
-    infields = ['infield1', 'infield2']
-    outfields = ['outfield1', 'outfield2']
+    infields = ["infield1", "infield2"]
+    outfields = ["outfield1", "outfield2"]
     bg = nio.BIDSDataGrabber(infields=infields)
     for outfield in outfields:
-        bg.inputs.output_query[outfield] = {'key': 'value'}
+        bg.inputs.output_query[outfield] = {"key": "value"}

     for infield in infields:
-        assert(infield in bg.inputs.traits())
-        assert(not(isdefined(bg.inputs.get()[infield])))
+        assert infield in bg.inputs.traits()
+        assert not (isdefined(bg.inputs.get()[infield]))

     for outfield in outfields:
-        assert(outfield in bg._outputs().traits())
+        assert outfield in bg._outputs().traits()

     # now try without defining outfields
     bg = nio.BIDSDataGrabber()
-    for outfield in ['T1w', 'bold']:
+    for outfield in ["T1w", "bold"]:
         assert outfield in bg._outputs().traits()

@@ -634,11 +665,11 @@ def test_SSHDataGrabber(tmpdir):
     """
     old_cwd = tmpdir.chdir()
-    source_dir = tmpdir.mkdir('source')
-    source_hdr = source_dir.join('somedata.hdr')
-    source_dat = source_dir.join('somedata.img')
-    source_hdr.ensure()  # create
-    source_dat.ensure()  # create
+    source_dir = tmpdir.mkdir("source")
+    source_hdr = source_dir.join("somedata.hdr")
+    source_dat = source_dir.join("somedata.img")
+    source_hdr.ensure()  # create
+    source_dat.ensure()  # create

     # ssh client that connects to localhost, current user, regardless of
     # ~/.ssh/config
@@ -647,21 +678,20 @@
     def _mock_get_ssh_client(self):
         client = paramiko.SSHClient()
         client.load_system_host_keys()
         client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-        client.connect('127.0.0.1', username=os.getenv('USER'), sock=proxy,
-                       timeout=10)
+        client.connect("127.0.0.1", username=os.getenv("USER"), sock=proxy, timeout=10)
         return client
+
     MockSSHDataGrabber = copy.copy(nio.SSHDataGrabber)
     MockSSHDataGrabber._get_ssh_client = _mock_get_ssh_client

     # grabber to get files from source_dir matching test.hdr
-    ssh_grabber = MockSSHDataGrabber(infields=['test'],
-                                     outfields=['test_file'])
+    ssh_grabber = MockSSHDataGrabber(infields=["test"], outfields=["test_file"])
     ssh_grabber.inputs.base_directory = str(source_dir)
-    ssh_grabber.inputs.hostname = '127.0.0.1'
-    ssh_grabber.inputs.field_template = dict(test_file='%s.hdr')
-    ssh_grabber.inputs.template = ''
-    ssh_grabber.inputs.template_args = dict(test_file=[['test']])
-    ssh_grabber.inputs.test = 'somedata'
+    ssh_grabber.inputs.hostname = "127.0.0.1"
+    ssh_grabber.inputs.field_template = dict(test_file="%s.hdr")
+    ssh_grabber.inputs.template = ""
+    ssh_grabber.inputs.template_args = dict(test_file=[["test"]])
+    ssh_grabber.inputs.test = "somedata"
     ssh_grabber.inputs.sort_filelist = True

     runtime = ssh_grabber.run()
@@ -669,31 +699,33 @@
     # did we successfully get the header?
     assert runtime.outputs.test_file == str(tmpdir.join(source_hdr.basename))
     # did we successfully get the data?
-    assert (tmpdir.join(source_hdr.basename)  # header file
-            .new(ext='.img')  # data file
-            .check(file=True, exists=True))  # exists?
+    assert (
+        tmpdir.join(source_hdr.basename)  # header file
+        .new(ext=".img")  # data file
+        .check(file=True, exists=True)
+    )  # exists?
old_cwd.chdir() def test_ExportFile(tmp_path): - testin = tmp_path / 'in.txt' - testin.write_text('test string') + testin = tmp_path / "in.txt" + testin.write_text("test string") i = nio.ExportFile() i.inputs.in_file = str(testin) - i.inputs.out_file = str(tmp_path / 'out.tsv') + i.inputs.out_file = str(tmp_path / "out.tsv") i.inputs.check_extension = True with pytest.raises(RuntimeError): i.run() i.inputs.check_extension = False i.run() - assert (tmp_path / 'out.tsv').read_text() == 'test string' - i.inputs.out_file = str(tmp_path / 'out.txt') + assert (tmp_path / "out.tsv").read_text() == "test string" + i.inputs.out_file = str(tmp_path / "out.txt") i.inputs.check_extension = True i.run() - assert (tmp_path / 'out.txt').read_text() == 'test string' + assert (tmp_path / "out.txt").read_text() == "test string" with pytest.raises(FileExistsError): i.run() i.inputs.clobber = True i.run() - assert (tmp_path / 'out.txt').read_text() == 'test string' + assert (tmp_path / "out.txt").read_text() == "test string" diff --git a/nipype/interfaces/tests/test_matlab.py b/nipype/interfaces/tests/test_matlab.py index 2576a379e7..64f1de846f 100644 --- a/nipype/interfaces/tests/test_matlab.py +++ b/nipype/interfaces/tests/test_matlab.py @@ -26,24 +26,24 @@ def clean_workspace_and_get_default_script_file(): def test_cmdline(): default_script_file = clean_workspace_and_get_default_script_file() - mi = mlab.MatlabCommand( - script='whos', script_file='testscript', mfile=False) - - assert mi.cmdline == \ - matlab_cmd + (' -nodesktop -nosplash -singleCompThread -r "fprintf(1,' - '\'Executing code at %s:\\n\',datestr(now));ver,try,' - 'whos,catch ME,fprintf(2,\'MATLAB code threw an ' - 'exception:\\n\');fprintf(2,\'%s\\n\',ME.message);if ' - 'length(ME.stack) ~= 0, fprintf(2,\'File:%s\\nName:%s\\n' - 'Line:%d\\n\',ME.stack.file,ME.stack.name,' - 'ME.stack.line);, end;end;;exit"') - - assert mi.inputs.script == 'whos' - assert mi.inputs.script_file == 'testscript' + mi = mlab.MatlabCommand(script="whos", script_file="testscript", mfile=False) + + assert mi.cmdline == matlab_cmd + ( + ' -nodesktop -nosplash -singleCompThread -r "fprintf(1,' + "'Executing code at %s:\\n',datestr(now));ver,try," + "whos,catch ME,fprintf(2,'MATLAB code threw an " + "exception:\\n');fprintf(2,'%s\\n',ME.message);if " + "length(ME.stack) ~= 0, fprintf(2,'File:%s\\nName:%s\\n" + "Line:%d\\n',ME.stack.file,ME.stack.name," + 'ME.stack.line);, end;end;;exit"' + ) + + assert mi.inputs.script == "whos" + assert mi.inputs.script_file == "testscript" + assert not os.path.exists(mi.inputs.script_file), "scriptfile should not exist" assert not os.path.exists( - mi.inputs.script_file), 'scriptfile should not exist' - assert not os.path.exists( - default_script_file), 'default scriptfile should not exist.' + default_script_file + ), "default scriptfile should not exist." 
@pytest.mark.skipif(no_matlab, reason="matlab is not available") @@ -51,8 +51,13 @@ def test_mlab_inputspec(): default_script_file = clean_workspace_and_get_default_script_file() spec = mlab.MatlabInputSpec() for k in [ - 'paths', 'script', 'nosplash', 'mfile', 'logfile', 'script_file', - 'nodesktop' + "paths", + "script", + "nosplash", + "mfile", + "logfile", + "script_file", + "nodesktop", ]: assert k in spec.copyable_trait_names() assert spec.nodesktop @@ -65,54 +70,49 @@ def test_mlab_inputspec(): def test_mlab_init(): default_script_file = clean_workspace_and_get_default_script_file() - assert mlab.MatlabCommand._cmd == 'matlab' + assert mlab.MatlabCommand._cmd == "matlab" assert mlab.MatlabCommand.input_spec == mlab.MatlabInputSpec assert mlab.MatlabCommand().cmd == matlab_cmd - mc = mlab.MatlabCommand(matlab_cmd='foo_m') - assert mc.cmd == 'foo_m' + mc = mlab.MatlabCommand(matlab_cmd="foo_m") + assert mc.cmd == "foo_m" @pytest.mark.skipif(no_matlab, reason="matlab is not available") def test_run_interface(tmpdir): default_script_file = clean_workspace_and_get_default_script_file() - mc = mlab.MatlabCommand(matlab_cmd='foo_m') - assert not os.path.exists( - default_script_file), 'scriptfile should not exist 1.' + mc = mlab.MatlabCommand(matlab_cmd="foo_m") + assert not os.path.exists(default_script_file), "scriptfile should not exist 1." with pytest.raises(ValueError): mc.run() # script is mandatory - assert not os.path.exists( - default_script_file), 'scriptfile should not exist 2.' + assert not os.path.exists(default_script_file), "scriptfile should not exist 2." if os.path.exists(default_script_file): # cleanup os.remove(default_script_file) - mc.inputs.script = 'a=1;' - assert not os.path.exists( - default_script_file), 'scriptfile should not exist 3.' + mc.inputs.script = "a=1;" + assert not os.path.exists(default_script_file), "scriptfile should not exist 3." with pytest.raises(IOError): mc.run() # foo_m is not an executable - assert os.path.exists(default_script_file), 'scriptfile should exist 3.' + assert os.path.exists(default_script_file), "scriptfile should exist 3." if os.path.exists(default_script_file): # cleanup os.remove(default_script_file) cwd = tmpdir.chdir() # bypasses ubuntu dash issue - mc = mlab.MatlabCommand(script='foo;', paths=[tmpdir.strpath], mfile=True) - assert not os.path.exists( - default_script_file), 'scriptfile should not exist 4.' + mc = mlab.MatlabCommand(script="foo;", paths=[tmpdir.strpath], mfile=True) + assert not os.path.exists(default_script_file), "scriptfile should not exist 4." with pytest.raises(RuntimeError): mc.run() - assert os.path.exists(default_script_file), 'scriptfile should exist 4.' + assert os.path.exists(default_script_file), "scriptfile should exist 4." if os.path.exists(default_script_file): # cleanup os.remove(default_script_file) # bypasses ubuntu dash issue - res = mlab.MatlabCommand( - script='a=1;', paths=[tmpdir.strpath], mfile=True).run() + res = mlab.MatlabCommand(script="a=1;", paths=[tmpdir.strpath], mfile=True).run() assert res.runtime.returncode == 0 - assert os.path.exists(default_script_file), 'scriptfile should exist 5.' + assert os.path.exists(default_script_file), "scriptfile should exist 5." cwd.chdir() @@ -121,8 +121,7 @@ def test_set_matlabcmd(): default_script_file = clean_workspace_and_get_default_script_file() mi = mlab.MatlabCommand() - mi.set_default_matlab_cmd('foo') - assert not os.path.exists( - default_script_file), 'scriptfile should not exist.' 
- assert mi._default_matlab_cmd == 'foo' + mi.set_default_matlab_cmd("foo") + assert not os.path.exists(default_script_file), "scriptfile should not exist." + assert mi._default_matlab_cmd == "foo" mi.set_default_matlab_cmd(matlab_cmd) diff --git a/nipype/interfaces/tests/test_nilearn.py b/nipype/interfaces/tests/test_nilearn.py index 79432bc180..6c3a52a670 100644 --- a/nipype/interfaces/tests/test_nilearn.py +++ b/nipype/interfaces/tests/test_nilearn.py @@ -14,38 +14,39 @@ no_nilearn = True try: - __import__('nilearn') + __import__("nilearn") no_nilearn = False except ImportError: pass @pytest.mark.skipif(no_nilearn, reason="the nilearn library is not available") -class TestSignalExtraction(): +class TestSignalExtraction: filenames = { - 'in_file': 'fmri.nii', - 'label_files': 'labels.nii', - '4d_label_file': '4dlabels.nii', - 'out_file': 'signals.tsv' + "in_file": "fmri.nii", + "label_files": "labels.nii", + "4d_label_file": "4dlabels.nii", + "out_file": "signals.tsv", } - labels = ['CSF', 'GrayMatter', 'WhiteMatter'] - global_labels = ['GlobalSignal'] + labels + labels = ["CSF", "GrayMatter", "WhiteMatter"] + global_labels = ["GlobalSignal"] + labels - @pytest.fixture(autouse=True, scope='class') + @pytest.fixture(autouse=True, scope="class") def setup_class(self, tmpdir_factory): tempdir = tmpdir_factory.mktemp("test") self.orig_dir = tempdir.chdir() - utils.save_toy_nii(self.fake_fmri_data, self.filenames['in_file']) - utils.save_toy_nii(self.fake_label_data, self.filenames['label_files']) + utils.save_toy_nii(self.fake_fmri_data, self.filenames["in_file"]) + utils.save_toy_nii(self.fake_label_data, self.filenames["label_files"]) def test_signal_extract_no_shared(self): # run iface.SignalExtraction( - in_file=self.filenames['in_file'], - label_files=self.filenames['label_files'], + in_file=self.filenames["in_file"], + label_files=self.filenames["label_files"], class_labels=self.labels, - incl_shared_variance=False).run() + incl_shared_variance=False, + ).run() # assert self.assert_expected_output(self.labels, self.base_wanted) @@ -53,44 +54,44 @@ def test_signal_extr_bad_label_list(self): # run with pytest.raises(ValueError): iface.SignalExtraction( - in_file=self.filenames['in_file'], - label_files=self.filenames['label_files'], - class_labels=['bad'], - incl_shared_variance=False).run() + in_file=self.filenames["in_file"], + label_files=self.filenames["label_files"], + class_labels=["bad"], + incl_shared_variance=False, + ).run() def test_signal_extr_equiv_4d_no_shared(self): self._test_4d_label( - self.base_wanted, - self.fake_equiv_4d_label_data, - incl_shared_variance=False) + self.base_wanted, self.fake_equiv_4d_label_data, incl_shared_variance=False + ) def test_signal_extr_4d_no_shared(self): # set up & run & assert self._test_4d_label( - self.fourd_wanted, - self.fake_4d_label_data, - incl_shared_variance=False) + self.fourd_wanted, self.fake_4d_label_data, incl_shared_variance=False + ) def test_signal_extr_global_no_shared(self): # set up - wanted_global = [[-4. / 6], [-1. / 6], [3. / 6], [-1. / 6], [-7. 
/ 6]] + wanted_global = [[-4.0 / 6], [-1.0 / 6], [3.0 / 6], [-1.0 / 6], [-7.0 / 6]] for i, vals in enumerate(self.base_wanted): wanted_global[i].extend(vals) # run iface.SignalExtraction( - in_file=self.filenames['in_file'], - label_files=self.filenames['label_files'], + in_file=self.filenames["in_file"], + label_files=self.filenames["label_files"], class_labels=self.labels, include_global=True, - incl_shared_variance=False).run() + incl_shared_variance=False, + ).run() # assert self.assert_expected_output(self.global_labels, wanted_global) def test_signal_extr_4d_global_no_shared(self): # set up - wanted_global = [[3. / 8], [-3. / 8], [1. / 8], [-7. / 8], [-9. / 8]] + wanted_global = [[3.0 / 8], [-3.0 / 8], [1.0 / 8], [-7.0 / 8], [-9.0 / 8]] for i, vals in enumerate(self.fourd_wanted): wanted_global[i].extend(vals) @@ -99,7 +100,8 @@ def test_signal_extr_4d_global_no_shared(self): wanted_global, self.fake_4d_label_data, include_global=True, - incl_shared_variance=False) + incl_shared_variance=False, + ) def test_signal_extr_shared(self): # set up @@ -109,45 +111,45 @@ def test_signal_extr_shared(self): wanted_row = [] for reg in range(self.fake_4d_label_data.shape[3]): region = self.fake_4d_label_data[:, :, :, reg].flatten() - wanted_row.append( - (volume * region).sum() / (region * region).sum()) + wanted_row.append((volume * region).sum() / (region * region).sum()) wanted.append(wanted_row) # run & assert self._test_4d_label(wanted, self.fake_4d_label_data) def test_signal_extr_traits_valid(self): - ''' Test a node using the SignalExtraction interface. + """ Test a node using the SignalExtraction interface. Unlike interface.run(), node.run() checks the traits - ''' + """ # run node = pe.Node( iface.SignalExtraction( - in_file=os.path.abspath(self.filenames['in_file']), - label_files=os.path.abspath(self.filenames['label_files']), + in_file=os.path.abspath(self.filenames["in_file"]), + label_files=os.path.abspath(self.filenames["label_files"]), class_labels=self.labels, - incl_shared_variance=False), - name='SignalExtraction') + incl_shared_variance=False, + ), + name="SignalExtraction", + ) node.run() # assert # just checking that it passes trait validations - def _test_4d_label(self, - wanted, - fake_labels, - include_global=False, - incl_shared_variance=True): + def _test_4d_label( + self, wanted, fake_labels, include_global=False, incl_shared_variance=True + ): # set up - utils.save_toy_nii(fake_labels, self.filenames['4d_label_file']) + utils.save_toy_nii(fake_labels, self.filenames["4d_label_file"]) # run iface.SignalExtraction( - in_file=self.filenames['in_file'], - label_files=self.filenames['4d_label_file'], + in_file=self.filenames["in_file"], + label_files=self.filenames["4d_label_file"], class_labels=self.labels, incl_shared_variance=incl_shared_variance, - include_global=include_global).run() + include_global=include_global, + ).run() wanted_labels = self.global_labels if include_global else self.labels @@ -155,12 +157,11 @@ def _test_4d_label(self, self.assert_expected_output(wanted_labels, wanted) def assert_expected_output(self, labels, wanted): - with open(self.filenames['out_file'], 'r') as output: + with open(self.filenames["out_file"], "r") as output: got = [line.split() for line in output] labels_got = got.pop(0) # remove header assert labels_got == labels - assert len(got) == self.fake_fmri_data.shape[ - 3], 'num rows and num volumes' + assert len(got) == self.fake_fmri_data.shape[3], "num rows and num volumes" # convert from string to float got = [[float(num) for 
num in row] for row in got]
         for i, time in enumerate(got):
@@ -168,33 +169,52 @@ def assert_expected_output(self, labels, wanted):
             for j, segment in enumerate(time):
                 npt.assert_almost_equal(segment, wanted[i][j], decimal=1)

-
-# dj: self doesnt have orig_dir at this point, not sure how to change it.
-# should work without it
-# def teardown_class(self):
-# self.orig_dir.chdir()
-
-    fake_fmri_data = np.array([[[[2, -1, 4, -2, 3], [4, -2, -5, -1, 0]],
-                                [[-2, 0, 1, 4, 4], [-5, 3, -3, 1, -5]]],
-                               [[[2, -2, -1, -2, -5], [3, 0, 3, -5, -2]],
-                                [[-4, -2, -2, 1, -2], [3, 1, 4, -3, -2]]]])
+    # dj: self doesn't have orig_dir at this point, not sure how to change it.
+    # should work without it
+    # def teardown_class(self):
+    # self.orig_dir.chdir()
+
+    fake_fmri_data = np.array(
+        [
+            [
+                [[2, -1, 4, -2, 3], [4, -2, -5, -1, 0]],
+                [[-2, 0, 1, 4, 4], [-5, 3, -3, 1, -5]],
+            ],
+            [
+                [[2, -2, -1, -2, -5], [3, 0, 3, -5, -2]],
+                [[-4, -2, -2, 1, -2], [3, 1, 4, -3, -2]],
+            ],
+        ]
+    )

     fake_label_data = np.array([[[1, 0], [3, 1]], [[2, 0], [1, 3]]])

     fake_equiv_4d_label_data = np.array(
-        [[[[1., 0., 0.], [0., 0., 0.]], [[0., 0., 1.], [1., 0., 0.]]],
-         [[[0., 1., 0.], [0., 0., 0.]], [[1., 0., 0.], [0., 0., 1.]]]])
-
-    base_wanted = [[-2.33333, 2, .5], [0, -2, .5], [-.3333333, -1, 2.5],
-                   [0, -2, .5], [-1.3333333, -5, 1]]
-
-    fake_4d_label_data = np.array([[[[0.2, 0.3, 0.5], [0.1, 0.1, 0.8]],
-                                    [[0.1, 0.3, 0.6], [0.3, 0.4, 0.3]]],
-                                   [[[0.2, 0.2, 0.6], [0., 0.3, 0.7]],
-                                    [[0.3, 0.3, 0.4], [0.3, 0.4, 0.3]]]])
-
-    fourd_wanted = [[-5.0652173913, -5.44565217391, 5.50543478261], [
-        -7.02173913043, 11.1847826087, -4.33152173913
-    ], [-19.0869565217, 21.2391304348,
-        -4.57608695652], [5.19565217391, -3.66304347826, -1.51630434783],
-                    [-12.0, 3., 0.5]]
+        [
+            [[[1.0, 0.0, 0.0], [0.0, 0.0, 0.0]], [[0.0, 0.0, 1.0], [1.0, 0.0, 0.0]]],
+            [[[0.0, 1.0, 0.0], [0.0, 0.0, 0.0]], [[1.0, 0.0, 0.0], [0.0, 0.0, 1.0]]],
+        ]
+    )
+
+    base_wanted = [
+        [-2.33333, 2, 0.5],
+        [0, -2, 0.5],
+        [-0.3333333, -1, 2.5],
+        [0, -2, 0.5],
+        [-1.3333333, -5, 1],
+    ]
+
+    fake_4d_label_data = np.array(
+        [
+            [[[0.2, 0.3, 0.5], [0.1, 0.1, 0.8]], [[0.1, 0.3, 0.6], [0.3, 0.4, 0.3]]],
+            [[[0.2, 0.2, 0.6], [0.0, 0.3, 0.7]], [[0.3, 0.3, 0.4], [0.3, 0.4, 0.3]]],
+        ]
+    )
+
+    fourd_wanted = [
+        [-5.0652173913, -5.44565217391, 5.50543478261],
+        [-7.02173913043, 11.1847826087, -4.33152173913],
+        [-19.0869565217, 21.2391304348, -4.57608695652],
+        [5.19565217391, -3.66304347826, -1.51630434783],
+        [-12.0, 3.0, 0.5],
+    ]
diff --git a/nipype/interfaces/utility/__init__.py b/nipype/interfaces/utility/__init__.py
index 084acb569c..f5556e7263 100644
--- a/nipype/interfaces/utility/__init__.py
+++ b/nipype/interfaces/utility/__init__.py
@@ -7,7 +7,6 @@
 Requires Packages to be installed
 """
-from .base import (IdentityInterface, Rename, Select, Split, Merge,
-                   AssertEqual)
+from .base import IdentityInterface, Rename, Select, Split, Merge, AssertEqual
 from .csv import CSVReader
 from .wrappers import Function
diff --git a/nipype/interfaces/utility/base.py b/nipype/interfaces/utility/base.py
index 3261be0c53..9ff13011b7 100644
--- a/nipype/interfaces/utility/base.py
+++ b/nipype/interfaces/utility/base.py
@@ -10,9 +10,20 @@
 import re
 import numpy as np

-from ..base import (traits, TraitedSpec, DynamicTraitedSpec, File, Undefined,
-                    isdefined, OutputMultiPath, InputMultiPath, BaseInterface,
-                    BaseInterfaceInputSpec, Str, SimpleInterface)
+from ..base import (
+    traits,
+    TraitedSpec,
+    DynamicTraitedSpec,
+    File,
+    Undefined,
+    isdefined,
+    OutputMultiPath,
+    
InputMultiPath, + BaseInterface, + BaseInterfaceInputSpec, + Str, + SimpleInterface, +) from ..io import IOBase, add_traits from ...utils.filemanip import ensure_list, copyfile, split_filename @@ -42,20 +53,20 @@ class IdentityInterface(IOBase): >>> out = ii2.run() # doctest: +SKIP ValueError: IdentityInterface requires a value for input 'b' because it was listed in 'fields' Interface IdentityInterface failed to run. """ + input_spec = DynamicTraitedSpec output_spec = DynamicTraitedSpec def __init__(self, fields=None, mandatory_inputs=True, **inputs): super(IdentityInterface, self).__init__(**inputs) if fields is None or not fields: - raise ValueError( - 'Identity Interface fields must be a non-empty list') + raise ValueError("Identity Interface fields must be a non-empty list") # Each input must be in the fields. for in_field in inputs: if in_field not in fields: raise ValueError( - 'Identity Interface input is not in the fields: %s' % - in_field) + "Identity Interface input is not in the fields: %s" % in_field + ) self._fields = fields self._mandatory_inputs = mandatory_inputs add_traits(self.inputs, fields) @@ -73,9 +84,11 @@ def _list_outputs(self): for key in self._fields: value = getattr(self.inputs, key) if not isdefined(value): - msg = "%s requires a value for input '%s' because it was listed in 'fields'. \ - You can turn off mandatory inputs checking by passing mandatory_inputs = False to the constructor." % \ - (self.__class__.__name__, key) + msg = ( + "%s requires a value for input '%s' because it was listed in 'fields'. \ + You can turn off mandatory inputs checking by passing mandatory_inputs = False to the constructor." + % (self.__class__.__name__, key) + ) raise ValueError(msg) outputs = self._outputs().get() @@ -88,22 +101,23 @@ def _list_outputs(self): class MergeInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): axis = traits.Enum( - 'vstack', - 'hstack', + "vstack", + "hstack", usedefault=True, - desc= - 'direction in which to merge, hstack requires same number of elements in each input' + desc="direction in which to merge, hstack requires same number of elements in each input", ) no_flatten = traits.Bool( False, usedefault=True, - desc='append to outlist instead of extending in vstack mode') + desc="append to outlist instead of extending in vstack mode", + ) ravel_inputs = traits.Bool( - False, usedefault=True, desc='ravel inputs when no_flatten is False') + False, usedefault=True, desc="ravel inputs when no_flatten is False" + ) class MergeOutputSpec(TraitedSpec): - out = traits.List(desc='Merged output') + out = traits.List(desc="Merged output") def _ravel(in_val): @@ -156,6 +170,7 @@ class Merge(IOBase): >>> out.outputs.out [[1, [2, 5], 3]] """ + input_spec = MergeInputSpec output_spec = MergeOutputSpec @@ -163,7 +178,7 @@ def __init__(self, numinputs=0, **inputs): super(Merge, self).__init__(**inputs) self._numinputs = numinputs if numinputs >= 1: - input_names = ['in%d' % (i + 1) for i in range(numinputs)] + input_names = ["in%d" % (i + 1) for i in range(numinputs)] else: input_names = [] add_traits(self.inputs, input_names) @@ -175,42 +190,42 @@ def _list_outputs(self): if self._numinputs < 1: return outputs else: - getval = lambda idx: getattr(self.inputs, 'in%d' % (idx + 1)) + getval = lambda idx: getattr(self.inputs, "in%d" % (idx + 1)) values = [ - getval(idx) for idx in range(self._numinputs) - if isdefined(getval(idx)) + getval(idx) for idx in range(self._numinputs) if isdefined(getval(idx)) ] - if self.inputs.axis == 'vstack': + if self.inputs.axis == 
"vstack": for value in values: if isinstance(value, list) and not self.inputs.no_flatten: - out.extend( - _ravel(value) if self.inputs.ravel_inputs else value) + out.extend(_ravel(value) if self.inputs.ravel_inputs else value) else: out.append(value) else: lists = [ensure_list(val) for val in values] out = [[val[i] for val in lists] for i in range(len(lists[0]))] - outputs['out'] = out + outputs["out"] = out return outputs class RenameInputSpec(DynamicTraitedSpec): in_file = File(exists=True, mandatory=True, desc="file to rename") keep_ext = traits.Bool( - desc=("Keep in_file extension, replace " - "non-extension component of name")) + desc=("Keep in_file extension, replace " "non-extension component of name") + ) format_string = Str( - mandatory=True, desc="Python formatting string for output template") - parse_string = Str(desc="Python regexp parse string to define " - "replacement inputs") + mandatory=True, desc="Python formatting string for output template" + ) + parse_string = Str( + desc="Python regexp parse string to define " "replacement inputs" + ) use_fullpath = traits.Bool( - False, usedefault=True, desc="Use full path as input to regex parser") + False, usedefault=True, desc="Use full path as input to regex parser" + ) class RenameOutputSpec(TraitedSpec): - out_file = File( - exists=True, desc="softlink to original file with new name") + out_file = File(exists=True, desc="softlink to original file with new name") class Rename(SimpleInterface, IOBase): @@ -255,6 +270,7 @@ class Rename(SimpleInterface, IOBase): 'subj_201_epi_run02.nii' # doctest: +SKIP """ + input_spec = RenameInputSpec output_spec = RenameOutputSpec @@ -270,12 +286,12 @@ def __init__(self, format_string=None, **inputs): def _rename(self): fmt_dict = dict() if isdefined(self.inputs.parse_string): - if isdefined( - self.inputs.use_fullpath) and self.inputs.use_fullpath: + if isdefined(self.inputs.use_fullpath) and self.inputs.use_fullpath: m = re.search(self.inputs.parse_string, self.inputs.in_file) else: - m = re.search(self.inputs.parse_string, - os.path.split(self.inputs.in_file)[1]) + m = re.search( + self.inputs.parse_string, os.path.split(self.inputs.in_file)[1] + ) if m: fmt_dict.update(m.groupdict()) for field in self.fmt_fields: @@ -283,10 +299,9 @@ def _rename(self): if isdefined(val): fmt_dict[field] = getattr(self.inputs, field) if self.inputs.keep_ext: - fmt_string = "".join([ - self.inputs.format_string, - split_filename(self.inputs.in_file)[2] - ]) + fmt_string = "".join( + [self.inputs.format_string, split_filename(self.inputs.in_file)[2]] + ) else: fmt_string = self.inputs.format_string return fmt_string % fmt_dict @@ -295,22 +310,20 @@ def _run_interface(self, runtime): runtime.returncode = 0 out_file = os.path.join(runtime.cwd, self._rename()) _ = copyfile(self.inputs.in_file, out_file) - self._results['out_file'] = out_file + self._results["out_file"] = out_file return runtime class SplitInputSpec(BaseInterfaceInputSpec): - inlist = traits.List( - traits.Any, mandatory=True, desc='list of values to split') + inlist = traits.List(traits.Any, mandatory=True, desc="list of values to split") splits = traits.List( traits.Int, mandatory=True, - desc='Number of outputs in each split - should add to number of inputs' + desc="Number of outputs in each split - should add to number of inputs", ) squeeze = traits.Bool( - False, - usedefault=True, - desc='unfold one-element splits removing the list') + False, usedefault=True, desc="unfold one-element splits removing the list" + ) class Split(IOBase): @@ 
-334,7 +347,7 @@ class Split(IOBase):
     def _add_output_traits(self, base):
         undefined_traits = {}
         for i in range(len(self.inputs.splits)):
-            key = 'out%d' % (i + 1)
+            key = "out%d" % (i + 1)
             base.add_trait(key, traits.Any)
             undefined_traits[key] = Undefined
         base.trait_set(trait_change_notify=False, **undefined_traits)
@@ -344,28 +357,29 @@ def _list_outputs(self):
         outputs = self._outputs().get()
         if isdefined(self.inputs.splits):
             if sum(self.inputs.splits) != len(self.inputs.inlist):
-                raise RuntimeError('sum of splits != num of list elements')
+                raise RuntimeError("sum of splits != num of list elements")
             splits = [0]
             splits.extend(self.inputs.splits)
             splits = np.cumsum(splits)
             for i in range(len(splits) - 1):
-                val = np.array(
-                    self.inputs.inlist)[splits[i]:splits[i + 1]].tolist()
+                val = np.array(self.inputs.inlist)[splits[i] : splits[i + 1]].tolist()
                 if self.inputs.squeeze and len(val) == 1:
                     val = val[0]
-                outputs['out%d' % (i + 1)] = val
+                outputs["out%d" % (i + 1)] = val
         return outputs


 class SelectInputSpec(BaseInterfaceInputSpec):
     inlist = InputMultiPath(
-        traits.Any, mandatory=True, desc='list of values to choose from')
+        traits.Any, mandatory=True, desc="list of values to choose from"
+    )
     index = InputMultiPath(
-        traits.Int, mandatory=True, desc='0-based indices of values to choose')
+        traits.Int, mandatory=True, desc="0-based indices of values to choose"
+    )


 class SelectOutputSpec(TraitedSpec):
-    out = OutputMultiPath(traits.Any, desc='list of selected values')
+    out = OutputMultiPath(traits.Any, desc="list of selected values")


 class Select(IOBase):
@@ -393,9 +407,8 @@ class Select(IOBase):

     def _list_outputs(self):
         outputs = self._outputs().get()
-        out = np.array(self.inputs.inlist)[np.array(
-            self.inputs.index)].tolist()
-        outputs['out'] = out
+        out = np.array(self.inputs.inlist)[np.array(self.inputs.index)].tolist()
+        outputs["out"] = out
         return outputs


@@ -409,9 +422,10 @@ class AssertEqual(BaseInterface):

     def _run_interface(self, runtime):
         import nibabel as nb
+
         data1 = nb.load(self.inputs.volume1).get_data()
         data2 = nb.load(self.inputs.volume2).get_data()
         if not np.all(data1 == data2):
-            raise RuntimeError('Input images are not exactly equal')
+            raise RuntimeError("Input images are not exactly equal")
         return runtime
diff --git a/nipype/interfaces/utility/csv.py b/nipype/interfaces/utility/csv.py
index d22b146d74..04cb28438e 100644
--- a/nipype/interfaces/utility/csv.py
+++ b/nipype/interfaces/utility/csv.py
@@ -3,20 +3,17 @@
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """CSV Handling utilities
 """
-from ..base import (traits, TraitedSpec, DynamicTraitedSpec, File,
-                    BaseInterface)
+from ..base import traits, TraitedSpec, DynamicTraitedSpec, File, BaseInterface
 from ..io import add_traits


 class CSVReaderInputSpec(DynamicTraitedSpec, TraitedSpec):
     in_file = File(
-        exists=True,
-        mandatory=True,
-        desc='Input comma-seperated value (CSV) file')
+        exists=True, mandatory=True, desc="Input comma-separated value (CSV) file"
+    )
     header = traits.Bool(
-        False,
-        usedefault=True,
-        desc='True if the first line is a column header')
+        False, usedefault=True, desc="True if the first line is a column header"
+    )


 class CSVReader(BaseInterface):
@@ -46,6 +43,7 @@ class CSVReader(BaseInterface):
     True

     """
+
     input_spec = CSVReaderInputSpec
     output_spec = DynamicTraitedSpec
     _always_run = True
@@ -56,18 +54,17 @@ def _append_entry(self, outputs, entry):
         return outputs

     def _parse_line(self, line):
-        line = line.replace('\n', '')
+        line = line.replace("\n", 
"") + entry = [x.strip() for x in line.split(",")] return entry def _get_outfields(self): - with open(self.inputs.in_file, 'r') as fid: + with open(self.inputs.in_file, "r") as fid: entry = self._parse_line(fid.readline()) if self.inputs.header: self._outfields = tuple(entry) else: - self._outfields = tuple( - ['column_' + str(x) for x in range(len(entry))]) + self._outfields = tuple(["column_" + str(x) for x in range(len(entry))]) return self._outfields def _run_interface(self, runtime): @@ -85,7 +82,7 @@ def _list_outputs(self): isHeader = True for key in self._outfields: outputs[key] = [] # initialize outfields - with open(self.inputs.in_file, 'r') as fid: + with open(self.inputs.in_file, "r") as fid: for line in fid.readlines(): if self.inputs.header and isHeader: # skip header line isHeader = False diff --git a/nipype/interfaces/utility/tests/test_auto_AssertEqual.py b/nipype/interfaces/utility/tests/test_auto_AssertEqual.py index c550a5efba..2045d9149a 100644 --- a/nipype/interfaces/utility/tests/test_auto_AssertEqual.py +++ b/nipype/interfaces/utility/tests/test_auto_AssertEqual.py @@ -4,14 +4,8 @@ def test_AssertEqual_inputs(): input_map = dict( - volume1=dict( - extensions=None, - mandatory=True, - ), - volume2=dict( - extensions=None, - mandatory=True, - ), + volume1=dict(extensions=None, mandatory=True,), + volume2=dict(extensions=None, mandatory=True,), ) inputs = AssertEqual.input_spec() diff --git a/nipype/interfaces/utility/tests/test_auto_CSVReader.py b/nipype/interfaces/utility/tests/test_auto_CSVReader.py index 8bd60f55ad..98adf59f6d 100644 --- a/nipype/interfaces/utility/tests/test_auto_CSVReader.py +++ b/nipype/interfaces/utility/tests/test_auto_CSVReader.py @@ -4,17 +4,15 @@ def test_CSVReader_inputs(): input_map = dict( - header=dict(usedefault=True, ), - in_file=dict( - extensions=None, - mandatory=True, - ), + header=dict(usedefault=True,), in_file=dict(extensions=None, mandatory=True,), ) inputs = CSVReader.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CSVReader_outputs(): output_map = dict() outputs = CSVReader.output_spec() diff --git a/nipype/interfaces/utility/tests/test_auto_Function.py b/nipype/interfaces/utility/tests/test_auto_Function.py index f4e353bb27..f2713a4407 100644 --- a/nipype/interfaces/utility/tests/test_auto_Function.py +++ b/nipype/interfaces/utility/tests/test_auto_Function.py @@ -3,12 +3,14 @@ def test_Function_inputs(): - input_map = dict(function_str=dict(mandatory=True, ), ) + input_map = dict(function_str=dict(mandatory=True,),) inputs = Function.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Function_outputs(): output_map = dict() outputs = Function.output_spec() diff --git a/nipype/interfaces/utility/tests/test_auto_IdentityInterface.py b/nipype/interfaces/utility/tests/test_auto_IdentityInterface.py index be12e0bad7..7adb95ee88 100644 --- a/nipype/interfaces/utility/tests/test_auto_IdentityInterface.py +++ b/nipype/interfaces/utility/tests/test_auto_IdentityInterface.py @@ -9,6 +9,8 @@ def test_IdentityInterface_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_IdentityInterface_outputs(): output_map = dict() outputs = IdentityInterface.output_spec() diff --git 
a/nipype/interfaces/utility/tests/test_auto_Merge.py b/nipype/interfaces/utility/tests/test_auto_Merge.py index a7ed5c3a31..7658529a9d 100644 --- a/nipype/interfaces/utility/tests/test_auto_Merge.py +++ b/nipype/interfaces/utility/tests/test_auto_Merge.py @@ -4,17 +4,19 @@ def test_Merge_inputs(): input_map = dict( - axis=dict(usedefault=True, ), - no_flatten=dict(usedefault=True, ), - ravel_inputs=dict(usedefault=True, ), + axis=dict(usedefault=True,), + no_flatten=dict(usedefault=True,), + ravel_inputs=dict(usedefault=True,), ) inputs = Merge.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Merge_outputs(): - output_map = dict(out=dict(), ) + output_map = dict(out=dict(),) outputs = Merge.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/utility/tests/test_auto_Rename.py b/nipype/interfaces/utility/tests/test_auto_Rename.py index 177f205ef0..a722afac0e 100644 --- a/nipype/interfaces/utility/tests/test_auto_Rename.py +++ b/nipype/interfaces/utility/tests/test_auto_Rename.py @@ -4,22 +4,21 @@ def test_Rename_inputs(): input_map = dict( - format_string=dict(mandatory=True, ), - in_file=dict( - extensions=None, - mandatory=True, - ), + format_string=dict(mandatory=True,), + in_file=dict(extensions=None, mandatory=True,), keep_ext=dict(), parse_string=dict(), - use_fullpath=dict(usedefault=True, ), + use_fullpath=dict(usedefault=True,), ) inputs = Rename.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Rename_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Rename.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/utility/tests/test_auto_Select.py b/nipype/interfaces/utility/tests/test_auto_Select.py index abc81b45da..76f9061446 100644 --- a/nipype/interfaces/utility/tests/test_auto_Select.py +++ b/nipype/interfaces/utility/tests/test_auto_Select.py @@ -3,17 +3,16 @@ def test_Select_inputs(): - input_map = dict( - index=dict(mandatory=True, ), - inlist=dict(mandatory=True, ), - ) + input_map = dict(index=dict(mandatory=True,), inlist=dict(mandatory=True,),) inputs = Select.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Select_outputs(): - output_map = dict(out=dict(), ) + output_map = dict(out=dict(),) outputs = Select.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/utility/tests/test_auto_Split.py b/nipype/interfaces/utility/tests/test_auto_Split.py index 20dbd948dc..901162ecab 100644 --- a/nipype/interfaces/utility/tests/test_auto_Split.py +++ b/nipype/interfaces/utility/tests/test_auto_Split.py @@ -4,15 +4,17 @@ def test_Split_inputs(): input_map = dict( - inlist=dict(mandatory=True, ), - splits=dict(mandatory=True, ), - squeeze=dict(usedefault=True, ), + inlist=dict(mandatory=True,), + splits=dict(mandatory=True,), + squeeze=dict(usedefault=True,), ) inputs = Split.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Split_outputs(): output_map = dict() outputs = Split.output_spec() diff --git 
a/nipype/interfaces/utility/tests/test_base.py b/nipype/interfaces/utility/tests/test_base.py index 0356452638..a19cff16b4 100644 --- a/nipype/interfaces/utility/tests/test_base.py +++ b/nipype/interfaces/utility/tests/test_base.py @@ -22,9 +22,8 @@ def test_rename(tmpdir): # Now a string-formatting version rn = utility.Rename( - in_file="file.txt", - format_string="%(field1)s_file%(field2)d", - keep_ext=True) + in_file="file.txt", format_string="%(field1)s_file%(field2)d", keep_ext=True + ) # Test .input field creation assert hasattr(rn.inputs, "field1") assert hasattr(rn.inputs, "field2") @@ -38,45 +37,42 @@ def test_rename(tmpdir): assert os.path.exists(outfile) -@pytest.mark.parametrize("args, expected", [({}, ([0], [1, 2, 3])), - ({ - "squeeze": True - }, (0, [1, 2, 3]))]) +@pytest.mark.parametrize( + "args, expected", [({}, ([0], [1, 2, 3])), ({"squeeze": True}, (0, [1, 2, 3]))] +) def test_split(tmpdir, args, expected): tmpdir.chdir() node = pe.Node( utility.Split(inlist=list(range(4)), splits=[1, 3], **args), - name='split_squeeze') + name="split_squeeze", + ) res = node.run() assert res.outputs.out1 == expected[0] assert res.outputs.out2 == expected[1] -@pytest.mark.parametrize("args, kwargs, in_lists, expected", [ - ([3], {}, [0, [1, 2], [3, 4, 5]], [0, 1, 2, 3, 4, 5]), - ([0], {}, None, None), - ([], {}, [], []), - ([], {}, [0, [1, 2], [3, 4, 5]], [0, [1, 2], [3, 4, 5]]), - ([3], { - 'axis': 'hstack' - }, [[0], [1, 2], [3, 4, 5]], [[0, 1, 3]]), - ([3], { - 'axis': 'hstack' - }, [[0, 1], [2, 3], [4, 5]], [[0, 2, 4], [1, 3, 5]]), - ([3], { - 'axis': 'hstack' - }, [[0, 1], [2, 3], [4, 5]], [[0, 2, 4], [1, 3, 5]]), -]) +@pytest.mark.parametrize( + "args, kwargs, in_lists, expected", + [ + ([3], {}, [0, [1, 2], [3, 4, 5]], [0, 1, 2, 3, 4, 5]), + ([0], {}, None, None), + ([], {}, [], []), + ([], {}, [0, [1, 2], [3, 4, 5]], [0, [1, 2], [3, 4, 5]]), + ([3], {"axis": "hstack"}, [[0], [1, 2], [3, 4, 5]], [[0, 1, 3]]), + ([3], {"axis": "hstack"}, [[0, 1], [2, 3], [4, 5]], [[0, 2, 4], [1, 3, 5]]), + ([3], {"axis": "hstack"}, [[0, 1], [2, 3], [4, 5]], [[0, 2, 4], [1, 3, 5]]), + ], +) def test_merge(tmpdir, args, kwargs, in_lists, expected): tmpdir.chdir() - node = pe.Node(utility.Merge(*args, **kwargs), name='merge') + node = pe.Node(utility.Merge(*args, **kwargs), name="merge") numinputs = args[0] if args else 0 if numinputs >= 1: for i in range(1, numinputs + 1): - setattr(node.inputs, 'in{:d}'.format(i), in_lists[i - 1]) + setattr(node.inputs, "in{:d}".format(i), in_lists[i - 1]) res = node.run() if numinputs < 1: diff --git a/nipype/interfaces/utility/tests/test_csv.py b/nipype/interfaces/utility/tests/test_csv.py index 3c15c81239..ffd69f000f 100644 --- a/nipype/interfaces/utility/tests/test_csv.py +++ b/nipype/interfaces/utility/tests/test_csv.py @@ -10,7 +10,7 @@ def test_csvReader(tmpdir): lines = ["foo,hello,300.1\n", "bar,world,5\n", "baz,goodbye,0.3\n"] for x in range(2): name = tmpdir.join("testfile.csv").strpath - with open(name, 'w') as fid: + with open(name, "w") as fid: reader = utility.CSVReader() if x % 2 == 0: fid.write(header) @@ -20,10 +20,10 @@ def test_csvReader(tmpdir): reader.inputs.in_file = name out = reader.run() if x % 2 == 0: - assert out.outputs.files == ['foo', 'bar', 'baz'] - assert out.outputs.labels == ['hello', 'world', 'goodbye'] - assert out.outputs.erosion == ['300.1', '5', '0.3'] + assert out.outputs.files == ["foo", "bar", "baz"] + assert out.outputs.labels == ["hello", "world", "goodbye"] + assert out.outputs.erosion == ["300.1", "5", "0.3"] else: - 
assert out.outputs.column_0 == ['foo', 'bar', 'baz'] - assert out.outputs.column_1 == ['hello', 'world', 'goodbye'] - assert out.outputs.column_2 == ['300.1', '5', '0.3'] + assert out.outputs.column_0 == ["foo", "bar", "baz"] + assert out.outputs.column_1 == ["hello", "world", "goodbye"] + assert out.outputs.column_2 == ["300.1", "5", "0.3"] diff --git a/nipype/interfaces/utility/tests/test_wrappers.py b/nipype/interfaces/utility/tests/test_wrappers.py index eb145f51ef..98ee7c7959 100644 --- a/nipype/interfaces/utility/tests/test_wrappers.py +++ b/nipype/interfaces/utility/tests/test_wrappers.py @@ -20,15 +20,18 @@ def test_function(tmpdir): def gen_random_array(size): import numpy as np + return np.random.rand(size, size) f1 = pe.MapNode( utility.Function( - input_names=['size'], - output_names=['random_array'], - function=gen_random_array), - name='random_array', - iterfield=['size']) + input_names=["size"], + output_names=["random_array"], + function=gen_random_array, + ), + name="random_array", + iterfield=["size"], + ) f1.inputs.size = [2, 3, 5] wf = pe.Workflow(name="test_workflow") @@ -38,14 +41,15 @@ def increment_array(in_array): f2 = pe.MapNode( utility.Function(function=increment_array), - name='increment_array', - iterfield=['in_array']) + name="increment_array", + iterfield=["in_array"], + ) - wf.connect(f1, 'random_array', f2, 'in_array') + wf.connect(f1, "random_array", f2, "in_array") f3 = pe.Node(utility.Function(function=concat_sort), name="concat_sort") - wf.connect(f2, 'out', f3, 'in_arrays') + wf.connect(f2, "out", f3, "in_arrays") wf.run() @@ -60,8 +64,10 @@ def should_fail(tmp): utility.Function( input_names=["size"], output_names=["random_array"], - function=make_random_array), - name="should_fail") + function=make_random_array, + ), + name="should_fail", + ) node.inputs.size = 10 node.run() @@ -79,8 +85,10 @@ def test_function_with_imports(tmpdir): input_names=["size"], output_names=["random_array"], function=make_random_array, - imports=["import numpy as np"]), - name="should_not_fail") + imports=["import numpy as np"], + ), + name="should_not_fail", + ) print(node.inputs.function_str) node.inputs.size = 10 node.run() @@ -95,9 +103,7 @@ def test_aux_connect_function(tmpdir): wf = pe.Workflow(name="test_workflow") def _gen_tuple(size): - return [ - 1, - ] * size + return [1,] * size def _sum_and_sub_mul(a, b, c): return (a + b) * c, (a - b) * c @@ -105,33 +111,35 @@ def _sum_and_sub_mul(a, b, c): def _inc(x): return x + 1 - params = pe.Node( - utility.IdentityInterface(fields=['size', 'num']), name='params') + params = pe.Node(utility.IdentityInterface(fields=["size", "num"]), name="params") params.inputs.num = 42 params.inputs.size = 1 gen_tuple = pe.Node( utility.Function( - input_names=['size'], output_names=['tuple'], function=_gen_tuple), - name='gen_tuple') + input_names=["size"], output_names=["tuple"], function=_gen_tuple + ), + name="gen_tuple", + ) ssm = pe.Node( utility.Function( - input_names=['a', 'b', 'c'], - output_names=['sum', 'sub'], - function=_sum_and_sub_mul), - name='sum_and_sub_mul') - - split = pe.Node(utility.Split(splits=[1, 1], squeeze=True), name='split') - - wf.connect([ - (params, gen_tuple, [(("size", _inc), "size")]), - (params, ssm, [(("num", _inc), "c")]), - (gen_tuple, split, [("tuple", "inlist")]), - (split, ssm, [ - (("out1", _inc), "a"), - ("out2", "b"), - ]), - ]) + input_names=["a", "b", "c"], + output_names=["sum", "sub"], + function=_sum_and_sub_mul, + ), + name="sum_and_sub_mul", + ) + + split = 
pe.Node(utility.Split(splits=[1, 1], squeeze=True), name="split") + + wf.connect( + [ + (params, gen_tuple, [(("size", _inc), "size")]), + (params, ssm, [(("num", _inc), "c")]), + (gen_tuple, split, [("tuple", "inlist")]), + (split, ssm, [(("out1", _inc), "a"), ("out2", "b"),]), + ] + ) wf.run() diff --git a/nipype/interfaces/utility/wrappers.py b/nipype/interfaces/utility/wrappers.py index e775c9a540..f638816166 100644 --- a/nipype/interfaces/utility/wrappers.py +++ b/nipype/interfaces/utility/wrappers.py @@ -7,17 +7,22 @@ >>> old = tmp.chdir() """ from ... import logging -from ..base import (traits, DynamicTraitedSpec, Undefined, isdefined, - BaseInterfaceInputSpec) +from ..base import ( + traits, + DynamicTraitedSpec, + Undefined, + isdefined, + BaseInterfaceInputSpec, +) from ..io import IOBase, add_traits from ...utils.filemanip import ensure_list from ...utils.functions import getsource, create_function_from_source -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") class FunctionInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): - function_str = traits.Str(mandatory=True, desc='code for function') + function_str = traits.Str(mandatory=True, desc="code for function") class Function(IOBase): @@ -38,12 +43,14 @@ class Function(IOBase): input_spec = FunctionInputSpec output_spec = DynamicTraitedSpec - def __init__(self, - input_names=None, - output_names='out', - function=None, - imports=None, - **inputs): + def __init__( + self, + input_names=None, + output_names="out", + function=None, + imports=None, + **inputs + ): """ Parameters @@ -66,26 +73,27 @@ def __init__(self, super(Function, self).__init__(**inputs) if function: - if hasattr(function, '__call__'): + if hasattr(function, "__call__"): try: self.inputs.function_str = getsource(function) except IOError: - raise Exception('Interface Function does not accept ' - 'function objects defined interactively ' - 'in a python session') + raise Exception( + "Interface Function does not accept " + "function objects defined interactively " + "in a python session" + ) else: if input_names is None: fninfo = function.__code__ elif isinstance(function, (str, bytes)): self.inputs.function_str = function if input_names is None: - fninfo = create_function_from_source(function, - imports).__code__ + fninfo = create_function_from_source(function, imports).__code__ else: - raise Exception('Unknown type of function') + raise Exception("Unknown type of function") if input_names is None: - input_names = fninfo.co_varnames[:fninfo.co_argcount] - self.inputs.on_trait_change(self._set_function_string, 'function_str') + input_names = fninfo.co_varnames[: fninfo.co_argcount] + self.inputs.on_trait_change(self._set_function_string, "function_str") self._input_names = ensure_list(input_names) self._output_names = ensure_list(output_names) add_traits(self.inputs, [name for name in self._input_names]) @@ -95,20 +103,18 @@ def __init__(self, self._out[name] = None def _set_function_string(self, obj, name, old, new): - if name == 'function_str': - if hasattr(new, '__call__'): + if name == "function_str": + if hasattr(new, "__call__"): function_source = getsource(new) fninfo = new.__code__ elif isinstance(new, (str, bytes)): function_source = new - fninfo = create_function_from_source(new, - self.imports).__code__ + fninfo = create_function_from_source(new, self.imports).__code__ self.inputs.trait_set( - trait_change_notify=False, **{ - '%s' % name: function_source - }) + trait_change_notify=False, **{"%s" % name: 
function_source} + ) # Update input traits - input_names = fninfo.co_varnames[:fninfo.co_argcount] + input_names = fninfo.co_varnames[: fninfo.co_argcount] new_names = set(input_names) - set(self._input_names) add_traits(self.inputs, list(new_names)) self._input_names.extend(new_names) @@ -123,8 +129,9 @@ def _add_output_traits(self, base): def _run_interface(self, runtime): # Create function handle - function_handle = create_function_from_source(self.inputs.function_str, - self.imports) + function_handle = create_function_from_source( + self.inputs.function_str, self.imports + ) # Get function args args = {} for name in self._input_names: @@ -136,9 +143,8 @@ def _run_interface(self, runtime): if len(self._output_names) == 1: self._out[self._output_names[0]] = out else: - if isinstance(out, tuple) and \ - (len(out) != len(self._output_names)): - raise RuntimeError('Mismatch in number of expected outputs') + if isinstance(out, tuple) and (len(out) != len(self._output_names)): + raise RuntimeError("Mismatch in number of expected outputs") else: for idx, name in enumerate(self._output_names): diff --git a/nipype/interfaces/vista/__init__.py b/nipype/interfaces/vista/__init__.py index d0372042aa..c44c4678d3 100644 --- a/nipype/interfaces/vista/__init__.py +++ b/nipype/interfaces/vista/__init__.py @@ -1,4 +1,4 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from .vista import (Vnifti2Image, VtoMat) +from .vista import Vnifti2Image, VtoMat diff --git a/nipype/interfaces/vista/tests/test_auto_Vnifti2Image.py b/nipype/interfaces/vista/tests/test_auto_Vnifti2Image.py index 5168f61ea7..56bf94b7cc 100644 --- a/nipype/interfaces/vista/tests/test_auto_Vnifti2Image.py +++ b/nipype/interfaces/vista/tests/test_auto_Vnifti2Image.py @@ -4,29 +4,17 @@ def test_Vnifti2Image_inputs(): input_map = dict( - args=dict(argstr='%s', ), - attributes=dict( - argstr='-attr %s', - extensions=None, - position=2, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - position=1, - ), + args=dict(argstr="%s",), + attributes=dict(argstr="-attr %s", extensions=None, position=2,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True, position=1,), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, hash_files=False, keep_extension=False, - name_source=['in_file'], - name_template='%s.v', + name_source=["in_file"], + name_template="%s.v", position=-1, ), ) @@ -35,8 +23,10 @@ def test_Vnifti2Image_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_Vnifti2Image_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = Vnifti2Image.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/vista/tests/test_auto_VtoMat.py b/nipype/interfaces/vista/tests/test_auto_VtoMat.py index 788f1e5d9e..46cac1b5b6 100644 --- a/nipype/interfaces/vista/tests/test_auto_VtoMat.py +++ b/nipype/interfaces/vista/tests/test_auto_VtoMat.py @@ -4,24 +4,16 @@ def test_VtoMat_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='-in %s', - extensions=None, - mandatory=True, - position=1, - ), + 
args=dict(argstr="%s",), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="-in %s", extensions=None, mandatory=True, position=1,), out_file=dict( - argstr='-out %s', + argstr="-out %s", extensions=None, hash_files=False, keep_extension=False, - name_source=['in_file'], - name_template='%s.mat', + name_source=["in_file"], + name_template="%s.mat", position=-1, ), ) @@ -30,8 +22,10 @@ def test_VtoMat_inputs(): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_VtoMat_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = VtoMat.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/vista/vista.py b/nipype/interfaces/vista/vista.py index ada6f430f1..4bb941c7f9 100644 --- a/nipype/interfaces/vista/vista.py +++ b/nipype/interfaces/vista/vista.py @@ -7,25 +7,22 @@ class Vnifti2ImageInputSpec(CommandLineInputSpec): in_file = File( - exists=True, - argstr='-in %s', - mandatory=True, - position=1, - desc='in file') - attributes = File( - exists=True, argstr='-attr %s', position=2, desc='attribute file') + exists=True, argstr="-in %s", mandatory=True, position=1, desc="in file" + ) + attributes = File(exists=True, argstr="-attr %s", position=2, desc="attribute file") out_file = File( name_template="%s.v", keep_extension=False, - argstr='-out %s', + argstr="-out %s", hash_files=False, position=-1, - desc='output data file', - name_source=["in_file"]) + desc="output data file", + name_source=["in_file"], + ) class Vnifti2ImageOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='Output vista file') + out_file = File(exists=True, desc="Output vista file") class Vnifti2Image(CommandLine): @@ -42,30 +39,28 @@ class Vnifti2Image(CommandLine): >>> vimage.run() # doctest: +SKIP """ - _cmd = 'vnifti2image' + _cmd = "vnifti2image" input_spec = Vnifti2ImageInputSpec output_spec = Vnifti2ImageOutputSpec class VtoMatInputSpec(CommandLineInputSpec): in_file = File( - exists=True, - argstr='-in %s', - mandatory=True, - position=1, - desc='in file') + exists=True, argstr="-in %s", mandatory=True, position=1, desc="in file" + ) out_file = File( name_template="%s.mat", keep_extension=False, - argstr='-out %s', + argstr="-out %s", hash_files=False, position=-1, - desc='output mat file', - name_source=["in_file"]) + desc="output mat file", + name_source=["in_file"], + ) class VtoMatOutputSpec(TraitedSpec): - out_file = File(exists=True, desc='Output mat file') + out_file = File(exists=True, desc="Output mat file") class VtoMat(CommandLine): @@ -82,6 +77,6 @@ class VtoMat(CommandLine): >>> vimage.run() # doctest: +SKIP """ - _cmd = 'vtomat' + _cmd = "vtomat" input_spec = VtoMatInputSpec output_spec = VtoMatOutputSpec diff --git a/nipype/interfaces/vtkbase.py b/nipype/interfaces/vtkbase.py index 67edef41c3..875ccb61d5 100644 --- a/nipype/interfaces/vtkbase.py +++ b/nipype/interfaces/vtkbase.py @@ -10,32 +10,36 @@ import os from .. 
import logging

-iflogger = logging.getLogger('nipype.interface')
+iflogger = logging.getLogger("nipype.interface")

 # Check that VTK can be imported and get version
 _vtk_version = None
 try:
     import vtk
-    _vtk_version = (vtk.vtkVersion.GetVTKMajorVersion(),
-                    vtk.vtkVersion.GetVTKMinorVersion())
+
+    _vtk_version = (
+        vtk.vtkVersion.GetVTKMajorVersion(),
+        vtk.vtkVersion.GetVTKMinorVersion(),
+    )
 except ImportError:
-    iflogger.warning('VTK was not found')
+    iflogger.warning("VTK was not found")

 # Ensure that tvtk is loaded with the appropriate ETS_TOOLKIT env var
-old_ets = os.getenv('ETS_TOOLKIT')
-os.environ['ETS_TOOLKIT'] = 'null'
+old_ets = os.getenv("ETS_TOOLKIT")
+os.environ["ETS_TOOLKIT"] = "null"
 _have_tvtk = False
 try:
     from tvtk.api import tvtk
+
     _have_tvtk = True
 except ImportError:
-    iflogger.warning('tvtk wasn\'t found')
+    iflogger.warning("tvtk wasn't found")
     tvtk = None
 finally:
     if old_ets is not None:
-        os.environ['ETS_TOOLKIT'] = old_ets
+        os.environ["ETS_TOOLKIT"] = old_ets
     else:
-        del os.environ['ETS_TOOLKIT']
+        del os.environ["ETS_TOOLKIT"]


 def vtk_version():
@@ -60,7 +64,7 @@ def vtk_old():
     """ Checks if VTK uses the old-style pipeline (VTK<6.0) """
     global _vtk_version
     if _vtk_version is None:
-        raise RuntimeException('VTK is not correctly installed.')
+        raise RuntimeError("VTK is not correctly installed.")
     return _vtk_version[0] < 6


diff --git a/nipype/interfaces/workbench/base.py b/nipype/interfaces/workbench/base.py
index 2c2620dbb6..82c12420b9 100644
--- a/nipype/interfaces/workbench/base.py
+++ b/nipype/interfaces/workbench/base.py
@@ -17,7 +17,7 @@
 from ...utils.filemanip import split_filename
 from ..base import CommandLine, PackageInfo

-iflogger = logging.getLogger('nipype.interface')
+iflogger = logging.getLogger("nipype.interface")


 class Info(PackageInfo):
@@ -25,11 +25,11 @@ class Info(PackageInfo):
     Handle `wb_command` version information.
     """

-    version_cmd = 'wb_command -version'
+    version_cmd = "wb_command -version"

     @staticmethod
     def parse_version(raw_info):
-        m = re.search(r'\nVersion (\S+)', raw_info)
+        m = re.search(r"\nVersion (\S+)", raw_info)
         return m.groups()[0] if m else None


@@ -40,7 +40,7 @@ class WBCommand(CommandLine):
     def version(self):
         return Info.version()

-    def _gen_filename(self, name, outdir=None, suffix='', ext=None):
+    def _gen_filename(self, name, outdir=None, suffix="", ext=None):
         """Generate a filename based on the given parameters.
         The filename will take the form: <basename><suffix><ext>.
         Parameters
@@ -63,5 +63,5 @@ def _gen_filename(self, name, outdir=None, suffix='', ext=None):
         if ext is None:
             ext = fext
         if outdir is None:
-            outdir = '.'
+            outdir = "."
         return os.path.join(outdir, fname + suffix + ext)
diff --git a/nipype/interfaces/workbench/cifti.py b/nipype/interfaces/workbench/cifti.py
index fa288aeead..272aec1a3e 100644
--- a/nipype/interfaces/workbench/cifti.py
+++ b/nipype/interfaces/workbench/cifti.py
@@ -2,11 +2,11 @@
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """This module provides interfaces for workbench CIFTI commands"""
-from ..base import (TraitedSpec, File, traits, CommandLineInputSpec)
+from ..base import TraitedSpec, File, traits, CommandLineInputSpec
 from .base import WBCommand
 from ...
import logging -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") class CiftiSmoothInputSpec(CommandLineInputSpec): @@ -15,84 +15,99 @@ class CiftiSmoothInputSpec(CommandLineInputSpec): mandatory=True, argstr="%s", position=0, - desc="The input CIFTI file") + desc="The input CIFTI file", + ) sigma_surf = traits.Float( mandatory=True, argstr="%s", position=1, - desc="the sigma for the gaussian surface smoothing kernel, in mm") + desc="the sigma for the gaussian surface smoothing kernel, in mm", + ) sigma_vol = traits.Float( mandatory=True, argstr="%s", position=2, - desc="the sigma for the gaussian volume smoothing kernel, in mm") + desc="the sigma for the gaussian volume smoothing kernel, in mm", + ) direction = traits.Enum( "ROW", "COLUMN", mandatory=True, argstr="%s", position=3, - desc="which dimension to smooth along, ROW or COLUMN") + desc="which dimension to smooth along, ROW or COLUMN", + ) out_file = File( name_source=["in_file"], name_template="smoothed_%s.nii", keep_extension=True, argstr="%s", position=4, - desc="The output CIFTI") + desc="The output CIFTI", + ) left_surf = File( exists=True, mandatory=True, position=5, argstr="-left-surface %s", - desc="Specify the left surface to use") + desc="Specify the left surface to use", + ) left_corrected_areas = File( exists=True, position=6, argstr="-left-corrected-areas %s", desc="vertex areas (as a metric) to use instead of computing them from " - "the left surface.") + "the left surface.", + ) right_surf = File( exists=True, mandatory=True, position=7, argstr="-right-surface %s", - desc="Specify the right surface to use") + desc="Specify the right surface to use", + ) right_corrected_areas = File( exists=True, position=8, argstr="-right-corrected-areas %s", desc="vertex areas (as a metric) to use instead of computing them from " - "the right surface") + "the right surface", + ) cerebellum_surf = File( exists=True, position=9, argstr="-cerebellum-surface %s", - desc="specify the cerebellum surface to use") + desc="specify the cerebellum surface to use", + ) cerebellum_corrected_areas = File( exists=True, position=10, requires=["cerebellum_surf"], argstr="cerebellum-corrected-areas %s", desc="vertex areas (as a metric) to use instead of computing them from " - "the cerebellum surface") + "the cerebellum surface", + ) cifti_roi = File( exists=True, position=11, argstr="-cifti-roi %s", - desc="CIFTI file for ROI smoothing") + desc="CIFTI file for ROI smoothing", + ) fix_zeros_vol = traits.Bool( position=12, argstr="-fix-zeros-volume", - desc="treat values of zero in the volume as missing data") + desc="treat values of zero in the volume as missing data", + ) fix_zeros_surf = traits.Bool( position=13, argstr="-fix-zeros-surface", - desc="treat values of zero on the surface as missing data") + desc="treat values of zero on the surface as missing data", + ) merged_volume = traits.Bool( position=14, argstr="-merged-volume", - desc="smooth across subcortical structure boundaries") + desc="smooth across subcortical structure boundaries", + ) class CiftiSmoothOutputSpec(TraitedSpec): @@ -135,6 +150,7 @@ class CiftiSmooth(WBCommand): -left-surface sub-01.L.midthickness.32k_fs_LR.surf.gii \ -right-surface sub-01.R.midthickness.32k_fs_LR.surf.gii' """ + input_spec = CiftiSmoothInputSpec output_spec = CiftiSmoothOutputSpec - _cmd = 'wb_command -cifti-smoothing' + _cmd = "wb_command -cifti-smoothing" diff --git a/nipype/interfaces/workbench/metric.py b/nipype/interfaces/workbench/metric.py index 
b3653576a4..6bbe7f98cf 100644 --- a/nipype/interfaces/workbench/metric.py +++ b/nipype/interfaces/workbench/metric.py @@ -4,11 +4,11 @@ """This module provides interfaces for workbench surface commands""" import os -from ..base import (TraitedSpec, File, traits, CommandLineInputSpec) +from ..base import TraitedSpec, File, traits, CommandLineInputSpec from .base import WBCommand from ... import logging -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") class MetricResampleInputSpec(CommandLineInputSpec): @@ -17,20 +17,23 @@ class MetricResampleInputSpec(CommandLineInputSpec): mandatory=True, argstr="%s", position=0, - desc="The metric file to resample") + desc="The metric file to resample", + ) current_sphere = File( exists=True, mandatory=True, argstr="%s", position=1, - desc="A sphere surface with the mesh that the metric is currently on") + desc="A sphere surface with the mesh that the metric is currently on", + ) new_sphere = File( exists=True, mandatory=True, argstr="%s", position=2, desc="A sphere surface that is in register with and" - " has the desired output mesh") + " has the desired output mesh", + ) method = traits.Enum( "ADAP_BARY_AREA", "BARYCENTRIC", @@ -38,51 +41,60 @@ class MetricResampleInputSpec(CommandLineInputSpec): mandatory=True, position=3, desc="The method name - ADAP_BARY_AREA method is recommended for" - " ordinary metric data, because it should use all data while" - " downsampling, unlike BARYCENTRIC. If ADAP_BARY_AREA is used," - " exactly one of area_surfs or area_metrics must be specified") + " ordinary metric data, because it should use all data while" + " downsampling, unlike BARYCENTRIC. If ADAP_BARY_AREA is used," + " exactly one of area_surfs or area_metrics must be specified", + ) out_file = File( name_source=["new_sphere"], name_template="%s.out", keep_extension=True, argstr="%s", position=4, - desc="The output metric") + desc="The output metric", + ) area_surfs = traits.Bool( position=5, argstr="-area-surfs", xor=["area_metrics"], - desc="Specify surfaces to do vertex area correction based on") + desc="Specify surfaces to do vertex area correction based on", + ) area_metrics = traits.Bool( position=5, argstr="-area-metrics", xor=["area_surfs"], - desc="Specify vertex area metrics to do area correction based on") + desc="Specify vertex area metrics to do area correction based on", + ) current_area = File( exists=True, position=6, argstr="%s", desc="A relevant anatomical surface with mesh OR" - " a metric file with vertex areas for mesh") + " a metric file with vertex areas for mesh", + ) new_area = File( exists=True, position=7, argstr="%s", desc="A relevant anatomical surface with mesh OR" - " a metric file with vertex areas for mesh") + " a metric file with vertex areas for mesh", + ) roi_metric = File( exists=True, position=8, argstr="-current-roi %s", - desc="Input roi on the current mesh used to exclude non-data vertices") + desc="Input roi on the current mesh used to exclude non-data vertices", + ) valid_roi_out = traits.Bool( position=9, argstr="-valid-roi-out", - desc="Output the ROI of vertices that got data from valid source vertices") + desc="Output the ROI of vertices that got data from valid source vertices", + ) largest = traits.Bool( position=10, argstr="-largest", - desc="Use only the value of the vertex with the largest weight") + desc="Use only the value of the vertex with the largest weight", + ) class MetricResampleOutputSpec(TraitedSpec): @@ -129,24 +141,30 @@ class 
MetricResample(WBCommand): -area-metrics fsaverage5.L.midthickness_va_avg.10k_fsavg_L.shape.gii \ fs_LR.L.midthickness_va_avg.32k_fs_LR.shape.gii' """ + input_spec = MetricResampleInputSpec output_spec = MetricResampleOutputSpec - _cmd = 'wb_command -metric-resample' + _cmd = "wb_command -metric-resample" def _format_arg(self, opt, spec, val): - if opt in ['current_area', 'new_area']: + if opt in ["current_area", "new_area"]: if not self.inputs.area_surfs and not self.inputs.area_metrics: - raise ValueError("{} was set but neither area_surfs or" - " area_metrics were set".format(opt)) + raise ValueError( + "{} was set but neither area_surfs nor" + " area_metrics was set".format(opt) + ) if opt == "method": - if (val == "ADAP_BARY_AREA" and - not self.inputs.area_surfs and - not self.inputs.area_metrics): - raise ValueError("Exactly one of area_surfs or area_metrics" - " must be specified") + if ( + val == "ADAP_BARY_AREA" + and not self.inputs.area_surfs + and not self.inputs.area_metrics + ): + raise ValueError( + "Exactly one of area_surfs or area_metrics must be specified" + ) if opt == "valid_roi_out" and val: # generate a filename and add it to argstr - roi_out = self._gen_filename(self.inputs.in_file, suffix='_roi') + roi_out = self._gen_filename(self.inputs.in_file, suffix="_roi") - iflogger.info("Setting roi output file as", roi_out) + iflogger.info("Setting roi output file as %s", roi_out) spec.argstr += " " + roi_out return super(MetricResample, self)._format_arg(opt, spec, val) @@ -154,6 +172,6 @@ def _format_arg(self, opt, spec, val): def _list_outputs(self): outputs = super(MetricResample, self)._list_outputs() if self.inputs.valid_roi_out: - roi_file = self._gen_filename(self.inputs.in_file, suffix='_roi') - outputs['roi_file'] = os.path.abspath(roi_file) + roi_file = self._gen_filename(self.inputs.in_file, suffix="_roi") + outputs["roi_file"] = os.path.abspath(roi_file) return outputs diff --git a/nipype/interfaces/workbench/tests/test_auto_CiftiSmooth.py b/nipype/interfaces/workbench/tests/test_auto_CiftiSmooth.py index a045eb29f5..4be8b4aba7 100644 --- a/nipype/interfaces/workbench/tests/test_auto_CiftiSmooth.py +++ b/nipype/interfaces/workbench/tests/test_auto_CiftiSmooth.py @@ -4,98 +4,55 @@ def test_CiftiSmooth_inputs(): input_map = dict( - args=dict(argstr='%s', ), + args=dict(argstr="%s",), cerebellum_corrected_areas=dict( - argstr='cerebellum-corrected-areas %s', + argstr="cerebellum-corrected-areas %s", extensions=None, position=10, - requires=['cerebellum_surf'], + requires=["cerebellum_surf"], ), cerebellum_surf=dict( - argstr='-cerebellum-surface %s', - extensions=None, - position=9, - ), - cifti_roi=dict( - argstr='-cifti-roi %s', - extensions=None, - position=11, - ), - direction=dict( - argstr='%s', - mandatory=True, - position=3, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - fix_zeros_surf=dict( - argstr='-fix-zeros-surface', - position=13, - ), - fix_zeros_vol=dict( - argstr='-fix-zeros-volume', - position=12, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=0, - ), + argstr="-cerebellum-surface %s", extensions=None, position=9, + ), + cifti_roi=dict(argstr="-cifti-roi %s", extensions=None, position=11,), + direction=dict(argstr="%s", mandatory=True, position=3,), + environ=dict(nohash=True, usedefault=True,), + fix_zeros_surf=dict(argstr="-fix-zeros-surface", position=13,), + fix_zeros_vol=dict(argstr="-fix-zeros-volume", position=12,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=0,), left_corrected_areas=dict( - 
argstr='-left-corrected-areas %s', - extensions=None, - position=6, + argstr="-left-corrected-areas %s", extensions=None, position=6, ), left_surf=dict( - argstr='-left-surface %s', - extensions=None, - mandatory=True, - position=5, - ), - merged_volume=dict( - argstr='-merged-volume', - position=14, + argstr="-left-surface %s", extensions=None, mandatory=True, position=5, ), + merged_volume=dict(argstr="-merged-volume", position=14,), out_file=dict( - argstr='%s', + argstr="%s", extensions=None, keep_extension=True, - name_source=['in_file'], - name_template='smoothed_%s.nii', + name_source=["in_file"], + name_template="smoothed_%s.nii", position=4, ), right_corrected_areas=dict( - argstr='-right-corrected-areas %s', - extensions=None, - position=8, + argstr="-right-corrected-areas %s", extensions=None, position=8, ), right_surf=dict( - argstr='-right-surface %s', - extensions=None, - mandatory=True, - position=7, - ), - sigma_surf=dict( - argstr='%s', - mandatory=True, - position=1, - ), - sigma_vol=dict( - argstr='%s', - mandatory=True, - position=2, + argstr="-right-surface %s", extensions=None, mandatory=True, position=7, ), + sigma_surf=dict(argstr="%s", mandatory=True, position=1,), + sigma_vol=dict(argstr="%s", mandatory=True, position=2,), ) inputs = CiftiSmooth.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_CiftiSmooth_outputs(): - output_map = dict(out_file=dict(extensions=None, ), ) + output_map = dict(out_file=dict(extensions=None,),) outputs = CiftiSmooth.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/workbench/tests/test_auto_MetricResample.py b/nipype/interfaces/workbench/tests/test_auto_MetricResample.py index f5ed55874b..eb9201b7f1 100644 --- a/nipype/interfaces/workbench/tests/test_auto_MetricResample.py +++ b/nipype/interfaces/workbench/tests/test_auto_MetricResample.py @@ -4,86 +4,37 @@ def test_MetricResample_inputs(): input_map = dict( - area_metrics=dict( - argstr='-area-metrics', - position=5, - xor=['area_surfs'], - ), - area_surfs=dict( - argstr='-area-surfs', - position=5, - xor=['area_metrics'], - ), - args=dict(argstr='%s', ), - current_area=dict( - argstr='%s', - extensions=None, - position=6, - ), - current_sphere=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=1, - ), - environ=dict( - nohash=True, - usedefault=True, - ), - in_file=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=0, - ), - largest=dict( - argstr='-largest', - position=10, - ), - method=dict( - argstr='%s', - mandatory=True, - position=3, - ), - new_area=dict( - argstr='%s', - extensions=None, - position=7, - ), - new_sphere=dict( - argstr='%s', - extensions=None, - mandatory=True, - position=2, - ), + area_metrics=dict(argstr="-area-metrics", position=5, xor=["area_surfs"],), + area_surfs=dict(argstr="-area-surfs", position=5, xor=["area_metrics"],), + args=dict(argstr="%s",), + current_area=dict(argstr="%s", extensions=None, position=6,), + current_sphere=dict(argstr="%s", extensions=None, mandatory=True, position=1,), + environ=dict(nohash=True, usedefault=True,), + in_file=dict(argstr="%s", extensions=None, mandatory=True, position=0,), + largest=dict(argstr="-largest", position=10,), + method=dict(argstr="%s", mandatory=True, position=3,), + new_area=dict(argstr="%s", extensions=None, position=7,), + new_sphere=dict(argstr="%s", extensions=None, mandatory=True, position=2,), 
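# out_file is name-sourced from new_sphere with template "%s.out", mirroring the MetricResampleInputSpec above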
out_file=dict( - argstr='%s', + argstr="%s", extensions=None, keep_extension=True, - name_source=['new_sphere'], - name_template='%s.out', + name_source=["new_sphere"], + name_template="%s.out", position=4, ), - roi_metric=dict( - argstr='-current-roi %s', - extensions=None, - position=8, - ), - valid_roi_out=dict( - argstr='-valid-roi-out', - position=9, - ), + roi_metric=dict(argstr="-current-roi %s", extensions=None, position=8,), + valid_roi_out=dict(argstr="-valid-roi-out", position=9,), ) inputs = MetricResample.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value + + def test_MetricResample_outputs(): - output_map = dict( - out_file=dict(extensions=None, ), - roi_file=dict(extensions=None, ), - ) + output_map = dict(out_file=dict(extensions=None,), roi_file=dict(extensions=None,),) outputs = MetricResample.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/workbench/tests/test_auto_WBCommand.py b/nipype/interfaces/workbench/tests/test_auto_WBCommand.py index cccba55c95..0a32276e5f 100644 --- a/nipype/interfaces/workbench/tests/test_auto_WBCommand.py +++ b/nipype/interfaces/workbench/tests/test_auto_WBCommand.py @@ -4,11 +4,7 @@ def test_WBCommand_inputs(): input_map = dict( - args=dict(argstr='%s', ), - environ=dict( - nohash=True, - usedefault=True, - ), + args=dict(argstr="%s",), environ=dict(nohash=True, usedefault=True,), ) inputs = WBCommand.input_spec() diff --git a/nipype/pipeline/__init__.py b/nipype/pipeline/__init__.py index b410fc8ea0..75b3b17c3a 100644 --- a/nipype/pipeline/__init__.py +++ b/nipype/pipeline/__init__.py @@ -5,5 +5,5 @@ Package contains modules for generating pipelines using interfaces """ -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" from .engine import Node, MapNode, JoinNode, Workflow diff --git a/nipype/pipeline/engine/__init__.py b/nipype/pipeline/engine/__init__.py index 4dc6784276..b13ba968ac 100644 --- a/nipype/pipeline/engine/__init__.py +++ b/nipype/pipeline/engine/__init__.py @@ -7,7 +7,7 @@ """ -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" from .workflows import Workflow from .nodes import Node, MapNode, JoinNode from .utils import generate_expanded_graph diff --git a/nipype/pipeline/engine/base.py b/nipype/pipeline/engine/base.py index fef31d00b9..6735c19d49 100644 --- a/nipype/pipeline/engine/base.py +++ b/nipype/pipeline/engine/base.py @@ -33,7 +33,7 @@ def __init__(self, name=None, base_dir=None): """ self._hierarchy = None self.name = name - self._id = self.name # for compatibility with node expansion using iterables + self._id = self.name # for compatibility with node expansion using iterables self.base_dir = base_dir self.config = deepcopy(config._sections) @@ -44,14 +44,14 @@ def name(self): @name.setter def name(self, name): - if not name or not re.match(r'^[\w-]+$', name): + if not name or not re.match(r"^[\w-]+$", name): raise ValueError('[Workflow|Node] name "%s" is not valid.' 
% name) self._name = name @property def fullname(self): if self._hierarchy: - return '%s.%s' % (self._hierarchy, self.name) + return "%s.%s" % (self._hierarchy, self.name) return self.name @property @@ -67,7 +67,7 @@ def itername(self): """Name for expanded iterable""" itername = self._id if self._hierarchy: - itername = '%s.%s' % (self._hierarchy, self._id) + itername = "%s.%s" % (self._hierarchy, self._id) return itername def clone(self, name): @@ -80,11 +80,10 @@ def clone(self, name): A clone of node or workflow must have a new name """ if name == self.name: - raise ValueError('Cloning requires a new name, "%s" is ' - 'in use.' % name) + raise ValueError('Cloning requires a new name, "%s" is ' "in use." % name) clone = deepcopy(self) clone.name = name - if hasattr(clone, '_id'): + if hasattr(clone, "_id"): clone._id = name return clone @@ -104,7 +103,7 @@ def __repr__(self): def save(self, filename=None): if filename is None: - filename = 'temp.pklz' + filename = "temp.pklz" savepkl(filename, self) def load(self, filename): diff --git a/nipype/pipeline/engine/nodes.py b/nipype/pipeline/engine/nodes.py index c57eb4e2c8..09822cc7ff 100644 --- a/nipype/pipeline/engine/nodes.py +++ b/nipype/pipeline/engine/nodes.py @@ -19,23 +19,49 @@ from ... import config, logging from ...utils.misc import flatten, unflatten, str2bool, dict_diff -from ...utils.filemanip import (md5, ensure_list, simplify_list, copyfiles, fnames_presuffix, - loadpkl, split_filename, load_json, - emptydirs, savepkl, indirectory, silentrm) - -from ...interfaces.base import (traits, InputMultiPath, CommandLine, Undefined, - DynamicTraitedSpec, Bunch, InterfaceResult, - Interface, isdefined) +from ...utils.filemanip import ( + md5, + ensure_list, + simplify_list, + copyfiles, + fnames_presuffix, + loadpkl, + split_filename, + load_json, + emptydirs, + savepkl, + indirectory, + silentrm, +) + +from ...interfaces.base import ( + traits, + InputMultiPath, + CommandLine, + Undefined, + DynamicTraitedSpec, + Bunch, + InterfaceResult, + Interface, + isdefined, +) from ...interfaces.base.specs import get_filecopy_info from .utils import ( - _parameterization_dir, save_hashfile as _save_hashfile, load_resultfile as - _load_resultfile, save_resultfile as _save_resultfile, nodelist_runner as - _node_runner, strip_temp as _strip_temp, write_node_report, - clean_working_directory, merge_dict, evaluate_connect_function) + _parameterization_dir, + save_hashfile as _save_hashfile, + load_resultfile as _load_resultfile, + save_resultfile as _save_resultfile, + nodelist_runner as _node_runner, + strip_temp as _strip_temp, + write_node_report, + clean_working_directory, + merge_dict, + evaluate_connect_function, +) from .base import EngineBase -logger = logging.getLogger('nipype.workflow') +logger = logging.getLogger("nipype.workflow") class Node(EngineBase): @@ -60,18 +86,20 @@ class Node(EngineBase): """ - def __init__(self, - interface, - name, - iterables=None, - itersource=None, - synchronize=False, - overwrite=None, - needed_outputs=None, - run_without_submitting=False, - n_procs=None, - mem_gb=0.20, - **kwargs): + def __init__( + self, + interface, + name, + iterables=None, + itersource=None, + synchronize=False, + overwrite=None, + needed_outputs=None, + run_without_submitting=False, + n_procs=None, + mem_gb=0.20, + **kwargs + ): """ Parameters ---------- @@ -142,11 +170,11 @@ def __init__(self, """ # Make sure an interface is set, and that it is an Interface if interface is None: - raise IOError('Interface must be provided') + raise 
IOError("Interface must be provided") if not isinstance(interface, Interface): - raise IOError('interface must be an instance of an Interface') + raise IOError("interface must be an instance of an Interface") - super(Node, self).__init__(name, kwargs.get('base_dir')) + super(Node, self).__init__(name, kwargs.get("base_dir")) self._interface = interface self._hierarchy = None @@ -167,8 +195,7 @@ def __init__(self, self._n_procs = n_procs # Downstream n_procs - if hasattr(self._interface.inputs, - 'num_threads') and self._n_procs is not None: + if hasattr(self._interface.inputs, "num_threads") and self._n_procs is not None: self._interface.inputs.num_threads = self._n_procs # Initialize needed_outputs and hashes @@ -187,7 +214,8 @@ def interface(self): def result(self): """Get result from result file (do not hold it in memory)""" return _load_resultfile( - op.join(self.output_dir(), 'result_%s.pklz' % self.name)) + op.join(self.output_dir(), "result_%s.pklz" % self.name) + ) @property def inputs(self): @@ -216,11 +244,12 @@ def needed_outputs(self, new_outputs): @property def mem_gb(self): """Get estimated memory (GB)""" - if hasattr(self._interface, 'estimated_memory_gb'): + if hasattr(self._interface, "estimated_memory_gb"): self._mem_gb = self._interface.estimated_memory_gb logger.warning( 'Setting "estimated_memory_gb" on Interfaces has been ' - 'deprecated as of nipype 1.0, please use Node.mem_gb.') + "deprecated as of nipype 1.0, please use Node.mem_gb." + ) return self._mem_gb @@ -229,8 +258,9 @@ def n_procs(self): """Get the estimated number of processes/threads""" if self._n_procs is not None: return self._n_procs - if hasattr(self._interface.inputs, 'num_threads') and isdefined( - self._interface.inputs.num_threads): + if hasattr(self._interface.inputs, "num_threads") and isdefined( + self._interface.inputs.num_threads + ): return self._interface.inputs.num_threads return 1 @@ -240,7 +270,7 @@ def n_procs(self, value): self._n_procs = value # Overwrite interface's dynamic input of num_threads - if hasattr(self._interface.inputs, 'num_threads'): + if hasattr(self._interface.inputs, "num_threads"): self._interface.inputs.num_threads = self._n_procs def output_dir(self): @@ -254,10 +284,10 @@ def output_dir(self): self.base_dir = mkdtemp() outputdir = self.base_dir if self._hierarchy: - outputdir = op.join(outputdir, *self._hierarchy.split('.')) + outputdir = op.join(outputdir, *self._hierarchy.split(".")) if self.parameterization: - params_str = ['{}'.format(p) for p in self.parameterization] - if not str2bool(self.config['execution']['parameterize_dirs']): + params_str = ["{}".format(p) for p in self.parameterization] + if not str2bool(self.config["execution"]["parameterize_dirs"]): params_str = [_parameterization_dir(p) for p in params_str] outputdir = op.join(outputdir, *params_str) @@ -266,8 +296,9 @@ def output_dir(self): def set_input(self, parameter, val): """Set interface input value""" - logger.debug('[Node] %s - setting input %s = %s', self.name, parameter, - str(val)) + logger.debug( + "[Node] %s - setting input %s = %s", self.name, parameter, str(val) + ) setattr(self.inputs, parameter, deepcopy(val)) def get_output(self, parameter): @@ -286,40 +317,46 @@ def is_cached(self, rm_outdated=False): outdir = self.output_dir() # The output folder does not exist: not cached - if not op.exists(outdir) or \ - not op.exists(op.join(outdir, 'result_%s.pklz' % self.name)): + if not op.exists(outdir) or not op.exists( + op.join(outdir, "result_%s.pklz" % self.name) + ): 
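# No result_<name>.pklz yet means nothing has fully run in this directory, so hashfile inspection can be skipped outright.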
logger.debug('[Node] Not cached "%s".', outdir) return False, False # Check if there are hashfiles - globhashes = glob(op.join(outdir, '_0x*.json')) - unfinished = [ - path for path in globhashes - if path.endswith('_unfinished.json') - ] + globhashes = glob(op.join(outdir, "_0x*.json")) + unfinished = [path for path in globhashes if path.endswith("_unfinished.json")] hashfiles = list(set(globhashes) - set(unfinished)) # Update hash hashed_inputs, hashvalue = self._get_hashval() - hashfile = op.join(outdir, '_0x%s.json' % hashvalue) - logger.debug('[Node] Hashes: %s, %s, %s, %s', - hashed_inputs, hashvalue, hashfile, hashfiles) + hashfile = op.join(outdir, "_0x%s.json" % hashvalue) + logger.debug( + "[Node] Hashes: %s, %s, %s, %s", + hashed_inputs, + hashvalue, + hashfile, + hashfiles, + ) cached = hashfile in hashfiles # No previous hashfiles found, we're all set. if cached and len(hashfiles) == 1: - assert(hashfile == hashfiles[0]) + assert hashfile == hashfiles[0] logger.debug('[Node] Up-to-date cache found for "%s".', self.fullname) return True, True # Cached and updated if len(hashfiles) > 1: if cached: hashfiles.remove(hashfile) # Do not clean up the node, if cached - logger.warning('[Node] Found %d previous hashfiles indicating that the working ' - 'directory of node "%s" is stale, deleting old hashfiles.', - len(hashfiles), self.fullname) + logger.warning( + "[Node] Found %d previous hashfiles indicating that the working " + 'directory of node "%s" is stale, deleting old hashfiles.', + len(hashfiles), + self.fullname, + ) for rmfile in hashfiles: os.remove(rmfile) @@ -327,7 +364,7 @@ def is_cached(self, rm_outdated=False): if not hashfiles: logger.debug('[Node] No hashfiles found in "%s".', outdir) - assert(not cached) + assert not cached return False, False # At this point only one hashfile is in the folder @@ -340,21 +377,21 @@ def is_cached(self, rm_outdated=False): loglevel = logger.getEffectiveLevel() if loglevel < INFO: # Lazy logging: only < INFO exp_hash_file_base = split_filename(hashfiles[0])[1] - exp_hash = exp_hash_file_base[len('_0x'):] - logger.log(loglevel, "[Node] Old/new hashes = %s/%s", - exp_hash, hashvalue) + exp_hash = exp_hash_file_base[len("_0x") :] + logger.log( + loglevel, "[Node] Old/new hashes = %s/%s", exp_hash, hashvalue + ) try: prev_inputs = load_json(hashfiles[0]) except Exception: pass else: - logger.log(loglevel, - dict_diff(prev_inputs, hashed_inputs, 10)) + logger.log(loglevel, dict_diff(prev_inputs, hashed_inputs, 10)) if rm_outdated: os.remove(hashfiles[0]) - assert(cached) # At this point, node is cached (may not be up-to-date) + assert cached # At this point, node is cached (may not be up-to-date) return cached, updated def hash_exists(self, updatehash=False): @@ -368,7 +405,7 @@ def hash_exists(self, updatehash=False): cached, updated = self.is_cached(rm_outdated=True) outdir = self.output_dir() - hashfile = op.join(outdir, '_0x%s.json' % self._hashvalue) + hashfile = op.join(outdir, "_0x%s.json" % self._hashvalue) if updated: return True, self._hashvalue, hashfile, self._hashed_inputs @@ -397,8 +434,9 @@ def run(self, updatehash=False): self.config = merge_dict(deepcopy(config._sections), self.config) outdir = self.output_dir() - force_run = self.overwrite or (self.overwrite is None and - self._interface.always_run) + force_run = self.overwrite or ( + self.overwrite is None and self._interface.always_run + ) # Check hash, check whether run should be enforced logger.info('[Node] Setting-up "%s" in "%s".', self.fullname, outdir) @@ -407,32 
+445,38 @@ def run(self, updatehash=False): # If the node is cached, check on pklz files and finish if not force_run and (updated or (not updated and updatehash)): logger.debug("Only updating node hashes or skipping execution") - inputs_file = op.join(outdir, '_inputs.pklz') + inputs_file = op.join(outdir, "_inputs.pklz") if not op.exists(inputs_file): - logger.debug('Creating inputs file %s', inputs_file) + logger.debug("Creating inputs file %s", inputs_file) savepkl(inputs_file, self.inputs.get_traitsfree()) - node_file = op.join(outdir, '_node.pklz') + node_file = op.join(outdir, "_node.pklz") if not op.exists(node_file): - logger.debug('Creating node file %s', node_file) + logger.debug("Creating node file %s", node_file) savepkl(node_file, self) - result = self._run_interface(execute=False, - updatehash=updatehash and not updated) - logger.info('[Node] "%s" found cached%s.', self.fullname, - ' (and hash updated)' * (updatehash and not updated)) + result = self._run_interface( + execute=False, updatehash=updatehash and not updated + ) + logger.info( + '[Node] "%s" found cached%s.', + self.fullname, + " (and hash updated)" * (updatehash and not updated), + ) return result if cached and updated and not isinstance(self, MapNode): logger.debug('[Node] Rerunning cached, up-to-date node "%s"', self.fullname) if not force_run and str2bool( - self.config['execution']['stop_on_first_rerun']): + self.config["execution"]["stop_on_first_rerun"] + ): raise Exception( - 'Cannot rerun when "stop_on_first_rerun" is set to True') + 'Cannot rerun when "stop_on_first_rerun" is set to True' + ) # Remove any hashfile that exists at this point (re)running. if cached: - for outdatedhash in glob(op.join(self.output_dir(), '_0x*.json')): + for outdatedhash in glob(op.join(self.output_dir(), "_0x*.json")): os.remove(outdatedhash) # _get_hashval needs to be called before running. When there is a valid (or seemingly @@ -441,21 +485,22 @@ def run(self, updatehash=False): # the hashval needs to be generated here. See #3026 for a larger context. self._get_hashval() # Hashfile while running - hashfile_unfinished = op.join( - outdir, '_0x%s_unfinished.json' % self._hashvalue) + hashfile_unfinished = op.join(outdir, "_0x%s_unfinished.json" % self._hashvalue) # Delete directory contents if this is not a MapNode or can't resume can_resume = not (self._interface.can_resume and op.isfile(hashfile_unfinished)) if can_resume and not isinstance(self, MapNode): emptydirs(outdir, noexist_ok=True) else: - logger.debug('[%sNode] Resume - hashfile=%s', - 'Map' * int(isinstance(self, MapNode)), - hashfile_unfinished) + logger.debug( + "[%sNode] Resume - hashfile=%s", + "Map" * int(isinstance(self, MapNode)), + hashfile_unfinished, + ) if isinstance(self, MapNode): # remove old json files - for filename in glob(op.join(outdir, '_0x*.json')): + for filename in glob(op.join(outdir, "_0x*.json")): os.remove(filename) # Make sure outdir is created @@ -464,8 +509,8 @@ def run(self, updatehash=False): # Store runtime-hashfile, pre-execution report, the node and the inputs set. 
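# The "_unfinished" suffix marks a run in flight; it is renamed to the final hashfile only after the interface returns, so a crash leaves behind a marker that the resume logic above can detect.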
_save_hashfile(hashfile_unfinished, self._hashed_inputs) write_node_report(self, is_mapnode=isinstance(self, MapNode)) - savepkl(op.join(outdir, '_node.pklz'), self) - savepkl(op.join(outdir, '_inputs.pklz'), self.inputs.get_traitsfree()) + savepkl(op.join(outdir, "_node.pklz"), self) + savepkl(op.join(outdir, "_inputs.pklz"), self.inputs.get_traitsfree()) try: result = self._run_interface(execute=True) @@ -473,15 +518,17 @@ def run(self, updatehash=False): logger.warning('[Node] Error on "%s" (%s)', self.fullname, outdir) # Tear-up after error if not silentrm(hashfile_unfinished): - logger.warning("""\ + logger.warning( + """\ Interface finished unexpectedly and the corresponding unfinished hashfile %s \ does not exist. Another nipype instance may be running against the same work \ -directory. Please ensure no other concurrent workflows are racing""", hashfile_unfinished) +directory. Please ensure no other concurrent workflows are racing""", + hashfile_unfinished, + ) raise # Tear-up after success - shutil.move(hashfile_unfinished, - hashfile_unfinished.replace('_unfinished', '')) + shutil.move(hashfile_unfinished, hashfile_unfinished.replace("_unfinished", "")) write_node_report(self, result=result, is_mapnode=isinstance(self, MapNode)) logger.info('[Node] Finished "%s".', self.fullname) return result @@ -491,14 +538,15 @@ def _get_hashval(self): self._get_inputs() if self._hashvalue is None and self._hashed_inputs is None: self._hashed_inputs, self._hashvalue = self.inputs.get_hashval( - hash_method=self.config['execution']['hash_method']) - rm_extra = self.config['execution']['remove_unnecessary_outputs'] + hash_method=self.config["execution"]["hash_method"] + ) + rm_extra = self.config["execution"]["remove_unnecessary_outputs"] if str2bool(rm_extra) and self.needed_outputs: hashobject = md5() hashobject.update(self._hashvalue.encode()) hashobject.update(str(self.needed_outputs).encode()) self._hashvalue = hashobject.hexdigest() - self._hashed_inputs.append(('needed_outputs', self.needed_outputs)) + self._hashed_inputs.append(("needed_outputs", self.needed_outputs)) return self._hashed_inputs, self._hashvalue def _get_inputs(self): @@ -521,19 +569,25 @@ def _get_inputs(self): logger.debug( '[Node] Setting %d connected inputs of node "%s" from %d previous nodes.', - len(self.input_source), self.name, len(prev_results)) + len(self.input_source), + self.name, + len(prev_results), + ) for results_fname, connections in list(prev_results.items()): outputs = None try: outputs = _load_resultfile(results_fname).outputs except AttributeError as e: - logger.critical('%s', e) + logger.critical("%s", e) if outputs is None: - raise RuntimeError("""\ + raise RuntimeError( + """\ Error populating the inputs of node "%s": the results file of the source node \ -(%s) does not contain any outputs.""" % (self.name, results_fname)) +(%s) does not contain any outputs.""" + % (self.name, results_fname) + ) for key, conn in connections: output_value = Undefined @@ -541,7 +595,8 @@ def _get_inputs(self): value = getattr(outputs, conn[0]) if isdefined(value): output_value = evaluate_connect_function( - conn[1], conn[2], value) + conn[1], conn[2], value + ) else: output_name = conn try: @@ -554,19 +609,22 @@ def _get_inputs(self): self.set_input(key, deepcopy(output_value)) except traits.TraitError as e: msg = ( - e.args[0], '', 'Error setting node input:', - 'Node: %s' % self.name, 'input: %s' % key, - 'results_file: %s' % results_fname, - 'value: %s' % str(output_value), + e.args[0], + "", + "Error setting node 
input:", + "Node: %s" % self.name, + "input: %s" % key, + "results_file: %s" % results_fname, + "value: %s" % str(output_value), ) - e.args = ('\n'.join(msg), ) + e.args = ("\n".join(msg),) raise # Successfully set inputs self._got_inputs = True def _update_hash(self): - for outdatedhash in glob(op.join(self.output_dir(), '_0x*.json')): + for outdatedhash in glob(op.join(self.output_dir(), "_0x*.json")): os.remove(outdatedhash) _save_hashfile(self._hashvalue, self._hashed_inputs) @@ -580,15 +638,15 @@ def _load_results(self): cwd = self.output_dir() try: - result = _load_resultfile( - op.join(cwd, 'result_%s.pklz' % self.name)) + result = _load_resultfile(op.join(cwd, "result_%s.pklz" % self.name)) except (traits.TraitError, EOFError): - logger.debug( - 'Error populating inputs/outputs, (re)aggregating results...') + logger.debug("Error populating inputs/outputs, (re)aggregating results...") except (AttributeError, ImportError) as err: - logger.debug('attribute error: %s probably using ' - 'different trait pickled file', str(err)) - old_inputs = loadpkl(op.join(cwd, '_inputs.pklz')) + logger.debug( + "attribute error: %s probably using " "different trait pickled file", + str(err), + ) + old_inputs = loadpkl(op.join(cwd, "_inputs.pklz")) self.inputs.trait_set(**old_inputs) else: return result @@ -597,22 +655,28 @@ def _load_results(self): if not isinstance(self, MapNode): self._copyfiles_to_wd(linksonly=True) aggouts = self._interface.aggregate_outputs( - needed_outputs=self.needed_outputs) + needed_outputs=self.needed_outputs + ) runtime = Bunch( cwd=cwd, returncode=0, environ=dict(os.environ), - hostname=socket.gethostname()) + hostname=socket.gethostname(), + ) result = InterfaceResult( interface=self._interface.__class__, runtime=runtime, inputs=self._interface.inputs.get_traitsfree(), - outputs=aggouts) + outputs=aggouts, + ) _save_resultfile( - result, cwd, self.name, - rebase=str2bool(self.config['execution']['use_relative_paths'])) + result, + cwd, + self.name, + rebase=str2bool(self.config["execution"]["use_relative_paths"]), + ) else: - logger.debug('aggregating mapnode results') + logger.debug("aggregating mapnode results") result = self._run_interface() return result @@ -622,13 +686,15 @@ def _run_command(self, execute, copyfiles=True): result = self._load_results() except (FileNotFoundError, AttributeError): # if aggregation does not work, rerun the node - logger.info("[Node] Some of the outputs were not found: " - "rerunning node.") + logger.info( + "[Node] Some of the outputs were not found: " "rerunning node." 
+ ) copyfiles = False # OE: this was like this before, execute = True # I'll keep them for safety else: - logger.info('[Node] Cached "%s" - collecting precomputed outputs', - self.fullname) + logger.info( + '[Node] Cached "%s" - collecting precomputed outputs', self.fullname + ) return result outdir = self.output_dir() @@ -639,46 +705,55 @@ def _run_command(self, execute, copyfiles=True): cwd=outdir, returncode=1, environ=dict(os.environ), - hostname=socket.gethostname() + hostname=socket.gethostname(), ), - inputs=self._interface.inputs.get_traitsfree()) + inputs=self._interface.inputs.get_traitsfree(), + ) if copyfiles: self._originputs = deepcopy(self._interface.inputs) self._copyfiles_to_wd(execute=execute) message = '[Node] Running "{}" ("{}.{}")'.format( - self.name, self._interface.__module__, - self._interface.__class__.__name__) + self.name, self._interface.__module__, self._interface.__class__.__name__ + ) if issubclass(self._interface.__class__, CommandLine): try: with indirectory(outdir): cmd = self._interface.cmdline except Exception as msg: - result.runtime.stderr = '{}\n\n{}'.format( - getattr(result.runtime, 'stderr', ''), msg) + result.runtime.stderr = "{}\n\n{}".format( + getattr(result.runtime, "stderr", ""), msg + ) _save_resultfile( - result, outdir, self.name, - rebase=str2bool(self.config['execution']['use_relative_paths'])) + result, + outdir, + self.name, + rebase=str2bool(self.config["execution"]["use_relative_paths"]), + ) raise - cmdfile = op.join(outdir, 'command.txt') - with open(cmdfile, 'wt') as fd: + cmdfile = op.join(outdir, "command.txt") + with open(cmdfile, "wt") as fd: print(cmd + "\n", file=fd) - message += ', a CommandLine Interface with command:\n{}'.format(cmd) + message += ", a CommandLine Interface with command:\n{}".format(cmd) logger.info(message) try: result = self._interface.run(cwd=outdir) except Exception as msg: - result.runtime.stderr = '%s\n\n%s'.format( - getattr(result.runtime, 'stderr', ''), msg) + result.runtime.stderr = "{}\n\n{}".format( + getattr(result.runtime, "stderr", ""), msg + ) _save_resultfile( - result, outdir, self.name, - rebase=str2bool(self.config['execution']['use_relative_paths'])) + result, + outdir, + self.name, + rebase=str2bool(self.config["execution"]["use_relative_paths"]), + ) raise dirs2keep = None if isinstance(self, MapNode): - dirs2keep = [op.join(outdir, 'mapflow')] + dirs2keep = [op.join(outdir, "mapflow")] result.outputs = clean_working_directory( result.outputs, @@ -686,10 +761,14 @@ def _run_command(self, execute, copyfiles=True): self._interface.inputs, self.needed_outputs, self.config, - dirs2keep=dirs2keep) + dirs2keep=dirs2keep, + ) _save_resultfile( - result, outdir, self.name, - rebase=str2bool(self.config['execution']['use_relative_paths'])) + result, + outdir, + self.name, + rebase=str2bool(self.config["execution"]["use_relative_paths"]), + ) return result @@ -700,41 +779,42 @@ def _copyfiles_to_wd(self, execute=True, linksonly=False): # Nothing to be done return - logger.debug('copying files to wd [execute=%s, linksonly=%s]', execute, - linksonly) + logger.debug( + "copying files to wd [execute=%s, linksonly=%s]", execute, linksonly + ) outdir = self.output_dir() if execute and linksonly: olddir = outdir - outdir = op.join(outdir, '_tempinput') + outdir = op.join(outdir, "_tempinput") os.makedirs(outdir, exist_ok=True) for info in filecopy_info: - files = self.inputs.trait_get().get(info['key']) + files = self.inputs.trait_get().get(info["key"]) if not isdefined(files) or not files: continue 
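# Each filecopy_info entry pairs an input trait ("key") with a "copy" flag: copy=True forces a physical copy into the working directory, otherwise symlinks suffice; linksonly staging is routed through the "_tempinput" directory above.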
infiles = ensure_list(files) if execute: if linksonly: - if not info['copy']: + if not info["copy"]: newfiles = copyfiles( - infiles, [outdir], - copy=info['copy'], - create_new=True) + infiles, [outdir], copy=info["copy"], create_new=True + ) else: newfiles = fnames_presuffix(infiles, newpath=outdir) - newfiles = _strip_temp(newfiles, - op.abspath(olddir).split( - op.sep)[-1]) + newfiles = _strip_temp( + newfiles, op.abspath(olddir).split(op.sep)[-1] + ) else: newfiles = copyfiles( - infiles, [outdir], copy=info['copy'], create_new=True) + infiles, [outdir], copy=info["copy"], create_new=True + ) else: newfiles = fnames_presuffix(infiles, newpath=outdir) if not isinstance(files, list): newfiles = simplify_list(newfiles) - setattr(self.inputs, info['key'], newfiles) + setattr(self.inputs, info["key"], newfiles) if execute and linksonly: emptydirs(outdir, noexist_ok=True) @@ -772,13 +852,9 @@ class JoinNode(Node): """ - def __init__(self, - interface, - name, - joinsource, - joinfield=None, - unique=False, - **kwargs): + def __init__( + self, interface, name, joinsource, joinfield=None, unique=False, **kwargs + ): """ Parameters @@ -810,8 +886,9 @@ def __init__(self, self.joinfield = joinfield """the fields to join""" - self._inputs = self._override_join_traits(self._interface.inputs, - self.joinfield) + self._inputs = self._override_join_traits( + self._interface.inputs, self.joinfield + ) """the override inputs""" self._unique = unique @@ -861,8 +938,9 @@ def _add_join_item_fields(self): """ # create the new join item fields idx = self._next_slot_index - newfields = dict([(field, self._add_join_item_field(field, idx)) - for field in self.joinfield]) + newfields = dict( + [(field, self._add_join_item_field(field, idx)) for field in self.joinfield] + ) # increment the join slot index logger.debug("Added the %s join item fields %s.", self, newfields) self._next_slot_index += 1 @@ -896,8 +974,10 @@ def _override_join_traits(self, basetraits, fields): # validate the fields for field in fields: if not basetraits.trait(field): - raise ValueError("The JoinNode %s does not have a field" - " named %s" % (self.name, field)) + raise ValueError( + "The JoinNode %s does not have a field" + " named %s" % (self.name, field) + ) for name, trait in list(basetraits.items()): # if a join field has a single inner trait, then the item # trait is that inner trait. 
Otherwise, the item trait is @@ -908,7 +988,11 @@ def _override_join_traits(self, basetraits, fields): setattr(dyntraits, name, Undefined) logger.debug( "Converted the join node %s field %s trait type from %s to %s", - self, name, trait.trait_type.info(), item_trait.info()) + self, + name, + trait.trait_type.info(), + item_trait.info(), + ) else: dyntraits.add_trait(name, traits.Any) setattr(dyntraits, name, Undefined) @@ -930,17 +1014,27 @@ def _collate_join_field_inputs(self): try: setattr(self._interface.inputs, field, val) except Exception as e: - raise ValueError(">>JN %s %s %s %s %s: %s" % - (self, field, val, - self.inputs.copyable_trait_names(), - self.joinfield, e)) + raise ValueError( + ">>JN %s %s %s %s %s: %s" + % ( + self, + field, + val, + self.inputs.copyable_trait_names(), + self.joinfield, + e, + ) + ) elif hasattr(self._interface.inputs, field): # copy the non-join field val = getattr(self._inputs, field) if isdefined(val): setattr(self._interface.inputs, field, val) - logger.debug("Collated %d inputs into the %s node join fields", - self._next_slot_index, self) + logger.debug( + "Collated %d inputs into the %s node join fields", + self._next_slot_index, + self, + ) def _collate_input_value(self, field): """ @@ -954,10 +1048,7 @@ def _collate_input_value(self, field): the iterables order. If the ``unique`` flag is set, then duplicate values are removed but the iterables order is preserved. """ - val = [ - self._slot_value(field, idx) - for idx in range(self._next_slot_index) - ] + val = [self._slot_value(field, idx) for idx in range(self._next_slot_index)] basetrait = self._interface.inputs.trait(field) if isinstance(basetrait.trait_type, traits.Set): return set(val) @@ -974,8 +1065,9 @@ def _slot_value(self, field, index): except AttributeError as e: raise AttributeError( "The join node %s does not have a slot field %s" - " to hold the %s value at index %d: %s" % (self, slot_field, - field, index, e)) + " to hold the %s value at index %d: %s" + % (self, slot_field, field, index, e) + ) class MapNode(Node): @@ -994,13 +1086,9 @@ class MapNode(Node): """ - def __init__(self, - interface, - iterfield, - name, - serial=False, - nested=False, - **kwargs): + def __init__( + self, interface, iterfield, name, serial=False, nested=False, **kwargs + ): """ Parameters @@ -1031,7 +1119,8 @@ def __init__(self, self.iterfield = iterfield self.nested = nested self._inputs = self._create_dynamic_traits( - self._interface.inputs, fields=self.iterfield) + self._interface.inputs, fields=self.iterfield + ) self._inputs.on_trait_change(self._set_mapnode_input) self._got_inputs = False self._serial = serial @@ -1044,7 +1133,7 @@ def _create_dynamic_traits(self, basetraits, fields=None, nitems=None): fields = basetraits.copyable_trait_names() for name, spec in list(basetraits.items()): if name in fields and ((nitems is None) or (nitems > 1)): - logger.debug('adding multipath trait: %s', name) + logger.debug("adding multipath trait: %s", name) if self.nested: output.add_trait(name, InputMultiPath(traits.Any())) else: @@ -1063,13 +1152,15 @@ def set_input(self, parameter, val): Set interface input value or nodewrapper attribute Priority goes to interface. 
""" - logger.debug('setting nodelevel(%s) input %s = %s', str(self), - parameter, str(val)) + logger.debug( + "setting nodelevel(%s) input %s = %s", str(self), parameter, str(val) + ) self._set_mapnode_input(parameter, deepcopy(val)) def _set_mapnode_input(self, name, newvalue): - logger.debug('setting mapnode(%s) input: %s -> %s', str(self), name, - str(newvalue)) + logger.debug( + "setting mapnode(%s) input: %s -> %s", str(self), name, str(newvalue) + ) if name in self.iterfield: setattr(self._inputs, name, newvalue) else: @@ -1087,25 +1178,24 @@ def _get_hashval(self): for name in self.iterfield: hashinputs.remove_trait(name) hashinputs.add_trait( - name, - InputMultiPath( - self._interface.inputs.traits()[name].trait_type)) - logger.debug('setting hashinput %s-> %s', name, - getattr(self._inputs, name)) + name, InputMultiPath(self._interface.inputs.traits()[name].trait_type) + ) + logger.debug("setting hashinput %s-> %s", name, getattr(self._inputs, name)) if self.nested: setattr(hashinputs, name, flatten(getattr(self._inputs, name))) else: setattr(hashinputs, name, getattr(self._inputs, name)) hashed_inputs, hashvalue = hashinputs.get_hashval( - hash_method=self.config['execution']['hash_method']) - rm_extra = self.config['execution']['remove_unnecessary_outputs'] + hash_method=self.config["execution"]["hash_method"] + ) + rm_extra = self.config["execution"]["remove_unnecessary_outputs"] if str2bool(rm_extra) and self.needed_outputs: hashobject = md5() hashobject.update(hashvalue.encode()) sorted_outputs = sorted(self.needed_outputs) hashobject.update(str(sorted_outputs).encode()) hashvalue = hashobject.hexdigest() - hashed_inputs.append(('needed_outputs', sorted_outputs)) + hashed_inputs.append(("needed_outputs", sorted_outputs)) self._hashed_inputs, self._hashvalue = hashed_inputs, hashvalue return self._hashed_inputs, self._hashvalue @@ -1122,14 +1212,11 @@ def _make_nodes(self, cwd=None): if cwd is None: cwd = self.output_dir() if self.nested: - nitems = len( - flatten( - ensure_list(getattr(self.inputs, self.iterfield[0])))) + nitems = len(flatten(ensure_list(getattr(self.inputs, self.iterfield[0])))) else: - nitems = len( - ensure_list(getattr(self.inputs, self.iterfield[0]))) + nitems = len(ensure_list(getattr(self.inputs, self.iterfield[0]))) for i in range(nitems): - nodename = '_%s%d' % (self.name, i) + nodename = "_%s%d" % (self.name, i) node = Node( deepcopy(self._interface), n_procs=self._n_procs, @@ -1137,47 +1224,44 @@ def _make_nodes(self, cwd=None): overwrite=self.overwrite, needed_outputs=self.needed_outputs, run_without_submitting=self.run_without_submitting, - base_dir=op.join(cwd, 'mapflow'), - name=nodename) + base_dir=op.join(cwd, "mapflow"), + name=nodename, + ) node.plugin_args = self.plugin_args node.interface.inputs.trait_set( - **deepcopy(self._interface.inputs.trait_get())) + **deepcopy(self._interface.inputs.trait_get()) + ) node.interface.resource_monitor = self._interface.resource_monitor for field in self.iterfield: if self.nested: - fieldvals = flatten( - ensure_list(getattr(self.inputs, field))) + fieldvals = flatten(ensure_list(getattr(self.inputs, field))) else: fieldvals = ensure_list(getattr(self.inputs, field)) - logger.debug('setting input %d %s %s', i, field, fieldvals[i]) + logger.debug("setting input %d %s %s", i, field, fieldvals[i]) setattr(node.inputs, field, fieldvals[i]) node.config = self.config yield i, node def _collate_results(self, nodes): finalresult = InterfaceResult( - interface=[], - runtime=[], - provenance=[], - inputs=[], - 
outputs=self.outputs) + interface=[], runtime=[], provenance=[], inputs=[], outputs=self.outputs + ) returncode = [] for i, nresult, err in nodes: finalresult.runtime.insert(i, None) returncode.insert(i, err) if nresult: - if hasattr(nresult, 'runtime'): + if hasattr(nresult, "runtime"): finalresult.interface.insert(i, nresult.interface) finalresult.inputs.insert(i, nresult.inputs) finalresult.runtime[i] = nresult.runtime - if hasattr(nresult, 'provenance'): + if hasattr(nresult, "provenance"): finalresult.provenance.insert(i, nresult.provenance) if self.outputs: for key, _ in list(self.outputs.items()): - rm_extra = ( - self.config['execution']['remove_unnecessary_outputs']) + rm_extra = self.config["execution"]["remove_unnecessary_outputs"] if str2bool(rm_extra) and self.needed_outputs: if key not in self.needed_outputs: continue @@ -1196,20 +1280,20 @@ def _collate_results(self, nodes): for key, _ in list(self.outputs.items()): values = getattr(finalresult.outputs, key) if isdefined(values): - values = unflatten(values, - ensure_list( - getattr(self.inputs, - self.iterfield[0]))) + values = unflatten( + values, ensure_list(getattr(self.inputs, self.iterfield[0])) + ) setattr(finalresult.outputs, key, values) if returncode and any([code is not None for code in returncode]): msg = [] for i, code in enumerate(returncode): if code is not None: - msg += ['Subnode %d failed' % i] - msg += ['Error: %s' % str(code)] - raise Exception('Subnodes of node: %s failed:\n%s' % - (self.name, '\n'.join(msg))) + msg += ["Subnode %d failed" % i] + msg += ["Error: %s" % str(code)] + raise Exception( + "Subnodes of node: %s failed:\n%s" % (self.name, "\n".join(msg)) + ) return finalresult @@ -1227,15 +1311,14 @@ def num_subnodes(self): if self._serial: return 1 if self.nested: - return len( - ensure_list( - flatten(getattr(self.inputs, self.iterfield[0])))) + return len(ensure_list(flatten(getattr(self.inputs, self.iterfield[0])))) return len(ensure_list(getattr(self.inputs, self.iterfield[0]))) def _get_inputs(self): old_inputs = self._inputs.trait_get() self._inputs = self._create_dynamic_traits( - self._interface.inputs, fields=self.iterfield) + self._interface.inputs, fields=self.iterfield + ) self._inputs.trait_set(**old_inputs) super(MapNode, self)._get_inputs() @@ -1247,17 +1330,21 @@ def _check_iterfield(self): """ for iterfield in self.iterfield: if not isdefined(getattr(self.inputs, iterfield)): - raise ValueError(("Input %s was not set but it is listed " - "in iterfields.") % iterfield) + raise ValueError( + ("Input %s was not set but it is listed " "in iterfields.") + % iterfield + ) if len(self.iterfield) > 1: - first_len = len( - ensure_list(getattr(self.inputs, self.iterfield[0]))) + first_len = len(ensure_list(getattr(self.inputs, self.iterfield[0]))) for iterfield in self.iterfield[1:]: - if first_len != len( - ensure_list(getattr(self.inputs, iterfield))): + if first_len != len(ensure_list(getattr(self.inputs, iterfield))): raise ValueError( - ("All iterfields of a MapNode have to " - "have the same length. %s") % str(self.inputs)) + ( + "All iterfields of a MapNode have to " + "have the same length. 
%s" + ) + % str(self.inputs) + ) def _run_interface(self, execute=True, updatehash=False): """Run the mapnode interface @@ -1272,13 +1359,10 @@ def _run_interface(self, execute=True, updatehash=False): # Set up mapnode folder names if self.nested: - nitems = len( - ensure_list( - flatten(getattr(self.inputs, self.iterfield[0])))) + nitems = len(ensure_list(flatten(getattr(self.inputs, self.iterfield[0])))) else: - nitems = len( - ensure_list(getattr(self.inputs, self.iterfield[0]))) - nnametpl = '_%s{}' % self.name + nitems = len(ensure_list(getattr(self.inputs, self.iterfield[0]))) + nnametpl = "_%s{}" % self.name nodenames = [nnametpl.format(i) for i in range(nitems)] # Run mapnode @@ -1286,13 +1370,14 @@ def _run_interface(self, execute=True, updatehash=False): _node_runner( self._make_nodes(cwd), updatehash=updatehash, - stop_first=str2bool( - self.config['execution']['stop_on_first_crash']))) + stop_first=str2bool(self.config["execution"]["stop_on_first_crash"]), + ) + ) # And store results _save_resultfile(result, cwd, self.name, rebase=False) # remove any node directories no longer required dirs2remove = [] - for path in glob(op.join(cwd, 'mapflow', '*')): + for path in glob(op.join(cwd, "mapflow", "*")): if op.isdir(path): if path.split(op.sep)[-1] not in nodenames: dirs2remove.append(path) diff --git a/nipype/pipeline/engine/tests/test_base.py b/nipype/pipeline/engine/tests/test_base.py index c6b9135f54..ab80c2f158 100644 --- a/nipype/pipeline/engine/tests/test_base.py +++ b/nipype/pipeline/engine/tests/test_base.py @@ -10,13 +10,13 @@ class InputSpec(nib.TraitedSpec): - input1 = nib.traits.Int(desc='a random int') - input2 = nib.traits.Int(desc='a random int') - input_file = nib.File(desc='Random File') + input1 = nib.traits.Int(desc="a random int") + input2 = nib.traits.Int(desc="a random int") + input_file = nib.File(desc="Random File") class OutputSpec(nib.TraitedSpec): - output1 = nib.traits.List(nib.traits.Int, desc='outputs') + output1 = nib.traits.List(nib.traits.Int, desc="outputs") class EngineTestInterface(nib.SimpleInterface): @@ -25,60 +25,67 @@ class EngineTestInterface(nib.SimpleInterface): def _run_interface(self, runtime): runtime.returncode = 0 - self._results['output1'] = [1, self.inputs.input1] + self._results["output1"] = [1, self.inputs.input1] return runtime -@pytest.mark.parametrize( - 'name', ['valid1', 'valid_node', 'valid-node', 'ValidNode0']) +@pytest.mark.parametrize("name", ["valid1", "valid_node", "valid-node", "ValidNode0"]) def test_create(name): base = EngineBase(name=name) assert base.name == name @pytest.mark.parametrize( - 'name', ['invalid*1', 'invalid.1', 'invalid@', 'in/valid', None]) + "name", ["invalid*1", "invalid.1", "invalid@", "in/valid", None] +) def test_create_invalid(name): with pytest.raises(ValueError): EngineBase(name=name) def test_hierarchy(): - base = EngineBase(name='nodename') - base._hierarchy = 'some.history.behind' + base = EngineBase(name="nodename") + base._hierarchy = "some.history.behind" - assert base.name == 'nodename' - assert base.fullname == 'some.history.behind.nodename' + assert base.name == "nodename" + assert base.fullname == "some.history.behind.nodename" def test_clone(): - base = EngineBase(name='nodename') - base2 = base.clone('newnodename') + base = EngineBase(name="nodename") + base2 = base.clone("newnodename") - assert (base.base_dir == base2.base_dir and - base.config == base2.config and - base2.name == 'newnodename') + assert ( + base.base_dir == base2.base_dir + and base.config == base2.config + 
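# a clone keeps base_dir and config but must carry the new name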
and base2.name == "newnodename" + ) with pytest.raises(ValueError): - base.clone('nodename') + base.clone("nodename") + def test_clone_node_iterables(tmpdir): tmpdir.chdir() def addstr(string): - return ('%s + 2' % string) - - subject_list = ['sub-001', 'sub-002'] - inputnode = pe.Node(niu.IdentityInterface(fields=['subject']), - name='inputnode') - inputnode.iterables = [('subject', subject_list)] - - node_1 = pe.Node(niu.Function(input_names='string', - output_names='string', - function=addstr), name='node_1') - node_2 = node_1.clone('node_2') - - workflow = pe.Workflow(name='iter_clone_wf') - workflow.connect([(inputnode, node_1, [('subject', 'string')]), - (node_1, node_2, [('string', 'string')])]) + return "%s + 2" % string + + subject_list = ["sub-001", "sub-002"] + inputnode = pe.Node(niu.IdentityInterface(fields=["subject"]), name="inputnode") + inputnode.iterables = [("subject", subject_list)] + + node_1 = pe.Node( + niu.Function(input_names="string", output_names="string", function=addstr), + name="node_1", + ) + node_2 = node_1.clone("node_2") + + workflow = pe.Workflow(name="iter_clone_wf") + workflow.connect( + [ + (inputnode, node_1, [("subject", "string")]), + (node_1, node_2, [("string", "string")]), + ] + ) workflow.run() diff --git a/nipype/pipeline/engine/tests/test_engine.py b/nipype/pipeline/engine/tests/test_engine.py index 083aa8b691..f28b0f3bf3 100644 --- a/nipype/pipeline/engine/tests/test_engine.py +++ b/nipype/pipeline/engine/tests/test_engine.py @@ -19,16 +19,13 @@ @pytest.mark.parametrize( "iterables, expected", [ - ({ - "1": None - }, (1, 0)), # test1 - ({ - "1": dict(input1=lambda: [1, 2], input2=lambda: [1, 2]) - }, (4, 0)) # test2 - ]) + ({"1": None}, (1, 0)), # test1 + ({"1": dict(input1=lambda: [1, 2], input2=lambda: [1, 2])}, (4, 0)), # test2 + ], +) def test_1mod(iterables, expected): - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=EngineTestInterface(), name='mod1') + pipe = pe.Workflow(name="pipe") + mod1 = pe.Node(interface=EngineTestInterface(), name="mod1") setattr(mod1, "iterables", iterables["1"]) pipe.add_nodes([mod1]) pipe._flatgraph = pipe._create_flat_graph() @@ -40,26 +37,21 @@ def test_1mod(iterables, expected): @pytest.mark.parametrize( "iterables, expected", [ - ({ - "1": {}, - "2": dict(input1=lambda: [1, 2]) - }, (3, 2)), # test3 - ({ - "1": dict(input1=lambda: [1, 2]), - "2": {} - }, (4, 2)), # test4 - ({ - "1": dict(input1=lambda: [1, 2]), - "2": dict(input1=lambda: [1, 2]) - }, (6, 4)) # test5 - ]) + ({"1": {}, "2": dict(input1=lambda: [1, 2])}, (3, 2)), # test3 + ({"1": dict(input1=lambda: [1, 2]), "2": {}}, (4, 2)), # test4 + ( + {"1": dict(input1=lambda: [1, 2]), "2": dict(input1=lambda: [1, 2])}, + (6, 4), + ), # test5 + ], +) def test_2mods(iterables, expected): - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=EngineTestInterface(), name='mod1') - mod2 = pe.Node(interface=EngineTestInterface(), name='mod2') + pipe = pe.Workflow(name="pipe") + mod1 = pe.Node(interface=EngineTestInterface(), name="mod1") + mod2 = pe.Node(interface=EngineTestInterface(), name="mod2") for nr in ["1", "2"]: setattr(eval("mod" + nr), "iterables", iterables[nr]) - pipe.connect([(mod1, mod2, [('output1', 'input2')])]) + pipe.connect([(mod1, mod2, [("output1", "input2")])]) pipe._flatgraph = pipe._create_flat_graph() pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph)) assert len(pipe._execgraph.nodes()) == expected[0] @@ -69,95 +61,111 @@ def test_2mods(iterables, expected): @pytest.mark.parametrize( 
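# cases: per-node iterables, (expected node count, expected edge count), connect pattern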
"iterables, expected, connect", [ - ({ - "1": {}, - "2": dict(input1=lambda: [1, 2]), - "3": {} - }, (5, 4), ("1-2", "2-3")), # test6 - ({ - "1": dict(input1=lambda: [1, 2]), - "2": {}, - "3": {} - }, (5, 4), ("1-3", "2-3")), # test7 - ({ - "1": dict(input1=lambda: [1, 2]), - "2": dict(input1=lambda: [1, 2]), - "3": {} - }, (8, 8), ("1-3", "2-3")), # test8 - ]) + ( + {"1": {}, "2": dict(input1=lambda: [1, 2]), "3": {}}, + (5, 4), + ("1-2", "2-3"), + ), # test6 + ( + {"1": dict(input1=lambda: [1, 2]), "2": {}, "3": {}}, + (5, 4), + ("1-3", "2-3"), + ), # test7 + ( + { + "1": dict(input1=lambda: [1, 2]), + "2": dict(input1=lambda: [1, 2]), + "3": {}, + }, + (8, 8), + ("1-3", "2-3"), + ), # test8 + ], +) def test_3mods(iterables, expected, connect): - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=EngineTestInterface(), name='mod1') - mod2 = pe.Node(interface=EngineTestInterface(), name='mod2') - mod3 = pe.Node(interface=EngineTestInterface(), name='mod3') + pipe = pe.Workflow(name="pipe") + mod1 = pe.Node(interface=EngineTestInterface(), name="mod1") + mod2 = pe.Node(interface=EngineTestInterface(), name="mod2") + mod3 = pe.Node(interface=EngineTestInterface(), name="mod3") for nr in ["1", "2", "3"]: setattr(eval("mod" + nr), "iterables", iterables[nr]) if connect == ("1-2", "2-3"): - pipe.connect([(mod1, mod2, [('output1', 'input2')]), - (mod2, mod3, [('output1', 'input2')])]) + pipe.connect( + [ + (mod1, mod2, [("output1", "input2")]), + (mod2, mod3, [("output1", "input2")]), + ] + ) elif connect == ("1-3", "2-3"): - pipe.connect([(mod1, mod3, [('output1', 'input1')]), - (mod2, mod3, [('output1', 'input2')])]) + pipe.connect( + [ + (mod1, mod3, [("output1", "input1")]), + (mod2, mod3, [("output1", "input2")]), + ] + ) else: raise Exception( - "connect pattern is not implemented yet within the test function") + "connect pattern is not implemented yet within the test function" + ) pipe._flatgraph = pipe._create_flat_graph() pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph)) assert len(pipe._execgraph.nodes()) == expected[0] assert len(pipe._execgraph.edges()) == expected[1] - edgenum = sorted([(len(pipe._execgraph.in_edges(node)) + - len(pipe._execgraph.out_edges(node))) - for node in pipe._execgraph.nodes()]) + edgenum = sorted( + [ + (len(pipe._execgraph.in_edges(node)) + len(pipe._execgraph.out_edges(node))) + for node in pipe._execgraph.nodes() + ] + ) assert edgenum[0] > 0 def test_expansion(): - pipe1 = pe.Workflow(name='pipe1') - mod1 = pe.Node(interface=EngineTestInterface(), name='mod1') - mod2 = pe.Node(interface=EngineTestInterface(), name='mod2') - pipe1.connect([(mod1, mod2, [('output1', 'input2')])]) - pipe2 = pe.Workflow(name='pipe2') - mod3 = pe.Node(interface=EngineTestInterface(), name='mod3') - mod4 = pe.Node(interface=EngineTestInterface(), name='mod4') - pipe2.connect([(mod3, mod4, [('output1', 'input2')])]) + pipe1 = pe.Workflow(name="pipe1") + mod1 = pe.Node(interface=EngineTestInterface(), name="mod1") + mod2 = pe.Node(interface=EngineTestInterface(), name="mod2") + pipe1.connect([(mod1, mod2, [("output1", "input2")])]) + pipe2 = pe.Workflow(name="pipe2") + mod3 = pe.Node(interface=EngineTestInterface(), name="mod3") + mod4 = pe.Node(interface=EngineTestInterface(), name="mod4") + pipe2.connect([(mod3, mod4, [("output1", "input2")])]) pipe3 = pe.Workflow(name="pipe3") - pipe3.connect([(pipe1, pipe2, [('mod2.output1', 'mod4.input1')])]) + pipe3.connect([(pipe1, pipe2, [("mod2.output1", "mod4.input1")])]) pipe4 = 
pe.Workflow(name="pipe4") - mod5 = pe.Node(interface=EngineTestInterface(), name='mod5') + mod5 = pe.Node(interface=EngineTestInterface(), name="mod5") pipe4.add_nodes([mod5]) pipe5 = pe.Workflow(name="pipe5") pipe5.add_nodes([pipe4]) pipe6 = pe.Workflow(name="pipe6") - pipe6.connect([(pipe5, pipe3, [('pipe4.mod5.output1', - 'pipe2.mod3.input1')])]) + pipe6.connect([(pipe5, pipe3, [("pipe4.mod5.output1", "pipe2.mod3.input1")])]) pipe6._flatgraph = pipe6._create_flat_graph() def test_iterable_expansion(): - wf1 = pe.Workflow(name='test') - node1 = pe.Node(EngineTestInterface(), name='node1') - node2 = pe.Node(EngineTestInterface(), name='node2') - node1.iterables = ('input1', [1, 2]) - wf1.connect(node1, 'output1', node2, 'input2') - wf3 = pe.Workflow(name='group') + wf1 = pe.Workflow(name="test") + node1 = pe.Node(EngineTestInterface(), name="node1") + node2 = pe.Node(EngineTestInterface(), name="node2") + node1.iterables = ("input1", [1, 2]) + wf1.connect(node1, "output1", node2, "input2") + wf3 = pe.Workflow(name="group") for i in [0, 1, 2]: - wf3.add_nodes([wf1.clone(name='test%d' % i)]) + wf3.add_nodes([wf1.clone(name="test%d" % i)]) wf3._flatgraph = wf3._create_flat_graph() assert len(pe.generate_expanded_graph(wf3._flatgraph).nodes()) == 12 def test_synchronize_expansion(): - wf1 = pe.Workflow(name='test') - node1 = pe.Node(EngineTestInterface(), name='node1') - node1.iterables = [('input1', [1, 2]), ('input2', [3, 4, 5])] + wf1 = pe.Workflow(name="test") + node1 = pe.Node(EngineTestInterface(), name="node1") + node1.iterables = [("input1", [1, 2]), ("input2", [3, 4, 5])] node1.synchronize = True - node2 = pe.Node(EngineTestInterface(), name='node2') - wf1.connect(node1, 'output1', node2, 'input2') - wf3 = pe.Workflow(name='group') + node2 = pe.Node(EngineTestInterface(), name="node2") + wf1.connect(node1, "output1", node2, "input2") + wf3 = pe.Workflow(name="group") for i in [0, 1, 2]: - wf3.add_nodes([wf1.clone(name='test%d' % i)]) + wf3.add_nodes([wf1.clone(name="test%d" % i)]) wf3._flatgraph = wf3._create_flat_graph() # Each expanded graph clone has: # 3 node1 expansion nodes and @@ -168,19 +176,19 @@ def test_synchronize_expansion(): def test_synchronize_tuples_expansion(): - wf1 = pe.Workflow(name='test') + wf1 = pe.Workflow(name="test") - node1 = pe.Node(EngineTestInterface(), name='node1') - node2 = pe.Node(EngineTestInterface(), name='node2') - node1.iterables = [('input1', 'input2'), [(1, 3), (2, 4), (None, 5)]] + node1 = pe.Node(EngineTestInterface(), name="node1") + node2 = pe.Node(EngineTestInterface(), name="node2") + node1.iterables = [("input1", "input2"), [(1, 3), (2, 4), (None, 5)]] node1.synchronize = True - wf1.connect(node1, 'output1', node2, 'input2') + wf1.connect(node1, "output1", node2, "input2") - wf3 = pe.Workflow(name='group') + wf3 = pe.Workflow(name="group") for i in [0, 1, 2]: - wf3.add_nodes([wf1.clone(name='test%d' % i)]) + wf3.add_nodes([wf1.clone(name="test%d" % i)]) wf3._flatgraph = wf3._create_flat_graph() # Identical to test_synchronize_expansion @@ -189,25 +197,25 @@ def test_synchronize_tuples_expansion(): def test_itersource_expansion(): - wf1 = pe.Workflow(name='test') - node1 = pe.Node(EngineTestInterface(), name='node1') - node1.iterables = ('input1', [1, 2]) + wf1 = pe.Workflow(name="test") + node1 = pe.Node(EngineTestInterface(), name="node1") + node1.iterables = ("input1", [1, 2]) - node2 = pe.Node(EngineTestInterface(), name='node2') - wf1.connect(node1, 'output1', node2, 'input1') + node2 = pe.Node(EngineTestInterface(), name="node2") 
+ wf1.connect(node1, "output1", node2, "input1") - node3 = pe.Node(EngineTestInterface(), name='node3') - node3.itersource = ('node1', 'input1') - node3.iterables = [('input1', {1: [3, 4], 2: [5, 6, 7]})] + node3 = pe.Node(EngineTestInterface(), name="node3") + node3.itersource = ("node1", "input1") + node3.iterables = [("input1", {1: [3, 4], 2: [5, 6, 7]})] - wf1.connect(node2, 'output1', node3, 'input1') - node4 = pe.Node(EngineTestInterface(), name='node4') + wf1.connect(node2, "output1", node3, "input1") + node4 = pe.Node(EngineTestInterface(), name="node4") - wf1.connect(node3, 'output1', node4, 'input1') + wf1.connect(node3, "output1", node4, "input1") - wf3 = pe.Workflow(name='group') + wf3 = pe.Workflow(name="group") for i in [0, 1, 2]: - wf3.add_nodes([wf1.clone(name='test%d' % i)]) + wf3.add_nodes([wf1.clone(name="test%d" % i)]) wf3._flatgraph = wf3._create_flat_graph() @@ -223,26 +231,24 @@ def test_itersource_expansion(): def test_itersource_synchronize1_expansion(): - wf1 = pe.Workflow(name='test') - node1 = pe.Node(EngineTestInterface(), name='node1') - node1.iterables = [('input1', [1, 2]), ('input2', [3, 4])] + wf1 = pe.Workflow(name="test") + node1 = pe.Node(EngineTestInterface(), name="node1") + node1.iterables = [("input1", [1, 2]), ("input2", [3, 4])] node1.synchronize = True - node2 = pe.Node(EngineTestInterface(), name='node2') - wf1.connect(node1, 'output1', node2, 'input1') - node3 = pe.Node(EngineTestInterface(), name='node3') - node3.itersource = ('node1', ['input1', 'input2']) - node3.iterables = [('input1', { - (1, 3): [5, 6] - }), ('input2', { - (1, 3): [7, 8], - (2, 4): [9] - })] - wf1.connect(node2, 'output1', node3, 'input1') - node4 = pe.Node(EngineTestInterface(), name='node4') - wf1.connect(node3, 'output1', node4, 'input1') - wf3 = pe.Workflow(name='group') + node2 = pe.Node(EngineTestInterface(), name="node2") + wf1.connect(node1, "output1", node2, "input1") + node3 = pe.Node(EngineTestInterface(), name="node3") + node3.itersource = ("node1", ["input1", "input2"]) + node3.iterables = [ + ("input1", {(1, 3): [5, 6]}), + ("input2", {(1, 3): [7, 8], (2, 4): [9]}), + ] + wf1.connect(node2, "output1", node3, "input1") + node4 = pe.Node(EngineTestInterface(), name="node4") + wf1.connect(node3, "output1", node4, "input1") + wf3 = pe.Workflow(name="group") for i in [0, 1, 2]: - wf3.add_nodes([wf1.clone(name='test%d' % i)]) + wf3.add_nodes([wf1.clone(name="test%d" % i)]) wf3._flatgraph = wf3._create_flat_graph() # each expanded graph clone has: @@ -257,26 +263,26 @@ def test_itersource_synchronize1_expansion(): def test_itersource_synchronize2_expansion(): - wf1 = pe.Workflow(name='test') + wf1 = pe.Workflow(name="test") - node1 = pe.Node(EngineTestInterface(), name='node1') - node1.iterables = [('input1', [1, 2]), ('input2', [3, 4])] + node1 = pe.Node(EngineTestInterface(), name="node1") + node1.iterables = [("input1", [1, 2]), ("input2", [3, 4])] node1.synchronize = True - node2 = pe.Node(EngineTestInterface(), name='node2') - wf1.connect(node1, 'output1', node2, 'input1') - node3 = pe.Node(EngineTestInterface(), name='node3') - node3.itersource = ('node1', ['input1', 'input2']) + node2 = pe.Node(EngineTestInterface(), name="node2") + wf1.connect(node1, "output1", node2, "input1") + node3 = pe.Node(EngineTestInterface(), name="node3") + node3.itersource = ("node1", ["input1", "input2"]) node3.synchronize = True - node3.iterables = [('input1', 'input2'), { - (1, 3): [(5, 7), (6, 8)], - (2, 4): [(None, 9)] - }] - wf1.connect(node2, 'output1', node3, 'input1') - 
node4 = pe.Node(EngineTestInterface(), name='node4') - wf1.connect(node3, 'output1', node4, 'input1') - wf3 = pe.Workflow(name='group') + node3.iterables = [ + ("input1", "input2"), + {(1, 3): [(5, 7), (6, 8)], (2, 4): [(None, 9)]}, + ] + wf1.connect(node2, "output1", node3, "input1") + node4 = pe.Node(EngineTestInterface(), name="node4") + wf1.connect(node3, "output1", node4, "input1") + wf3 = pe.Workflow(name="group") for i in [0, 1, 2]: - wf3.add_nodes([wf1.clone(name='test%d' % i)]) + wf3.add_nodes([wf1.clone(name="test%d" % i)]) wf3._flatgraph = wf3._create_flat_graph() # each expanded graph clone has: @@ -290,7 +296,6 @@ def test_itersource_synchronize2_expansion(): assert len(pe.generate_expanded_graph(wf3._flatgraph).nodes()) == 30 - def test_old_config(tmpdir): tmpdir.chdir() wd = os.getcwd() @@ -303,21 +308,21 @@ def func2(a): return a + 1 n1 = pe.Node( - Function(input_names=[], output_names=['a'], function=func1), - name='n1') + Function(input_names=[], output_names=["a"], function=func1), name="n1" + ) n2 = pe.Node( - Function(input_names=['a'], output_names=['b'], function=func2), - name='n2') - w1 = pe.Workflow(name='test') + Function(input_names=["a"], output_names=["b"], function=func2), name="n2" + ) + w1 = pe.Workflow(name="test") modify = lambda x: x + 1 n1.inputs.a = 1 - w1.connect(n1, ('a', modify), n2, 'a') + w1.connect(n1, ("a", modify), n2, "a") w1.base_dir = wd - w1.config['execution']['crashdump_dir'] = wd + w1.config["execution"]["crashdump_dir"] = wd # generate outputs - w1.run(plugin='Linear') + w1.run(plugin="Linear") def test_mapnode_json(tmpdir): @@ -331,13 +336,14 @@ def func1(in1): return in1 + 1 n1 = MapNode( - Function(input_names=['in1'], output_names=['out'], function=func1), - iterfield=['in1'], - name='n1') + Function(input_names=["in1"], output_names=["out"], function=func1), + iterfield=["in1"], + name="n1", + ) n1.inputs.in1 = [1] - w1 = Workflow(name='test') + w1 = Workflow(name="test") w1.base_dir = wd - w1.config['execution']['crashdump_dir'] = wd + w1.config["execution"]["crashdump_dir"] = wd w1.add_nodes([n1]) w1.run() n1.inputs.in1 = [2] @@ -347,13 +353,13 @@ def func1(in1): eg = w1.run() node = list(eg.nodes())[0] - outjson = glob(os.path.join(node.output_dir(), '_0x*.json')) + outjson = glob(os.path.join(node.output_dir(), "_0x*.json")) assert len(outjson) == 1 # check that multiple json's don't trigger rerun - with open(os.path.join(node.output_dir(), 'test.json'), 'wt') as fp: - fp.write('dummy file') - w1.config['execution'].update(**{'stop_on_first_rerun': True}) + with open(os.path.join(node.output_dir(), "test.json"), "wt") as fp: + fp.write("dummy file") + w1.config["execution"].update(**{"stop_on_first_rerun": True}) w1.run() @@ -362,18 +368,18 @@ def test_parameterize_dirs_false(tmpdir): from ....interfaces.utility import IdentityInterface from ....testing import example_data - input_file = example_data('fsl_motion_outliers_fd.txt') + input_file = example_data("fsl_motion_outliers_fd.txt") - n1 = pe.Node(EngineTestInterface(), name='Node1') - n1.iterables = ('input_file', (input_file, input_file)) + n1 = pe.Node(EngineTestInterface(), name="Node1") + n1.iterables = ("input_file", (input_file, input_file)) n1.interface.inputs.input1 = 1 - n2 = pe.Node(IdentityInterface(fields='in1'), name='Node2') + n2 = pe.Node(IdentityInterface(fields="in1"), name="Node2") - wf = pe.Workflow(name='Test') + wf = pe.Workflow(name="Test") wf.base_dir = tmpdir.strpath - wf.config['execution']['parameterize_dirs'] = False - wf.connect([(n1, n2, 
[('output1', 'in1')])]) + wf.config["execution"]["parameterize_dirs"] = False + wf.connect([(n1, n2, [("output1", "in1")])]) wf.run() @@ -387,62 +393,61 @@ def func1(in1): return in1 n1 = MapNode( - Function(input_names=['in1'], output_names=['out'], function=func1), - iterfield=['in1'], - name='n1') + Function(input_names=["in1"], output_names=["out"], function=func1), + iterfield=["in1"], + name="n1", + ) n1.inputs.in1 = [1, 2, 3] - w1 = Workflow(name='test') + w1 = Workflow(name="test") w1.base_dir = wd w1.add_nodes([n1]) # set local check - w1.config['execution'] = { - 'stop_on_first_crash': 'true', - 'local_hash_check': 'true', - 'crashdump_dir': wd, - 'poll_sleep_duration': 2 + w1.config["execution"] = { + "stop_on_first_crash": "true", + "local_hash_check": "true", + "crashdump_dir": wd, + "poll_sleep_duration": 2, } # test output of num_subnodes method when serial is default (False) assert n1.num_subnodes() == len(n1.inputs.in1) # test running the workflow on default conditions - w1.run(plugin='MultiProc') + w1.run(plugin="MultiProc") # test output of num_subnodes method when serial is True n1._serial = True assert n1.num_subnodes() == 1 # test running the workflow on serial conditions - w1.run(plugin='MultiProc') + w1.run(plugin="MultiProc") def test_write_graph_runs(tmpdir): tmpdir.chdir() - for graph in ('orig', 'flat', 'exec', 'hierarchical', 'colored'): + for graph in ("orig", "flat", "exec", "hierarchical", "colored"): for simple in (True, False): - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=EngineTestInterface(), name='mod1') - mod2 = pe.Node(interface=EngineTestInterface(), name='mod2') - pipe.connect([(mod1, mod2, [('output1', 'input1')])]) + pipe = pe.Workflow(name="pipe") + mod1 = pe.Node(interface=EngineTestInterface(), name="mod1") + mod2 = pe.Node(interface=EngineTestInterface(), name="mod2") + pipe.connect([(mod1, mod2, [("output1", "input1")])]) try: - pipe.write_graph( - graph2use=graph, simple_form=simple, format='dot') + pipe.write_graph(graph2use=graph, simple_form=simple, format="dot") except Exception: - assert False, \ - 'Failed to plot {} {} graph'.format( - 'simple' if simple else 'detailed', graph) + assert False, "Failed to plot {} {} graph".format( + "simple" if simple else "detailed", graph + ) - assert os.path.exists('graph.dot') or os.path.exists( - 'graph_detailed.dot') + assert os.path.exists("graph.dot") or os.path.exists("graph_detailed.dot") try: - os.remove('graph.dot') + os.remove("graph.dot") except OSError: pass try: - os.remove('graph_detailed.dot') + os.remove("graph_detailed.dot") except OSError: pass @@ -450,88 +455,98 @@ def test_write_graph_runs(tmpdir): def test_deep_nested_write_graph_runs(tmpdir): tmpdir.chdir() - for graph in ('orig', 'flat', 'exec', 'hierarchical', 'colored'): + for graph in ("orig", "flat", "exec", "hierarchical", "colored"): for simple in (True, False): - pipe = pe.Workflow(name='pipe') + pipe = pe.Workflow(name="pipe") parent = pipe for depth in range(10): - sub = pe.Workflow(name='pipe_nest_{}'.format(depth)) + sub = pe.Workflow(name="pipe_nest_{}".format(depth)) parent.add_nodes([sub]) parent = sub - mod1 = pe.Node(interface=EngineTestInterface(), name='mod1') + mod1 = pe.Node(interface=EngineTestInterface(), name="mod1") parent.add_nodes([mod1]) try: - pipe.write_graph( - graph2use=graph, simple_form=simple, format='dot') + pipe.write_graph(graph2use=graph, simple_form=simple, format="dot") except Exception as e: - assert False, \ - 'Failed to plot {} {} deep graph: {!s}'.format( - 'simple' if 
simple else 'detailed', graph, e) + assert False, "Failed to plot {} {} deep graph: {!s}".format( + "simple" if simple else "detailed", graph, e + ) - assert os.path.exists('graph.dot') or os.path.exists( - 'graph_detailed.dot') + assert os.path.exists("graph.dot") or os.path.exists("graph_detailed.dot") try: - os.remove('graph.dot') + os.remove("graph.dot") except OSError: pass try: - os.remove('graph_detailed.dot') + os.remove("graph_detailed.dot") except OSError: pass + import networkx + # Format of the graph has slightly changed -graph_str = '""' if int(networkx.__version__.split('.')[0]) == 1 else '' +graph_str = '""' if int(networkx.__version__.split(".")[0]) == 1 else "" # examples of dot files used in the following test -dotfile_orig = ['strict digraph ' + graph_str + ' {\n', - '"mod1 (engine)";\n', - '"mod2 (engine)";\n', - '"mod1 (engine)" -> "mod2 (engine)";\n', - '}\n'] - -dotfile_detailed_orig = ['digraph structs {\n', - 'node [shape=record];\n', - 'pipemod1 [label="{IN}|{ mod1 | engine | }|{OUT| output1}"];\n', - 'pipemod2 [label="{IN| input1}|{ mod2 | engine | }|{OUT}"];\n', - 'pipemod1:outoutput1:e -> pipemod2:ininput1:w;\n', - '}'] - - -dotfile_hierarchical = ['digraph pipe{\n', - ' label="pipe";\n', - ' pipe_mod1[label="mod1 (engine)"];\n', - ' pipe_mod2[label="mod2 (engine)"];\n', - ' pipe_mod1 -> pipe_mod2;\n', - '}'] - -dotfile_colored = ['digraph pipe{\n', - ' label="pipe";\n', - ' pipe_mod1[label="mod1 (engine)", style=filled, fillcolor="#FFFFC8"];\n', - ' pipe_mod2[label="mod2 (engine)", style=filled, fillcolor="#FFFFC8"];\n', - ' pipe_mod1 -> pipe_mod2;\n', - '}'] +dotfile_orig = [ + "strict digraph " + graph_str + " {\n", + '"mod1 (engine)";\n', + '"mod2 (engine)";\n', + '"mod1 (engine)" -> "mod2 (engine)";\n', + "}\n", +] + +dotfile_detailed_orig = [ + "digraph structs {\n", + "node [shape=record];\n", + 'pipemod1 [label="{IN}|{ mod1 | engine | }|{OUT| output1}"];\n', + 'pipemod2 [label="{IN| input1}|{ mod2 | engine | }|{OUT}"];\n', + "pipemod1:outoutput1:e -> pipemod2:ininput1:w;\n", + "}", +] + + +dotfile_hierarchical = [ + "digraph pipe{\n", + ' label="pipe";\n', + ' pipe_mod1[label="mod1 (engine)"];\n', + ' pipe_mod2[label="mod2 (engine)"];\n', + " pipe_mod1 -> pipe_mod2;\n", + "}", +] + +dotfile_colored = [ + "digraph pipe{\n", + ' label="pipe";\n', + ' pipe_mod1[label="mod1 (engine)", style=filled, fillcolor="#FFFFC8"];\n', + ' pipe_mod2[label="mod2 (engine)", style=filled, fillcolor="#FFFFC8"];\n', + " pipe_mod1 -> pipe_mod2;\n", + "}", +] dotfiles = { "orig": dotfile_orig, "flat": dotfile_orig, "exec": dotfile_orig, "hierarchical": dotfile_hierarchical, - "colored": dotfile_colored - } + "colored": dotfile_colored, +} + @pytest.mark.parametrize("simple", [True, False]) -@pytest.mark.parametrize("graph_type", ['orig', 'flat', 'exec', 'hierarchical', 'colored']) +@pytest.mark.parametrize( + "graph_type", ["orig", "flat", "exec", "hierarchical", "colored"] +) def test_write_graph_dotfile(tmpdir, graph_type, simple): """ checking dot files for a workflow without iterables""" tmpdir.chdir() - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=EngineTestInterface(), name='mod1') - mod2 = pe.Node(interface=EngineTestInterface(), name='mod2') - pipe.connect([(mod1, mod2, [('output1', 'input1')])]) - pipe.write_graph( - graph2use=graph_type, simple_form=simple, format='dot') + pipe = pe.Workflow(name="pipe") + mod1 = pe.Node(interface=EngineTestInterface(), name="mod1") + mod2 = pe.Node(interface=EngineTestInterface(), name="mod2") + pipe.connect([(mod1, 
mod2, [("output1", "input1")])]) + pipe.write_graph(graph2use=graph_type, simple_form=simple, format="dot") with open("graph.dot") as f: graph_str = f.read() @@ -543,12 +558,19 @@ def test_write_graph_dotfile(tmpdir, graph_type, simple): # if simple=False graph.dot uses longer names for line in dotfiles[graph_type]: if graph_type in ["hierarchical", "colored"]: - assert line.replace("mod1 (engine)", "mod1.EngineTestInterface.engine").replace( - "mod2 (engine)", "mod2.EngineTestInterface.engine") in graph_str + assert ( + line.replace( + "mod1 (engine)", "mod1.EngineTestInterface.engine" + ).replace("mod2 (engine)", "mod2.EngineTestInterface.engine") + in graph_str + ) else: - assert line.replace( - "mod1 (engine)", "pipe.mod1.EngineTestInterface.engine").replace( - "mod2 (engine)", "pipe.mod2.EngineTestInterface.engine") in graph_str + assert ( + line.replace( + "mod1 (engine)", "pipe.mod1.EngineTestInterface.engine" + ).replace("mod2 (engine)", "pipe.mod2.EngineTestInterface.engine") + in graph_str + ) # graph_detailed is the same for orig, flat, exec (if no iterables) # graph_detailed is not created for hierachical or colored @@ -561,59 +583,64 @@ def test_write_graph_dotfile(tmpdir, graph_type, simple): # examples of dot files used in the following test dotfile_detailed_iter_exec = [ - 'digraph structs {\n', - 'node [shape=record];\n', + "digraph structs {\n", + "node [shape=record];\n", 'pipemod1aIa1 [label="{IN}|{ a1 | engine | mod1.aI }|{OUT| output1}"];\n', 'pipemod2a1 [label="{IN| input1}|{ a1 | engine | mod2 }|{OUT}"];\n', 'pipemod1aIa0 [label="{IN}|{ a0 | engine | mod1.aI }|{OUT| output1}"];\n', 'pipemod2a0 [label="{IN| input1}|{ a0 | engine | mod2 }|{OUT}"];\n', - 'pipemod1aIa0:outoutput1:e -> pipemod2a0:ininput1:w;\n', - 'pipemod1aIa1:outoutput1:e -> pipemod2a1:ininput1:w;\n', - '}'] + "pipemod1aIa0:outoutput1:e -> pipemod2a0:ininput1:w;\n", + "pipemod1aIa1:outoutput1:e -> pipemod2a1:ininput1:w;\n", + "}", +] dotfile_iter_hierarchical = [ - 'digraph pipe{\n', + "digraph pipe{\n", ' label="pipe";\n', ' pipe_mod1[label="mod1 (engine)", shape=box3d,style=filled, color=black, colorscheme=greys7 fillcolor=2];\n', ' pipe_mod2[label="mod2 (engine)"];\n', - ' pipe_mod1 -> pipe_mod2;\n', - '}'] + " pipe_mod1 -> pipe_mod2;\n", + "}", +] dotfile_iter_colored = [ - 'digraph pipe{\n', + "digraph pipe{\n", ' label="pipe";\n', ' pipe_mod1[label="mod1 (engine)", shape=box3d,style=filled, color=black, colorscheme=greys7 fillcolor=2];\n', ' pipe_mod2[label="mod2 (engine)", style=filled, fillcolor="#FFFFC8"];\n', - ' pipe_mod1 -> pipe_mod2;\n', - '}'] + " pipe_mod1 -> pipe_mod2;\n", + "}", +] dotfiles_iter = { "orig": dotfile_orig, "flat": dotfile_orig, "exec": dotfile_orig, "hierarchical": dotfile_iter_hierarchical, - "colored": dotfile_iter_colored - } + "colored": dotfile_iter_colored, +} dotfiles_detailed_iter = { "orig": dotfile_detailed_orig, "flat": dotfile_detailed_orig, - "exec": dotfile_detailed_iter_exec - } + "exec": dotfile_detailed_iter_exec, +} + @pytest.mark.parametrize("simple", [True, False]) -@pytest.mark.parametrize("graph_type", ['orig', 'flat', 'exec', 'hierarchical', 'colored']) +@pytest.mark.parametrize( + "graph_type", ["orig", "flat", "exec", "hierarchical", "colored"] +) def test_write_graph_dotfile_iterables(tmpdir, graph_type, simple): """ checking dot files for a workflow with iterables""" tmpdir.chdir() - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=EngineTestInterface(), name='mod1') - mod1.iterables = ('input1', [1, 2]) - mod2 = 
pe.Node(interface=EngineTestInterface(), name='mod2') - pipe.connect([(mod1, mod2, [('output1', 'input1')])]) - pipe.write_graph( - graph2use=graph_type, simple_form=simple, format='dot') + pipe = pe.Workflow(name="pipe") + mod1 = pe.Node(interface=EngineTestInterface(), name="mod1") + mod1.iterables = ("input1", [1, 2]) + mod2 = pe.Node(interface=EngineTestInterface(), name="mod2") + pipe.connect([(mod1, mod2, [("output1", "input1")])]) + pipe.write_graph(graph2use=graph_type, simple_form=simple, format="dot") with open("graph.dot") as f: graph_str = f.read() @@ -625,12 +652,19 @@ def test_write_graph_dotfile_iterables(tmpdir, graph_type, simple): # if simple=False graph.dot uses longer names for line in dotfiles_iter[graph_type]: if graph_type in ["hierarchical", "colored"]: - assert line.replace("mod1 (engine)", "mod1.EngineTestInterface.engine").replace( - "mod2 (engine)", "mod2.EngineTestInterface.engine") in graph_str + assert ( + line.replace( + "mod1 (engine)", "mod1.EngineTestInterface.engine" + ).replace("mod2 (engine)", "mod2.EngineTestInterface.engine") + in graph_str + ) else: - assert line.replace( - "mod1 (engine)", "pipe.mod1.EngineTestInterface.engine").replace( - "mod2 (engine)", "pipe.mod2.EngineTestInterface.engine") in graph_str + assert ( + line.replace( + "mod1 (engine)", "pipe.mod1.EngineTestInterface.engine" + ).replace("mod2 (engine)", "pipe.mod2.EngineTestInterface.engine") + in graph_str + ) # graph_detailed is not created for hierachical or colored if graph_type not in ["hierarchical", "colored"]: @@ -640,7 +674,6 @@ def test_write_graph_dotfile_iterables(tmpdir, graph_type, simple): assert line in graph_str - def test_io_subclass(): """Ensure any io subclass allows dynamic traits""" from nipype.interfaces.io import IOBase @@ -652,25 +685,26 @@ class TestKV(IOBase): def _list_outputs(self): outputs = {} - outputs['test'] = 1 - outputs['foo'] = 'bar' + outputs["test"] = 1 + outputs["foo"] = "bar" return outputs - wf = pe.Workflow('testkv') + wf = pe.Workflow("testkv") def testx2(test): return test * 2 - kvnode = pe.Node(TestKV(), name='testkv') + kvnode = pe.Node(TestKV(), name="testkv") from nipype.interfaces.utility import Function + func = pe.Node( - Function( - input_names=['test'], output_names=['test2'], function=testx2), - name='func') + Function(input_names=["test"], output_names=["test2"], function=testx2), + name="func", + ) exception_not_raised = True try: - wf.connect(kvnode, 'test', func, 'test') + wf.connect(kvnode, "test", func, "test") except Exception as e: - if 'Module testkv has no output called test' in e: + if "Module testkv has no output called test" in e: exception_not_raised = False assert exception_not_raised diff --git a/nipype/pipeline/engine/tests/test_join.py b/nipype/pipeline/engine/tests/test_join.py index f59d9d4bc1..b14d79a366 100644 --- a/nipype/pipeline/engine/tests/test_join.py +++ b/nipype/pipeline/engine/tests/test_join.py @@ -13,8 +13,7 @@ class PickFirstSpec(nib.TraitedSpec): - in_files = traits.List( - File(exists=True), argstr="%s", position=2, mandatory=True) + in_files = traits.List(File(exists=True), argstr="%s", position=2, mandatory=True) class PickFirstOutSpec(nib.TraitedSpec): @@ -31,17 +30,17 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['output1'] = self.inputs.in_files[0] + outputs["output1"] = self.inputs.in_files[0] return outputs class IncrementInputSpec(nib.TraitedSpec): - input1 = nib.traits.Int(mandatory=True, desc='input') - inc = 
nib.traits.Int(usedefault=True, default_value=1, desc='increment')
+    input1 = nib.traits.Int(mandatory=True, desc="input")
+    inc = nib.traits.Int(usedefault=True, default_value=1, desc="increment")
 
 
 class IncrementOutputSpec(nib.TraitedSpec):
-    output1 = nib.traits.Int(desc='ouput')
+    output1 = nib.traits.Int(desc="output")
 
 
 class IncrementInterface(nib.SimpleInterface):
@@ -50,7 +49,7 @@ class IncrementInterface(nib.SimpleInterface):
 
     def _run_interface(self, runtime):
         runtime.returncode = 0
-        self._results['output1'] = self.inputs.input1 + self.inputs.inc
+        self._results["output1"] = self.inputs.input1 + self.inputs.inc
         return runtime
 
 
@@ -60,12 +59,12 @@ def _run_interface(self, runtime):
 
 
 class SumInputSpec(nib.TraitedSpec):
-    input1 = nib.traits.List(nib.traits.Int, mandatory=True, desc='input')
+    input1 = nib.traits.List(nib.traits.Int, mandatory=True, desc="input")
 
 
 class SumOutputSpec(nib.TraitedSpec):
-    output1 = nib.traits.Int(desc='ouput')
-    operands = nib.traits.List(nib.traits.Int, desc='operands')
+    output1 = nib.traits.Int(desc="output")
+    operands = nib.traits.List(nib.traits.Int, desc="operands")
 
 
 class SumInterface(nib.SimpleInterface):
@@ -76,8 +75,8 @@ def _run_interface(self, runtime):
         global _sum
         global _sum_operands
         runtime.returncode = 0
-        self._results['operands'] = self.inputs.input1
-        self._results['output1'] = sum(self.inputs.input1)
+        self._results["operands"] = self.inputs.input1
+        self._results["output1"] = sum(self.inputs.input1)
         _sum_operands.append(self.inputs.input1)
         _sums.append(sum(self.inputs.input1))
         return runtime
@@ -88,11 +87,11 @@ def _run_interface(self, runtime):
 
 
 class SetInputSpec(nib.TraitedSpec):
-    input1 = nib.traits.Set(nib.traits.Int, mandatory=True, desc='input')
+    input1 = nib.traits.Set(nib.traits.Int, mandatory=True, desc="input")
 
 
 class SetOutputSpec(nib.TraitedSpec):
-    output1 = nib.traits.Int(desc='ouput')
+    output1 = nib.traits.Int(desc="output")
 
 
 class SetInterface(nib.BaseInterface):
@@ -106,7 +105,7 @@ def _run_interface(self, runtime):
     def _list_outputs(self):
         global _set_len
         outputs = self._outputs().get()
-        _set_len = outputs['output1'] = len(self.inputs.input1)
+        _set_len = outputs["output1"] = len(self.inputs.input1)
         return outputs
@@ -115,12 +114,12 @@ def _list_outputs(self):
 
 
 class ProductInputSpec(nib.TraitedSpec):
-    input1 = nib.traits.Int(mandatory=True, desc='input1')
-    input2 = nib.traits.Int(mandatory=True, desc='input2')
+    input1 = nib.traits.Int(mandatory=True, desc="input1")
+    input2 = nib.traits.Int(mandatory=True, desc="input2")
 
 
 class ProductOutputSpec(nib.TraitedSpec):
-    output1 = nib.traits.Int(mandatory=True, desc='output')
+    output1 = nib.traits.Int(mandatory=True, desc="output")
 
 
 class ProductInterface(nib.BaseInterface):
@@ -134,12 +133,12 @@ def _run_interface(self, runtime):
     def _list_outputs(self):
         global _products
         outputs = self._outputs().get()
-        outputs['output1'] = self.inputs.input1 * self.inputs.input2
-        _products.append(outputs['output1'])
+        outputs["output1"] = self.inputs.input1 * self.inputs.input2
+        _products.append(outputs["output1"])
         return outputs
 
 
-@pytest.mark.parametrize('needed_outputs', ['true', 'false'])
+@pytest.mark.parametrize("needed_outputs", ["true", "false"])
 def test_join_expansion(tmpdir, needed_outputs):
     global _sums
     global _sum_operands
@@ -151,40 +150,40 @@ def test_join_expansion(tmpdir, needed_outputs):
     _sum_operands = []
     _sums = []
 
-    prev_state = config.get('execution', 'remove_unnecessary_outputs')
-    config.set('execution', 'remove_unnecessary_outputs', needed_outputs)
+
prev_state = config.get("execution", "remove_unnecessary_outputs") + config.set("execution", "remove_unnecessary_outputs", needed_outputs) # Make the workflow. - wf = pe.Workflow(name='test') + wf = pe.Workflow(name="test") # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') - inputspec.iterables = [('n', [1, 2])] + inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") + inputspec.iterables = [("n", [1, 2])] # a pre-join node in the iterated path - pre_join1 = pe.Node(IncrementInterface(), name='pre_join1') + pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") # another pre-join node in the iterated path - pre_join2 = pe.Node(IncrementInterface(), name='pre_join2') + pre_join2 = pe.Node(IncrementInterface(), name="pre_join2") # the join node join = pe.JoinNode( - SumInterface(), - joinsource='inputspec', - joinfield='input1', - name='join') + SumInterface(), joinsource="inputspec", joinfield="input1", name="join" + ) # an uniterated post-join node - post_join1 = pe.Node(IncrementInterface(), name='post_join1') + post_join1 = pe.Node(IncrementInterface(), name="post_join1") # a post-join node in the iterated path - post_join2 = pe.Node(ProductInterface(), name='post_join2') - - wf.connect([ - (inputspec, pre_join1, [('n', 'input1')]), - (pre_join1, pre_join2, [('output1', 'input1')]), - (pre_join1, post_join2, [('output1', 'input2')]), - (pre_join2, join, [('output1', 'input1')]), - (join, post_join1, [('output1', 'input1')]), - (join, post_join2, [('output1', 'input1')]), - ]) + post_join2 = pe.Node(ProductInterface(), name="post_join2") + + wf.connect( + [ + (inputspec, pre_join1, [("n", "input1")]), + (pre_join1, pre_join2, [("output1", "input1")]), + (pre_join1, post_join2, [("output1", "input2")]), + (pre_join2, join, [("output1", "input1")]), + (join, post_join1, [("output1", "input1")]), + (join, post_join2, [("output1", "input1")]), + ] + ) result = wf.run() # the two expanded pre-join predecessor nodes feed into one join node - joins = [node for node in result.nodes() if node.name == 'join'] + joins = [node for node in result.nodes() if node.name == "join"] assert len(joins) == 1, "The number of join result nodes is incorrect." # the expanded graph contains 2 * 2 = 4 iteration pre-join nodes, 1 join # node, 1 non-iterated post-join node and 2 * 1 iteration post-join nodes. @@ -193,15 +192,14 @@ def test_join_expansion(tmpdir, needed_outputs): # the join Sum result is (1 + 1 + 1) + (2 + 1 + 1) assert len(_sums) == 1, "The number of join outputs is incorrect" - assert _sums[ - 0] == 7, "The join Sum output value is incorrect: %s." % _sums[0] + assert _sums[0] == 7, "The join Sum output value is incorrect: %s." % _sums[0] # the join input preserves the iterables input order - assert _sum_operands[0] == [3, 4], \ + assert _sum_operands[0] == [3, 4], ( "The join Sum input is incorrect: %s." 
% _sum_operands[0] + ) # there are two iterations of the post-join node in the iterable path - assert len(_products) == 2,\ - "The number of iterated post-join outputs is incorrect" - config.set('execution', 'remove_unnecessary_outputs', prev_state) + assert len(_products) == 2, "The number of iterated post-join outputs is incorrect" + config.set("execution", "remove_unnecessary_outputs", prev_state) def test_node_joinsource(tmpdir): @@ -209,15 +207,17 @@ def test_node_joinsource(tmpdir): tmpdir.chdir() # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') - inputspec.iterables = [('n', [1, 2])] + inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") + inputspec.iterables = [("n", [1, 2])] # the join node join = pe.JoinNode( - SetInterface(), joinsource=inputspec, joinfield='input1', name='join') + SetInterface(), joinsource=inputspec, joinfield="input1", name="join" + ) # the joinsource is the inputspec name - assert join.joinsource == inputspec.name, \ - "The joinsource is not set to the node name." + assert ( + join.joinsource == inputspec.name + ), "The joinsource is not set to the node name." def test_set_join_node(tmpdir): @@ -225,26 +225,23 @@ def test_set_join_node(tmpdir): tmpdir.chdir() # Make the workflow. - wf = pe.Workflow(name='test') + wf = pe.Workflow(name="test") # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') - inputspec.iterables = [('n', [1, 2, 1, 3, 2])] + inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") + inputspec.iterables = [("n", [1, 2, 1, 3, 2])] # a pre-join node in the iterated path - pre_join1 = pe.Node(IncrementInterface(), name='pre_join1') - wf.connect(inputspec, 'n', pre_join1, 'input1') + pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") + wf.connect(inputspec, "n", pre_join1, "input1") # the set join node join = pe.JoinNode( - SetInterface(), - joinsource='inputspec', - joinfield='input1', - name='join') - wf.connect(pre_join1, 'output1', join, 'input1') + SetInterface(), joinsource="inputspec", joinfield="input1", name="join" + ) + wf.connect(pre_join1, "output1", join, "input1") wf.run() # the join length is the number of unique inputs - assert _set_len == 3, \ - "The join Set output value is incorrect: %s." % _set_len + assert _set_len == 3, "The join Set output value is incorrect: %s." % _set_len def test_unique_join_node(tmpdir): @@ -254,26 +251,28 @@ def test_unique_join_node(tmpdir): tmpdir.chdir() # Make the workflow. 
- wf = pe.Workflow(name='test') + wf = pe.Workflow(name="test") # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') - inputspec.iterables = [('n', [3, 1, 2, 1, 3])] + inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") + inputspec.iterables = [("n", [3, 1, 2, 1, 3])] # a pre-join node in the iterated path - pre_join1 = pe.Node(IncrementInterface(), name='pre_join1') - wf.connect(inputspec, 'n', pre_join1, 'input1') + pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") + wf.connect(inputspec, "n", pre_join1, "input1") # the set join node join = pe.JoinNode( SumInterface(), - joinsource='inputspec', - joinfield='input1', + joinsource="inputspec", + joinfield="input1", unique=True, - name='join') - wf.connect(pre_join1, 'output1', join, 'input1') + name="join", + ) + wf.connect(pre_join1, "output1", join, "input1") wf.run() - assert _sum_operands[0] == [4, 2, 3], \ + assert _sum_operands[0] == [4, 2, 3], ( "The unique join output value is incorrect: %s." % _sum_operands[0] + ) def test_multiple_join_nodes(tmpdir): @@ -283,47 +282,48 @@ def test_multiple_join_nodes(tmpdir): tmpdir.chdir() # Make the workflow. - wf = pe.Workflow(name='test') + wf = pe.Workflow(name="test") # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') - inputspec.iterables = [('n', [1, 2, 3])] + inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") + inputspec.iterables = [("n", [1, 2, 3])] # a pre-join node in the iterated path - pre_join1 = pe.Node(IncrementInterface(), name='pre_join1') - wf.connect(inputspec, 'n', pre_join1, 'input1') + pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") + wf.connect(inputspec, "n", pre_join1, "input1") # the first join node join1 = pe.JoinNode( - IdentityInterface(fields=['vector']), - joinsource='inputspec', - joinfield='vector', - name='join1') - wf.connect(pre_join1, 'output1', join1, 'vector') + IdentityInterface(fields=["vector"]), + joinsource="inputspec", + joinfield="vector", + name="join1", + ) + wf.connect(pre_join1, "output1", join1, "vector") # an uniterated post-join node - post_join1 = pe.Node(SumInterface(), name='post_join1') - wf.connect(join1, 'vector', post_join1, 'input1') + post_join1 = pe.Node(SumInterface(), name="post_join1") + wf.connect(join1, "vector", post_join1, "input1") # the downstream join node connected to both an upstream join # path output and a separate input in the iterated path join2 = pe.JoinNode( - IdentityInterface(fields=['vector', 'scalar']), - joinsource='inputspec', - joinfield='vector', - name='join2') - wf.connect(pre_join1, 'output1', join2, 'vector') - wf.connect(post_join1, 'output1', join2, 'scalar') + IdentityInterface(fields=["vector", "scalar"]), + joinsource="inputspec", + joinfield="vector", + name="join2", + ) + wf.connect(pre_join1, "output1", join2, "vector") + wf.connect(post_join1, "output1", join2, "scalar") # a second post-join node - post_join2 = pe.Node(SumInterface(), name='post_join2') - wf.connect(join2, 'vector', post_join2, 'input1') + post_join2 = pe.Node(SumInterface(), name="post_join2") + wf.connect(join2, "vector", post_join2, "input1") # a third post-join node - post_join3 = pe.Node(ProductInterface(), name='post_join3') - wf.connect(post_join2, 'output1', post_join3, 'input1') - wf.connect(join2, 'scalar', post_join3, 'input2') + post_join3 = pe.Node(ProductInterface(), name="post_join3") + wf.connect(post_join2, "output1", post_join3, "input1") + 
wf.connect(join2, "scalar", post_join3, "input2") result = wf.run() # The expanded graph contains one pre_join1 replicate per inputspec # replicate and one of each remaining node = 3 + 5 = 8 nodes. # The replicated inputspec nodes are factored out of the expansion. - assert len(result.nodes()) == 8, \ - "The number of expanded nodes is incorrect." + assert len(result.nodes()) == 8, "The number of expanded nodes is incorrect." # The outputs are: # pre_join1: [2, 3, 4] # post_join1: 9 @@ -340,33 +340,34 @@ def test_identity_join_node(tmpdir): tmpdir.chdir() # Make the workflow. - wf = pe.Workflow(name='test') + wf = pe.Workflow(name="test") # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') - inputspec.iterables = [('n', [1, 2, 3])] + inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") + inputspec.iterables = [("n", [1, 2, 3])] # a pre-join node in the iterated path - pre_join1 = pe.Node(IncrementInterface(), name='pre_join1') - wf.connect(inputspec, 'n', pre_join1, 'input1') + pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") + wf.connect(inputspec, "n", pre_join1, "input1") # the IdentityInterface join node join = pe.JoinNode( - IdentityInterface(fields=['vector']), - joinsource='inputspec', - joinfield='vector', - name='join') - wf.connect(pre_join1, 'output1', join, 'vector') + IdentityInterface(fields=["vector"]), + joinsource="inputspec", + joinfield="vector", + name="join", + ) + wf.connect(pre_join1, "output1", join, "vector") # an uniterated post-join node - post_join1 = pe.Node(SumInterface(), name='post_join1') - wf.connect(join, 'vector', post_join1, 'input1') + post_join1 = pe.Node(SumInterface(), name="post_join1") + wf.connect(join, "vector", post_join1, "input1") result = wf.run() # the expanded graph contains 1 * 3 iteration pre-join nodes, 1 join # node and 1 post-join node. Nipype factors away the iterable input # IdentityInterface but keeps the join IdentityInterface. - assert len(result.nodes()) == 5, \ - "The number of expanded nodes is incorrect." - assert _sum_operands[0] == [2, 3, 4], \ + assert len(result.nodes()) == 5, "The number of expanded nodes is incorrect." + assert _sum_operands[0] == [2, 3, 4], ( "The join Sum input is incorrect: %s." % _sum_operands[0] + ) def test_multifield_join_node(tmpdir): @@ -376,38 +377,38 @@ def test_multifield_join_node(tmpdir): tmpdir.chdir() # Make the workflow. 
- wf = pe.Workflow(name='test') + wf = pe.Workflow(name="test") # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['m', 'n']), name='inputspec') - inputspec.iterables = [('m', [1, 2]), ('n', [3, 4])] + inputspec = pe.Node(IdentityInterface(fields=["m", "n"]), name="inputspec") + inputspec.iterables = [("m", [1, 2]), ("n", [3, 4])] # two pre-join nodes in a parallel iterated path - inc1 = pe.Node(IncrementInterface(), name='inc1') - wf.connect(inputspec, 'm', inc1, 'input1') - inc2 = pe.Node(IncrementInterface(), name='inc2') - wf.connect(inputspec, 'n', inc2, 'input1') + inc1 = pe.Node(IncrementInterface(), name="inc1") + wf.connect(inputspec, "m", inc1, "input1") + inc2 = pe.Node(IncrementInterface(), name="inc2") + wf.connect(inputspec, "n", inc2, "input1") # the join node join = pe.JoinNode( - IdentityInterface(fields=['vector1', 'vector2']), - joinsource='inputspec', - name='join') - wf.connect(inc1, 'output1', join, 'vector1') - wf.connect(inc2, 'output1', join, 'vector2') + IdentityInterface(fields=["vector1", "vector2"]), + joinsource="inputspec", + name="join", + ) + wf.connect(inc1, "output1", join, "vector1") + wf.connect(inc2, "output1", join, "vector2") # a post-join node - prod = pe.MapNode( - ProductInterface(), name='prod', iterfield=['input1', 'input2']) - wf.connect(join, 'vector1', prod, 'input1') - wf.connect(join, 'vector2', prod, 'input2') + prod = pe.MapNode(ProductInterface(), name="prod", iterfield=["input1", "input2"]) + wf.connect(join, "vector1", prod, "input1") + wf.connect(join, "vector2", prod, "input2") result = wf.run() # the iterables are expanded as the cartesian product of the iterables values. # thus, the expanded graph contains 2 * (2 * 2) iteration pre-join nodes, 1 join # node and 1 post-join node. - assert len(result.nodes()) == 10, \ - "The number of expanded nodes is incorrect." + assert len(result.nodes()) == 10, "The number of expanded nodes is incorrect." # the product inputs are [2, 4], [2, 5], [3, 4], [3, 5] - assert set(_products) == set([8, 10, 12, 15]), \ + assert set(_products) == set([8, 10, 12, 15]), ( "The post-join products is incorrect: %s." % _products + ) def test_synchronize_join_node(tmpdir): @@ -417,39 +418,37 @@ def test_synchronize_join_node(tmpdir): tmpdir.chdir() # Make the workflow. 
-    wf = pe.Workflow(name='test')
+    wf = pe.Workflow(name="test")
 
     # the iterated input node
-    inputspec = pe.Node(IdentityInterface(fields=['m', 'n']), name='inputspec')
-    inputspec.iterables = [('m', [1, 2]), ('n', [3, 4])]
+    inputspec = pe.Node(IdentityInterface(fields=["m", "n"]), name="inputspec")
+    inputspec.iterables = [("m", [1, 2]), ("n", [3, 4])]
     inputspec.synchronize = True
 
     # two pre-join nodes in a parallel iterated path
-    inc1 = pe.Node(IncrementInterface(), name='inc1')
-    wf.connect(inputspec, 'm', inc1, 'input1')
-    inc2 = pe.Node(IncrementInterface(), name='inc2')
-    wf.connect(inputspec, 'n', inc2, 'input1')
+    inc1 = pe.Node(IncrementInterface(), name="inc1")
+    wf.connect(inputspec, "m", inc1, "input1")
+    wf.connect(inputspec, "n", inc2, "input1") if False else None
     # the join node
     join = pe.JoinNode(
-        IdentityInterface(fields=['vector1', 'vector2']),
-        joinsource='inputspec',
-        name='join')
-    wf.connect(inc1, 'output1', join, 'vector1')
-    wf.connect(inc2, 'output1', join, 'vector2')
+        IdentityInterface(fields=["vector1", "vector2"]),
+        joinsource="inputspec",
+        name="join",
+    )
+    wf.connect(inc1, "output1", join, "vector1")
+    wf.connect(inc2, "output1", join, "vector2")
     # a post-join node
-    prod = pe.MapNode(
-        ProductInterface(), name='prod', iterfield=['input1', 'input2'])
-    wf.connect(join, 'vector1', prod, 'input1')
-    wf.connect(join, 'vector2', prod, 'input2')
+    prod = pe.MapNode(ProductInterface(), name="prod", iterfield=["input1", "input2"])
+    wf.connect(join, "vector1", prod, "input1")
+    wf.connect(join, "vector2", prod, "input2")
 
     result = wf.run()
 
     # there are 3 iterables expansions.
     # thus, the expanded graph contains 2 * 2 iteration pre-join nodes, 1 join
     # node and 1 post-join node.
-    assert len(result.nodes()) == 6, \
-        "The number of expanded nodes is incorrect."
+    assert len(result.nodes()) == 6, "The number of expanded nodes is incorrect."
     # the product inputs are [2, 3] and [4, 5]
-    assert _products == [8, 15], \
-        "The post-join products is incorrect: %s." % _products
+    assert _products == [8, 15], "The post-join products are incorrect: %s." % _products
 
 
 def test_itersource_join_source_node(tmpdir):
@@ -457,31 +456,32 @@ def test_itersource_join_source_node(tmpdir):
    """Test a join on an input node which has an ``itersource``."""
     tmpdir.chdir()
 
     # Make the workflow.
- wf = pe.Workflow(name='test') + wf = pe.Workflow(name="test") # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') - inputspec.iterables = [('n', [1, 2])] + inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") + inputspec.iterables = [("n", [1, 2])] # an intermediate node in the first iteration path - pre_join1 = pe.Node(IncrementInterface(), name='pre_join1') - wf.connect(inputspec, 'n', pre_join1, 'input1') + pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") + wf.connect(inputspec, "n", pre_join1, "input1") # an iterable pre-join node with an itersource - pre_join2 = pe.Node(ProductInterface(), name='pre_join2') - pre_join2.itersource = ('inputspec', 'n') - pre_join2.iterables = ('input1', {1: [3, 4], 2: [5, 6]}) - wf.connect(pre_join1, 'output1', pre_join2, 'input2') + pre_join2 = pe.Node(ProductInterface(), name="pre_join2") + pre_join2.itersource = ("inputspec", "n") + pre_join2.iterables = ("input1", {1: [3, 4], 2: [5, 6]}) + wf.connect(pre_join1, "output1", pre_join2, "input2") # an intermediate node in the second iteration path - pre_join3 = pe.Node(IncrementInterface(), name='pre_join3') - wf.connect(pre_join2, 'output1', pre_join3, 'input1') + pre_join3 = pe.Node(IncrementInterface(), name="pre_join3") + wf.connect(pre_join2, "output1", pre_join3, "input1") # the join node join = pe.JoinNode( - IdentityInterface(fields=['vector']), - joinsource='pre_join2', - joinfield='vector', - name='join') - wf.connect(pre_join3, 'output1', join, 'vector') + IdentityInterface(fields=["vector"]), + joinsource="pre_join2", + joinfield="vector", + name="join", + ) + wf.connect(pre_join3, "output1", join, "vector") # a join successor node - post_join1 = pe.Node(SumInterface(), name='post_join1') - wf.connect(join, 'vector', post_join1, 'input1') + post_join1 = pe.Node(SumInterface(), name="post_join1") + wf.connect(join, "vector", post_join1, "input1") result = wf.run() @@ -494,18 +494,19 @@ def test_itersource_join_source_node(tmpdir): # 2 + (2 * 2) + 4 + 2 + 2 = 14 expansion graph nodes. # Nipype factors away the iterable input # IdentityInterface but keeps the join IdentityInterface. - assert len(result.nodes()) == 14, \ - "The number of expanded nodes is incorrect." + assert len(result.nodes()) == 14, "The number of expanded nodes is incorrect." # The first join inputs are: # 1 + (3 * 2) and 1 + (4 * 2) # The second join inputs are: # 1 + (5 * 3) and 1 + (6 * 3) # the post-join nodes execution order is indeterminate; # therefore, compare the lists item-wise. - assert [16, 19] in _sum_operands, \ + assert [16, 19] in _sum_operands, ( "The join Sum input is incorrect: %s." % _sum_operands - assert [7, 9] in _sum_operands, \ + ) + assert [7, 9] in _sum_operands, ( "The join Sum input is incorrect: %s." % _sum_operands + ) def test_itersource_two_join_nodes(tmpdir): @@ -514,68 +515,69 @@ def test_itersource_two_join_nodes(tmpdir): tmpdir.chdir() # Make the workflow. 
- wf = pe.Workflow(name='test') + wf = pe.Workflow(name="test") # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') - inputspec.iterables = [('n', [1, 2])] + inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") + inputspec.iterables = [("n", [1, 2])] # an intermediate node in the first iteration path - pre_join1 = pe.Node(IncrementInterface(), name='pre_join1') - wf.connect(inputspec, 'n', pre_join1, 'input1') + pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") + wf.connect(inputspec, "n", pre_join1, "input1") # an iterable pre-join node with an itersource - pre_join2 = pe.Node(ProductInterface(), name='pre_join2') - pre_join2.itersource = ('inputspec', 'n') - pre_join2.iterables = ('input1', {1: [3, 4], 2: [5, 6]}) - wf.connect(pre_join1, 'output1', pre_join2, 'input2') + pre_join2 = pe.Node(ProductInterface(), name="pre_join2") + pre_join2.itersource = ("inputspec", "n") + pre_join2.iterables = ("input1", {1: [3, 4], 2: [5, 6]}) + wf.connect(pre_join1, "output1", pre_join2, "input2") # an intermediate node in the second iteration path - pre_join3 = pe.Node(IncrementInterface(), name='pre_join3') - wf.connect(pre_join2, 'output1', pre_join3, 'input1') + pre_join3 = pe.Node(IncrementInterface(), name="pre_join3") + wf.connect(pre_join2, "output1", pre_join3, "input1") # the first join node join1 = pe.JoinNode( - IdentityInterface(fields=['vector']), - joinsource='pre_join2', - joinfield='vector', - name='join1') - wf.connect(pre_join3, 'output1', join1, 'vector') + IdentityInterface(fields=["vector"]), + joinsource="pre_join2", + joinfield="vector", + name="join1", + ) + wf.connect(pre_join3, "output1", join1, "vector") # a join successor node - post_join1 = pe.Node(SumInterface(), name='post_join1') - wf.connect(join1, 'vector', post_join1, 'input1') + post_join1 = pe.Node(SumInterface(), name="post_join1") + wf.connect(join1, "vector", post_join1, "input1") # a summary join node join2 = pe.JoinNode( - IdentityInterface(fields=['vector']), - joinsource='inputspec', - joinfield='vector', - name='join2') - wf.connect(post_join1, 'output1', join2, 'vector') + IdentityInterface(fields=["vector"]), + joinsource="inputspec", + joinfield="vector", + name="join2", + ) + wf.connect(post_join1, "output1", join2, "vector") result = wf.run() # the expanded graph contains the 14 test_itersource_join_source_node # nodes plus the summary join node. - assert len(result.nodes()) == 15, \ - "The number of expanded nodes is incorrect." + assert len(result.nodes()) == 15, "The number of expanded nodes is incorrect." def test_set_join_node_file_input(tmpdir): """Test collecting join inputs to a set.""" tmpdir.chdir() - open('test.nii', 'w+').close() - open('test2.nii', 'w+').close() + open("test.nii", "w+").close() + open("test2.nii", "w+").close() # Make the workflow. 
- wf = pe.Workflow(name='test') + wf = pe.Workflow(name="test") # the iterated input node - inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') - inputspec.iterables = [('n', [ - tmpdir.join('test.nii').strpath, - tmpdir.join('test2.nii').strpath - ])] + inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") + inputspec.iterables = [ + ("n", [tmpdir.join("test.nii").strpath, tmpdir.join("test2.nii").strpath]) + ] # a pre-join node in the iterated path - pre_join1 = pe.Node(IdentityInterface(fields=['n']), name='pre_join1') - wf.connect(inputspec, 'n', pre_join1, 'n') + pre_join1 = pe.Node(IdentityInterface(fields=["n"]), name="pre_join1") + wf.connect(inputspec, "n", pre_join1, "n") # the set join node join = pe.JoinNode( - PickFirst(), joinsource='inputspec', joinfield='in_files', name='join') - wf.connect(pre_join1, 'n', join, 'in_files') + PickFirst(), joinsource="inputspec", joinfield="in_files", name="join" + ) + wf.connect(pre_join1, "n", join, "in_files") wf.run() @@ -585,26 +587,27 @@ def test_nested_workflow_join(tmpdir): tmpdir.chdir() # Make the nested workflow - def nested_wf(i, name='smallwf'): + def nested_wf(i, name="smallwf"): # iterables with list of nums - inputspec = pe.Node(IdentityInterface(fields=['n']), name='inputspec') - inputspec.iterables = [('n', i)] + inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") + inputspec.iterables = [("n", i)] # increment each iterable before joining - pre_join = pe.Node(IncrementInterface(), name='pre_join') + pre_join = pe.Node(IncrementInterface(), name="pre_join") # rejoin nums into list join = pe.JoinNode( - IdentityInterface(fields=['n']), - joinsource='inputspec', - joinfield='n', - name='join') + IdentityInterface(fields=["n"]), + joinsource="inputspec", + joinfield="n", + name="join", + ) # define and connect nested workflow - wf = pe.Workflow(name='wf_%d' % i[0]) - wf.connect(inputspec, 'n', pre_join, 'input1') - wf.connect(pre_join, 'output1', join, 'n') + wf = pe.Workflow(name="wf_%d" % i[0]) + wf.connect(inputspec, "n", pre_join, "input1") + wf.connect(pre_join, "output1", join, "n") return wf # master wf - meta_wf = pe.Workflow(name='meta', base_dir='.') + meta_wf = pe.Workflow(name="meta", base_dir=".") # add each mini-workflow to master for i in [[1, 3], [2, 4]]: mini_wf = nested_wf(i) @@ -613,8 +616,7 @@ def nested_wf(i, name='smallwf'): result = meta_wf.run() # there should be six nodes in total - assert len(result.nodes()) == 6, \ - "The number of expanded nodes is incorrect." + assert len(result.nodes()) == 6, "The number of expanded nodes is incorrect." 
def test_name_prefix_join(tmpdir): @@ -623,14 +625,16 @@ def test_name_prefix_join(tmpdir): def sq(x): return x ** 2 - wf = pe.Workflow('wf', base_dir=tmpdir.strpath) - square = pe.Node(Function(function=sq), name='square') - square.iterables = [('x', [1, 2])] - square_join = pe.JoinNode(Merge(1, ravel_inputs=True), - name='square_join', - joinsource='square', - joinfield=['in1']) - wf.connect(square, 'out', square_join, "in1") + wf = pe.Workflow("wf", base_dir=tmpdir.strpath) + square = pe.Node(Function(function=sq), name="square") + square.iterables = [("x", [1, 2])] + square_join = pe.JoinNode( + Merge(1, ravel_inputs=True), + name="square_join", + joinsource="square", + joinfield=["in1"], + ) + wf.connect(square, "out", square_join, "in1") wf.run() @@ -640,27 +644,30 @@ def test_join_nestediters(tmpdir): def exponent(x, p): return x ** p - wf = pe.Workflow('wf', base_dir=tmpdir.strpath) - - xs = pe.Node(IdentityInterface(['x']), - iterables=[('x', [1, 2])], - name='xs') - ps = pe.Node(IdentityInterface(['p']), - iterables=[('p', [3, 4])], - name='ps') - exp = pe.Node(Function(function=exponent), name='exp') - exp_joinx = pe.JoinNode(Merge(1, ravel_inputs=True), - name='exp_joinx', - joinsource='xs', - joinfield=['in1']) - exp_joinp = pe.JoinNode(Merge(1, ravel_inputs=True), - name='exp_joinp', - joinsource='ps', - joinfield=['in1']) - wf.connect([ - (xs, exp, [('x', 'x')]), - (ps, exp, [('p', 'p')]), - (exp, exp_joinx, [('out', 'in1')]), - (exp_joinx, exp_joinp, [('out', 'in1')])]) + wf = pe.Workflow("wf", base_dir=tmpdir.strpath) + + xs = pe.Node(IdentityInterface(["x"]), iterables=[("x", [1, 2])], name="xs") + ps = pe.Node(IdentityInterface(["p"]), iterables=[("p", [3, 4])], name="ps") + exp = pe.Node(Function(function=exponent), name="exp") + exp_joinx = pe.JoinNode( + Merge(1, ravel_inputs=True), + name="exp_joinx", + joinsource="xs", + joinfield=["in1"], + ) + exp_joinp = pe.JoinNode( + Merge(1, ravel_inputs=True), + name="exp_joinp", + joinsource="ps", + joinfield=["in1"], + ) + wf.connect( + [ + (xs, exp, [("x", "x")]), + (ps, exp, [("p", "p")]), + (exp, exp_joinx, [("out", "in1")]), + (exp_joinx, exp_joinp, [("out", "in1")]), + ] + ) wf.run() diff --git a/nipype/pipeline/engine/tests/test_nodes.py b/nipype/pipeline/engine/tests/test_nodes.py index 395ede54bb..6fd88011ee 100644 --- a/nipype/pipeline/engine/tests/test_nodes.py +++ b/nipype/pipeline/engine/tests/test_nodes.py @@ -12,7 +12,7 @@ from .test_base import EngineTestInterface from .test_utils import UtilsTestInterface -''' +""" Test for order of iterables import nipype.pipeline.engine as pe @@ -42,7 +42,7 @@ wf1.run(inseries=True, createdirsonly=True) wf1.write_graph(graph2use='exec') -''' +""" ''' import nipype.pipeline.engine as pe import nipype.interfaces.spm as spm @@ -88,34 +88,42 @@ def test_node_init(): with pytest.raises(TypeError): pe.Node() with pytest.raises(IOError): - pe.Node(EngineTestInterface, name='test') + pe.Node(EngineTestInterface, name="test") def test_node_get_output(): - mod1 = pe.Node(interface=EngineTestInterface(), name='mod1') + mod1 = pe.Node(interface=EngineTestInterface(), name="mod1") mod1.inputs.input1 = 1 mod1.run() - assert mod1.get_output('output1') == [1, 1] + assert mod1.get_output("output1") == [1, 1] mod1._result = None - assert mod1.get_output('output1') == [1, 1] + assert mod1.get_output("output1") == [1, 1] def test_mapnode_iterfield_check(): - mod1 = pe.MapNode(EngineTestInterface(), iterfield=['input1'], name='mod1') + mod1 = pe.MapNode(EngineTestInterface(), 
iterfield=["input1"], name="mod1") with pytest.raises(ValueError): mod1._check_iterfield() mod1 = pe.MapNode( - EngineTestInterface(), iterfield=['input1', 'input2'], name='mod1') + EngineTestInterface(), iterfield=["input1", "input2"], name="mod1" + ) mod1.inputs.input1 = [1, 2] mod1.inputs.input2 = 3 with pytest.raises(ValueError): mod1._check_iterfield() -@pytest.mark.parametrize("x_inp, f_exp", - [(3, [6]), ([2, 3], [4, 6]), ((2, 3), [4, 6]), - (range(3), [0, 2, 4]), ("Str", ["StrStr"]), - (["Str1", "Str2"], ["Str1Str1", "Str2Str2"])]) +@pytest.mark.parametrize( + "x_inp, f_exp", + [ + (3, [6]), + ([2, 3], [4, 6]), + ((2, 3), [4, 6]), + (range(3), [0, 2, 4]), + ("Str", ["StrStr"]), + (["Str1", "Str2"], ["Str1Str1", "Str2Str2"]), + ], +) def test_mapnode_iterfield_type(x_inp, f_exp): from nipype import MapNode, Function @@ -139,19 +147,21 @@ def func1(in1): return in1 + 1 n1 = MapNode( - Function(input_names=['in1'], output_names=['out'], function=func1), - iterfield=['in1'], + Function(input_names=["in1"], output_names=["out"], function=func1), + iterfield=["in1"], nested=True, - name='n1') + name="n1", + ) n1.inputs.in1 = [[1, [2]], 3, [4, 5]] n1.run() - assert n1.get_output('out') == [[2, [3]], 4, [5, 6]] + assert n1.get_output("out") == [[2, [3]], 4, [5, 6]] n2 = MapNode( - Function(input_names=['in1'], output_names=['out'], function=func1), - iterfield=['in1'], + Function(input_names=["in1"], output_names=["out"], function=func1), + iterfield=["in1"], nested=False, - name='n1') + name="n1", + ) n2.inputs.in1 = [[1, [2]], 3, [4, 5]] with pytest.raises(Exception) as excinfo: @@ -167,27 +177,25 @@ def func1(in1): return in1 + 1 mapnode = MapNode( - Function(function=func1), - iterfield='in1', - name='mapnode', - n_procs=2, - mem_gb=2) + Function(function=func1), iterfield="in1", name="mapnode", n_procs=2, mem_gb=2 + ) mapnode.inputs.in1 = [1, 2] for idx, node in mapnode._make_nodes(): - for attr in ('overwrite', 'run_without_submitting', 'plugin_args'): + for attr in ("overwrite", "run_without_submitting", "plugin_args"): + assert getattr(node, attr) == getattr(mapnode, attr) + for attr in ("_n_procs", "_mem_gb"): assert getattr(node, attr) == getattr(mapnode, attr) - for attr in ('_n_procs', '_mem_gb'): - assert (getattr(node, attr) == getattr(mapnode, attr)) def test_node_hash(tmpdir): from nipype.interfaces.utility import Function + tmpdir.chdir() config.set_default_config() - config.set('execution', 'stop_on_first_crash', True) - config.set('execution', 'crashdump_dir', os.getcwd()) + config.set("execution", "stop_on_first_crash", True) + config.set("execution", "crashdump_dir", os.getcwd()) def func1(): return 1 @@ -196,17 +204,18 @@ def func2(a): return a + 1 n1 = pe.Node( - Function(input_names=[], output_names=['a'], function=func1), - name='n1') + Function(input_names=[], output_names=["a"], function=func1), name="n1" + ) n2 = pe.Node( - Function(input_names=['a'], output_names=['b'], function=func2), - name='n2') - w1 = pe.Workflow(name='test') + Function(input_names=["a"], output_names=["b"], function=func2), name="n2" + ) + w1 = pe.Workflow(name="test") def modify(x): return x + 1 + n1.inputs.a = 1 - w1.connect(n1, ('a', modify), n2, 'a') + w1.connect(n1, ("a", modify), n2, "a") w1.base_dir = os.getcwd() # create dummy distributed plugin class @@ -219,25 +228,26 @@ class EngineTestException(Exception): class RaiseError(DistributedPluginBase): def _submit_job(self, node, updatehash=False): raise EngineTestException( - 'Submit called - cached=%s, updated=%s' % 
node.is_cached()) + "Submit called - cached=%s, updated=%s" % node.is_cached() + ) # check if a proper exception is raised with pytest.raises(EngineTestException) as excinfo: w1.run(plugin=RaiseError()) - assert str(excinfo.value).startswith('Submit called') + assert str(excinfo.value).startswith("Submit called") # generate outputs - w1.run(plugin='Linear') + w1.run(plugin="Linear") # ensure plugin is being called - config.set('execution', 'local_hash_check', False) + config.set("execution", "local_hash_check", False) # rerun to ensure we have outputs - w1.run(plugin='Linear') + w1.run(plugin="Linear") # set local check - config.set('execution', 'local_hash_check', True) - w1 = pe.Workflow(name='test') - w1.connect(n1, ('a', modify), n2, 'a') + config.set("execution", "local_hash_check", True) + w1 = pe.Workflow(name="test") + w1.connect(n1, ("a", modify), n2, "a") w1.base_dir = os.getcwd() w1.run(plugin=RaiseError()) @@ -245,57 +255,58 @@ def _submit_job(self, node, updatehash=False): def test_outputs_removal(tmpdir): def test_function(arg1): import os - file1 = os.path.join(os.getcwd(), 'file1.txt') - file2 = os.path.join(os.getcwd(), 'file2.txt') - with open(file1, 'wt') as fp: - fp.write('%d' % arg1) - with open(file2, 'wt') as fp: - fp.write('%d' % arg1) + + file1 = os.path.join(os.getcwd(), "file1.txt") + file2 = os.path.join(os.getcwd(), "file2.txt") + with open(file1, "wt") as fp: + fp.write("%d" % arg1) + with open(file2, "wt") as fp: + fp.write("%d" % arg1) return file1, file2 n1 = pe.Node( niu.Function( - input_names=['arg1'], - output_names=['file1', 'file2'], - function=test_function), + input_names=["arg1"], + output_names=["file1", "file2"], + function=test_function, + ), base_dir=tmpdir.strpath, - name='testoutputs') + name="testoutputs", + ) n1.inputs.arg1 = 1 - n1.config = {'execution': {'remove_unnecessary_outputs': True}} + n1.config = {"execution": {"remove_unnecessary_outputs": True}} n1.config = merge_dict(deepcopy(config._sections), n1.config) n1.run() - assert tmpdir.join(n1.name, 'file1.txt').check() - assert tmpdir.join(n1.name, 'file1.txt').check() - n1.needed_outputs = ['file2'] + assert tmpdir.join(n1.name, "file1.txt").check() + assert tmpdir.join(n1.name, "file1.txt").check() + n1.needed_outputs = ["file2"] n1.run() - assert not tmpdir.join(n1.name, 'file1.txt').check() - assert tmpdir.join(n1.name, 'file2.txt').check() + assert not tmpdir.join(n1.name, "file1.txt").check() + assert tmpdir.join(n1.name, "file2.txt").check() def test_inputs_removal(tmpdir): - file1 = tmpdir.join('file1.txt') - file1.write('dummy_file') - n1 = pe.Node( - UtilsTestInterface(), base_dir=tmpdir.strpath, name='testinputs') + file1 = tmpdir.join("file1.txt") + file1.write("dummy_file") + n1 = pe.Node(UtilsTestInterface(), base_dir=tmpdir.strpath, name="testinputs") n1.inputs.in_file = file1.strpath - n1.config = {'execution': {'keep_inputs': True}} + n1.config = {"execution": {"keep_inputs": True}} n1.config = merge_dict(deepcopy(config._sections), n1.config) n1.run() - assert tmpdir.join(n1.name, 'file1.txt').check() + assert tmpdir.join(n1.name, "file1.txt").check() n1.inputs.in_file = file1.strpath - n1.config = {'execution': {'keep_inputs': False}} + n1.config = {"execution": {"keep_inputs": False}} n1.config = merge_dict(deepcopy(config._sections), n1.config) n1.overwrite = True n1.run() - assert not tmpdir.join(n1.name, 'file1.txt').check() + assert not tmpdir.join(n1.name, "file1.txt").check() def test_outputmultipath_collapse(tmpdir): """Test an OutputMultiPath whose 
initial value is ``[[x]]`` to ensure that it is returned as ``[x]``, regardless of how accessed.""" select_if = niu.Select(inlist=[[1, 2, 3], [4]], index=1) - select_nd = pe.Node(niu.Select(inlist=[[1, 2, 3], [4]], index=1), - name='select_nd') + select_nd = pe.Node(niu.Select(inlist=[[1, 2, 3], [4]], index=1), name="select_nd") ifres = select_if.run() ndres = select_nd.run() diff --git a/nipype/pipeline/engine/tests/test_utils.py b/nipype/pipeline/engine/tests/test_utils.py index 83f5aef282..1e86e5a071 100644 --- a/nipype/pipeline/engine/tests/test_utils.py +++ b/nipype/pipeline/engine/tests/test_utils.py @@ -11,8 +11,7 @@ from ....interfaces import base as nib from ....interfaces import utility as niu from .... import config -from ..utils import (clean_working_directory, write_workflow_prov, - load_resultfile) +from ..utils import clean_working_directory, write_workflow_prov, load_resultfile class InputSpec(nib.TraitedSpec): @@ -20,7 +19,7 @@ class InputSpec(nib.TraitedSpec): class OutputSpec(nib.TraitedSpec): - output1 = nib.traits.List(nib.traits.Int, desc='outputs') + output1 = nib.traits.List(nib.traits.Int, desc="outputs") class UtilsTestInterface(nib.BaseInterface): @@ -33,38 +32,37 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['output1'] = [1] + outputs["output1"] = [1] return outputs def test_identitynode_removal(tmpdir): def test_function(arg1, arg2, arg3): import numpy as np + return (np.array(arg1) + arg2 + arg3).tolist() wf = pe.Workflow(name="testidentity", base_dir=tmpdir.strpath) n1 = pe.Node( - niu.IdentityInterface(fields=['a', 'b']), - name='src', - base_dir=tmpdir.strpath) - n1.iterables = ('b', [0, 1, 2, 3]) + niu.IdentityInterface(fields=["a", "b"]), name="src", base_dir=tmpdir.strpath + ) + n1.iterables = ("b", [0, 1, 2, 3]) n1.inputs.a = [0, 1, 2, 3] - n2 = pe.Node(niu.Select(), name='selector', base_dir=tmpdir.strpath) - wf.connect(n1, ('a', test_function, 1, -1), n2, 'inlist') - wf.connect(n1, 'b', n2, 'index') + n2 = pe.Node(niu.Select(), name="selector", base_dir=tmpdir.strpath) + wf.connect(n1, ("a", test_function, 1, -1), n2, "inlist") + wf.connect(n1, "b", n2, "index") n3 = pe.Node( - niu.IdentityInterface(fields=['c', 'd']), - name='passer', - base_dir=tmpdir.strpath) + niu.IdentityInterface(fields=["c", "d"]), name="passer", base_dir=tmpdir.strpath + ) n3.inputs.c = [1, 2, 3, 4] - wf.connect(n2, 'out', n3, 'd') + wf.connect(n2, "out", n3, "d") - n4 = pe.Node(niu.Select(), name='selector2', base_dir=tmpdir.strpath) - wf.connect(n3, ('c', test_function, 1, -1), n4, 'inlist') - wf.connect(n3, 'd', n4, 'index') + n4 = pe.Node(niu.Select(), name="selector2", base_dir=tmpdir.strpath) + wf.connect(n3, ("c", test_function, 1, -1), n4, "inlist") + wf.connect(n3, "d", n4, "index") fg = wf._create_flat_graph() wf._set_needed_outputs(fg) @@ -84,29 +82,35 @@ class InputSpec(nib.TraitedSpec): inputs = InputSpec() filenames = [ - 'file.hdr', 'file.img', 'file.BRIK', 'file.HEAD', '_0x1234.json', - 'foo.txt' + "file.hdr", + "file.img", + "file.BRIK", + "file.HEAD", + "_0x1234.json", + "foo.txt", ] outfiles = [] for filename in filenames: outfile = tmpdir.join(filename) - outfile.write('dummy') + outfile.write("dummy") outfiles.append(outfile.strpath) outputs.files = outfiles[:4:2] outputs.others = outfiles[5] inputs.infile = outfiles[-1] - needed_outputs = ['files'] + needed_outputs = ["files"] config.set_default_config() assert os.path.exists(outfiles[5]) config.set_default_config() - config.set('execution', 
'remove_unnecessary_outputs', False) - out = clean_working_directory(outputs, tmpdir.strpath, inputs, - needed_outputs, deepcopy(config._sections)) + config.set("execution", "remove_unnecessary_outputs", False) + out = clean_working_directory( + outputs, tmpdir.strpath, inputs, needed_outputs, deepcopy(config._sections) + ) assert os.path.exists(outfiles[5]) assert out.others == outfiles[5] - config.set('execution', 'remove_unnecessary_outputs', True) - out = clean_working_directory(outputs, tmpdir.strpath, inputs, - needed_outputs, deepcopy(config._sections)) + config.set("execution", "remove_unnecessary_outputs", True) + out = clean_working_directory( + outputs, tmpdir.strpath, inputs, needed_outputs, deepcopy(config._sections) + ) assert os.path.exists(outfiles[1]) assert os.path.exists(outfiles[3]) assert os.path.exists(outfiles[4]) @@ -118,39 +122,40 @@ class InputSpec(nib.TraitedSpec): def create_wf(name): """Creates a workflow for the following tests""" + def fwhm(fwhm): return fwhm pipe = pe.Workflow(name=name) process = pe.Node( - niu.Function( - input_names=['fwhm'], output_names=['fwhm'], function=fwhm), - name='proc') - process.iterables = ('fwhm', [0]) + niu.Function(input_names=["fwhm"], output_names=["fwhm"], function=fwhm), + name="proc", + ) + process.iterables = ("fwhm", [0]) process2 = pe.Node( - niu.Function( - input_names=['fwhm'], output_names=['fwhm'], function=fwhm), - name='proc2') - process2.iterables = ('fwhm', [0]) - pipe.connect(process, 'fwhm', process2, 'fwhm') + niu.Function(input_names=["fwhm"], output_names=["fwhm"], function=fwhm), + name="proc2", + ) + process2.iterables = ("fwhm", [0]) + pipe.connect(process, "fwhm", process2, "fwhm") return pipe def test_multi_disconnected_iterable(tmpdir): - metawf = pe.Workflow(name='meta') + metawf = pe.Workflow(name="meta") metawf.base_dir = tmpdir.strpath - metawf.add_nodes([create_wf('wf%d' % i) for i in range(30)]) - eg = metawf.run(plugin='Linear') + metawf.add_nodes([create_wf("wf%d" % i) for i in range(30)]) + eg = metawf.run(plugin="Linear") assert len(eg.nodes()) == 60 def test_provenance(tmpdir): - metawf = pe.Workflow(name='meta') + metawf = pe.Workflow(name="meta") metawf.base_dir = tmpdir.strpath - metawf.add_nodes([create_wf('wf%d' % i) for i in range(1)]) - eg = metawf.run(plugin='Linear') - prov_base = tmpdir.join('workflow_provenance_test').strpath - psg = write_workflow_prov(eg, prov_base, format='all') + metawf.add_nodes([create_wf("wf%d" % i) for i in range(1)]) + eg = metawf.run(plugin="Linear") + prov_base = tmpdir.join("workflow_provenance_test").strpath + psg = write_workflow_prov(eg, prov_base, format="all") assert len(psg.bundles) == 2 assert len(psg.get_records()) == 7 @@ -164,14 +169,14 @@ def test_mapnode_crash(tmpdir): cwd = os.getcwd() node = pe.MapNode( niu.Function( - input_names=['WRONG'], - output_names=['newstring'], - function=dummy_func), - iterfield=['WRONG'], - name='myfunc') - node.inputs.WRONG = ['string{}'.format(i) for i in range(3)] + input_names=["WRONG"], output_names=["newstring"], function=dummy_func + ), + iterfield=["WRONG"], + name="myfunc", + ) + node.inputs.WRONG = ["string{}".format(i) for i in range(3)] node.config = deepcopy(config._sections) - node.config['execution']['stop_on_first_crash'] = True + node.config["execution"]["stop_on_first_crash"] = True node.base_dir = tmpdir.strpath with pytest.raises(TypeError): node.run() @@ -183,12 +188,12 @@ def test_mapnode_crash2(tmpdir): cwd = os.getcwd() node = pe.MapNode( niu.Function( - input_names=['WRONG'], - 
output_names=['newstring'], - function=dummy_func), - iterfield=['WRONG'], - name='myfunc') - node.inputs.WRONG = ['string{}'.format(i) for i in range(3)] + input_names=["WRONG"], output_names=["newstring"], function=dummy_func + ), + iterfield=["WRONG"], + name="myfunc", + ) + node.inputs.WRONG = ["string{}".format(i) for i in range(3)] node.base_dir = tmpdir.strpath with pytest.raises(Exception): @@ -201,19 +206,20 @@ def test_mapnode_crash3(tmpdir): tmpdir.chdir() node = pe.MapNode( niu.Function( - input_names=['WRONG'], - output_names=['newstring'], - function=dummy_func), - iterfield=['WRONG'], - name='myfunc') - node.inputs.WRONG = ['string{}'.format(i) for i in range(3)] - wf = pe.Workflow('testmapnodecrash') + input_names=["WRONG"], output_names=["newstring"], function=dummy_func + ), + iterfield=["WRONG"], + name="myfunc", + ) + node.inputs.WRONG = ["string{}".format(i) for i in range(3)] + wf = pe.Workflow("testmapnodecrash") wf.add_nodes([node]) wf.base_dir = tmpdir.strpath # changing crashdump dir to current working directory (to avoid problems with read-only systems) wf.config["execution"]["crashdump_dir"] = os.getcwd() with pytest.raises(RuntimeError): - wf.run(plugin='Linear') + wf.run(plugin="Linear") + class StrPathConfuserInputSpec(nib.TraitedSpec): in_str = nib.traits.String() @@ -233,14 +239,14 @@ class StrPathConfuser(nib.SimpleInterface): output_spec = StrPathConfuserOutputSpec def _run_interface(self, runtime): - out_path = os.path.abspath(os.path.basename(self.inputs.in_str) + '_path') - open(out_path, 'w').close() - self._results['out_str'] = self.inputs.in_str - self._results['out_path'] = out_path - self._results['out_tuple'] = (out_path, self.inputs.in_str) - self._results['out_dict_path'] = {self.inputs.in_str: out_path} - self._results['out_dict_str'] = {self.inputs.in_str: self.inputs.in_str} - self._results['out_list'] = [self.inputs.in_str] * 2 + out_path = os.path.abspath(os.path.basename(self.inputs.in_str) + "_path") + open(out_path, "w").close() + self._results["out_str"] = self.inputs.in_str + self._results["out_path"] = out_path + self._results["out_tuple"] = (out_path, self.inputs.in_str) + self._results["out_dict_path"] = {self.inputs.in_str: out_path} + self._results["out_dict_str"] = {self.inputs.in_str: self.inputs.in_str} + self._results["out_list"] = [self.inputs.in_str] * 2 return runtime @@ -254,15 +260,15 @@ def test_modify_paths_bug(tmpdir): """ tmpdir.chdir() - spc = pe.Node(StrPathConfuser(in_str='2'), name='spc') + spc = pe.Node(StrPathConfuser(in_str="2"), name="spc") - open('2', 'w').close() + open("2", "w").close() outputs = spc.run().outputs # Basic check that string was not manipulated out_str = outputs.out_str - assert out_str == '2' + assert out_str == "2" # Check path exists and is absolute out_path = outputs.out_path @@ -279,41 +285,45 @@ def test_modify_paths_bug(tmpdir): def test_save_load_resultfile(tmpdir, use_relative): """Test minimally the save/load functions for result files.""" from shutil import copytree, rmtree + tmpdir.chdir() - old_use_relative = config.getboolean('execution', 'use_relative_paths') - config.set('execution', 'use_relative_paths', use_relative) + old_use_relative = config.getboolean("execution", "use_relative_paths") + config.set("execution", "use_relative_paths", use_relative) - spc = pe.Node(StrPathConfuser(in_str='2'), name='spc') - spc.base_dir = tmpdir.mkdir('node').strpath + spc = pe.Node(StrPathConfuser(in_str="2"), name="spc") + spc.base_dir = tmpdir.mkdir("node").strpath result = spc.run() 
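    # A hedged sketch (not part of the test): each node pickles its results
    # to ``result_<nodename>.pklz`` inside its working directory, and
    # ``load_resultfile`` restores the runtime/inputs/outputs from that
    # file, e.g. (path purely illustrative):
    #
    #     res = load_resultfile("/some/base_dir/spc/result_spc.pklz")
    #     res.outputs.get()  # plain dict of the node's output traits
    #
    # With ``execution.use_relative_paths`` enabled, paths inside the
    # pickle are stored relative to the node directory, which is what lets
    # the copytree/rmtree block below relocate the node and still load it.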
loaded_result = load_resultfile( - tmpdir.join('node').join('spc').join('result_spc.pklz').strpath) + tmpdir.join("node").join("spc").join("result_spc.pklz").strpath + ) assert result.runtime.dictcopy() == loaded_result.runtime.dictcopy() assert result.inputs == loaded_result.inputs assert result.outputs.get() == loaded_result.outputs.get() # Test the mobility of the result file. - copytree(tmpdir.join('node').strpath, tmpdir.join('node2').strpath) - rmtree(tmpdir.join('node').strpath) + copytree(tmpdir.join("node").strpath, tmpdir.join("node2").strpath) + rmtree(tmpdir.join("node").strpath) if use_relative: loaded_result2 = load_resultfile( - tmpdir.join('node2').join('spc').join('result_spc.pklz').strpath) + tmpdir.join("node2").join("spc").join("result_spc.pklz").strpath + ) assert result.runtime.dictcopy() == loaded_result2.runtime.dictcopy() assert result.inputs == loaded_result2.inputs assert loaded_result2.outputs.get() != result.outputs.get() - newpath = result.outputs.out_path.replace('/node/', '/node2/') + newpath = result.outputs.out_path.replace("/node/", "/node2/") assert loaded_result2.outputs.out_path == newpath assert loaded_result2.outputs.out_tuple[0] == newpath - assert loaded_result2.outputs.out_dict_path['2'] == newpath + assert loaded_result2.outputs.out_dict_path["2"] == newpath else: with pytest.raises(nib.TraitError): load_resultfile( - tmpdir.join('node2').join('spc').join('result_spc.pklz').strpath) + tmpdir.join("node2").join("spc").join("result_spc.pklz").strpath + ) - config.set('execution', 'use_relative_paths', old_use_relative) + config.set("execution", "use_relative_paths", old_use_relative) diff --git a/nipype/pipeline/engine/tests/test_workflows.py b/nipype/pipeline/engine/tests/test_workflows.py index 0cc7f2142f..75f77525f8 100644 --- a/nipype/pipeline/engine/tests/test_workflows.py +++ b/nipype/pipeline/engine/tests/test_workflows.py @@ -20,27 +20,25 @@ def test_init(): with pytest.raises(TypeError): pe.Workflow() - pipe = pe.Workflow(name='pipe') + pipe = pe.Workflow(name="pipe") assert type(pipe._graph) == nx.DiGraph def test_connect(): - pipe = pe.Workflow(name='pipe') - mod2 = pe.Node(EngineTestInterface(), name='mod2') - mod1 = pe.Node(EngineTestInterface(), name='mod1') - pipe.connect([(mod1, mod2, [('output1', 'input1')])]) + pipe = pe.Workflow(name="pipe") + mod2 = pe.Node(EngineTestInterface(), name="mod2") + mod1 = pe.Node(EngineTestInterface(), name="mod1") + pipe.connect([(mod1, mod2, [("output1", "input1")])]) assert mod1 in pipe._graph.nodes() assert mod2 in pipe._graph.nodes() - assert pipe._graph.get_edge_data(mod1, mod2) == { - 'connect': [('output1', 'input1')] - } + assert pipe._graph.get_edge_data(mod1, mod2) == {"connect": [("output1", "input1")]} def test_add_nodes(): - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(EngineTestInterface(), name='mod1') - mod2 = pe.Node(EngineTestInterface(), name='mod2') + pipe = pe.Workflow(name="pipe") + mod1 = pe.Node(EngineTestInterface(), name="mod1") + mod2 = pe.Node(EngineTestInterface(), name="mod2") pipe.add_nodes([mod1, mod2]) assert mod1 in pipe._graph.nodes() @@ -48,40 +46,40 @@ def test_add_nodes(): def test_disconnect(): - a = pe.Node(niu.IdentityInterface(fields=['a', 'b']), name='a') - b = pe.Node(niu.IdentityInterface(fields=['a', 'b']), name='b') - flow1 = pe.Workflow(name='test') - flow1.connect(a, 'a', b, 'a') - flow1.disconnect(a, 'a', b, 'a') + a = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="a") + b = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="b") + 
flow1 = pe.Workflow(name="test") + flow1.connect(a, "a", b, "a") + flow1.disconnect(a, "a", b, "a") assert list(flow1._graph.edges()) == [] def test_workflow_add(): - n1 = pe.Node(niu.IdentityInterface(fields=['a', 'b']), name='n1') - n2 = pe.Node(niu.IdentityInterface(fields=['c', 'd']), name='n2') - n3 = pe.Node(niu.IdentityInterface(fields=['c', 'd']), name='n1') - w1 = pe.Workflow(name='test') - w1.connect(n1, 'a', n2, 'c') + n1 = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="n1") + n2 = pe.Node(niu.IdentityInterface(fields=["c", "d"]), name="n2") + n3 = pe.Node(niu.IdentityInterface(fields=["c", "d"]), name="n1") + w1 = pe.Workflow(name="test") + w1.connect(n1, "a", n2, "c") for node in [n1, n2, n3]: with pytest.raises(IOError): w1.add_nodes([node]) with pytest.raises(IOError): - w1.connect([(w1, n2, [('n1.a', 'd')])]) + w1.connect([(w1, n2, [("n1.a", "d")])]) def test_doubleconnect(): - a = pe.Node(niu.IdentityInterface(fields=['a', 'b']), name='a') - b = pe.Node(niu.IdentityInterface(fields=['a', 'b']), name='b') - flow1 = pe.Workflow(name='test') - flow1.connect(a, 'a', b, 'a') + a = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="a") + b = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="b") + flow1 = pe.Workflow(name="test") + flow1.connect(a, "a", b, "a") with pytest.raises(Exception) as excinfo: - flow1.connect(a, 'b', b, 'a') + flow1.connect(a, "b", b, "a") assert "Trying to connect" in str(excinfo.value) - c = pe.Node(niu.IdentityInterface(fields=['a', 'b']), name='c') - flow1 = pe.Workflow(name='test2') + c = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="c") + flow1 = pe.Workflow(name="test2") with pytest.raises(Exception) as excinfo: - flow1.connect([(a, c, [('b', 'b')]), (b, c, [('a', 'b')])]) + flow1.connect([(a, c, [("b", "b")]), (b, c, [("a", "b")])]) assert "Trying to connect" in str(excinfo.value) @@ -114,29 +112,31 @@ def test_duplicate_node_check(): def _test_function(arg1): import os - file1 = os.path.join(os.getcwd(), 'file1.txt') - file2 = os.path.join(os.getcwd(), 'file2.txt') - file3 = os.path.join(os.getcwd(), 'file3.txt') - file4 = os.path.join(os.getcwd(), 'subdir', 'file4.txt') + + file1 = os.path.join(os.getcwd(), "file1.txt") + file2 = os.path.join(os.getcwd(), "file2.txt") + file3 = os.path.join(os.getcwd(), "file3.txt") + file4 = os.path.join(os.getcwd(), "subdir", "file4.txt") os.mkdir("subdir") for filename in [file1, file2, file3, file4]: - with open(filename, 'wt') as fp: - fp.write('%d' % arg1) + with open(filename, "wt") as fp: + fp.write("%d" % arg1) return file1, file2, os.path.join(os.getcwd(), "subdir") def _test_function2(in_file, arg): import os - with open(in_file, 'rt') as fp: + + with open(in_file, "rt") as fp: in_arg = fp.read() - file1 = os.path.join(os.getcwd(), 'file1.txt') - file2 = os.path.join(os.getcwd(), 'file2.txt') - file3 = os.path.join(os.getcwd(), 'file3.txt') + file1 = os.path.join(os.getcwd(), "file1.txt") + file2 = os.path.join(os.getcwd(), "file2.txt") + file3 = os.path.join(os.getcwd(), "file3.txt") files = [file1, file2, file3] for filename in files: - with open(filename, 'wt') as fp: - fp.write('%d' % arg + in_arg) + with open(filename, "wt") as fp: + fp.write("%d" % arg + in_arg) return file1, file2, 1 @@ -145,67 +145,69 @@ def _test_function3(arg): @pytest.mark.parametrize( - 'plugin, remove_unnecessary_outputs, keep_inputs', - list(product(['Linear', 'MultiProc'], [False, True], [True, False]))) -def test_outputs_removal_wf(tmpdir, plugin, remove_unnecessary_outputs, - 
keep_inputs): + "plugin, remove_unnecessary_outputs, keep_inputs", + list(product(["Linear", "MultiProc"], [False, True], [True, False])), +) +def test_outputs_removal_wf(tmpdir, plugin, remove_unnecessary_outputs, keep_inputs): config.set_default_config() - config.set('execution', 'remove_unnecessary_outputs', - remove_unnecessary_outputs) - config.set('execution', 'keep_inputs', keep_inputs) + config.set("execution", "remove_unnecessary_outputs", remove_unnecessary_outputs) + config.set("execution", "keep_inputs", keep_inputs) n1 = pe.Node( niu.Function( - output_names=['out_file1', 'out_file2', 'dir'], - function=_test_function), - name='n1', - base_dir=tmpdir.strpath) + output_names=["out_file1", "out_file2", "dir"], function=_test_function + ), + name="n1", + base_dir=tmpdir.strpath, + ) n1.inputs.arg1 = 1 n2 = pe.Node( niu.Function( - output_names=['out_file1', 'out_file2', 'n'], - function=_test_function2), - name='n2', - base_dir=tmpdir.strpath) + output_names=["out_file1", "out_file2", "n"], function=_test_function2 + ), + name="n2", + base_dir=tmpdir.strpath, + ) n2.inputs.arg = 2 n3 = pe.Node( - niu.Function( - output_names=['n'], - function=_test_function3), - name='n3', - base_dir=tmpdir.strpath) + niu.Function(output_names=["n"], function=_test_function3), + name="n3", + base_dir=tmpdir.strpath, + ) - wf = pe.Workflow( - name="node_rem_test" + plugin, base_dir=tmpdir.strpath) + wf = pe.Workflow(name="node_rem_test" + plugin, base_dir=tmpdir.strpath) wf.connect(n1, "out_file1", n2, "in_file") wf.run(plugin=plugin) # Necessary outputs HAVE to exist - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n1.name, 'file1.txt')) - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n2.name, 'file1.txt')) - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n2.name, 'file2.txt')) + assert os.path.exists(os.path.join(wf.base_dir, wf.name, n1.name, "file1.txt")) + assert os.path.exists(os.path.join(wf.base_dir, wf.name, n2.name, "file1.txt")) + assert os.path.exists(os.path.join(wf.base_dir, wf.name, n2.name, "file2.txt")) # Unnecessary outputs exist only iff remove_unnecessary_outputs is True - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n1.name, - 'file2.txt')) is not remove_unnecessary_outputs - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n1.name, "subdir", - 'file4.txt')) is not remove_unnecessary_outputs - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n1.name, - 'file3.txt')) is not remove_unnecessary_outputs - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n2.name, - 'file3.txt')) is not remove_unnecessary_outputs - - n4 = pe.Node(UtilsTestInterface(), name='n4', base_dir=tmpdir.strpath) + assert ( + os.path.exists(os.path.join(wf.base_dir, wf.name, n1.name, "file2.txt")) + is not remove_unnecessary_outputs + ) + assert ( + os.path.exists( + os.path.join(wf.base_dir, wf.name, n1.name, "subdir", "file4.txt") + ) + is not remove_unnecessary_outputs + ) + assert ( + os.path.exists(os.path.join(wf.base_dir, wf.name, n1.name, "file3.txt")) + is not remove_unnecessary_outputs + ) + assert ( + os.path.exists(os.path.join(wf.base_dir, wf.name, n2.name, "file3.txt")) + is not remove_unnecessary_outputs + ) + + n4 = pe.Node(UtilsTestInterface(), name="n4", base_dir=tmpdir.strpath) wf.connect(n2, "out_file1", n4, "in_file") def pick_first(l): @@ -216,50 +218,49 @@ def pick_first(l): wf.run(plugin=plugin) # Test necessary outputs - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n2.name, 
'file1.txt')) - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n2.name, 'file1.txt')) + assert os.path.exists(os.path.join(wf.base_dir, wf.name, n2.name, "file1.txt")) + assert os.path.exists(os.path.join(wf.base_dir, wf.name, n2.name, "file1.txt")) # Test unnecessary outputs - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n2.name, - 'file2.txt')) is not remove_unnecessary_outputs + assert ( + os.path.exists(os.path.join(wf.base_dir, wf.name, n2.name, "file2.txt")) + is not remove_unnecessary_outputs + ) # Test keep_inputs - assert os.path.exists( - os.path.join(wf.base_dir, wf.name, n4.name, - 'file1.txt')) is keep_inputs + assert ( + os.path.exists(os.path.join(wf.base_dir, wf.name, n4.name, "file1.txt")) + is keep_inputs + ) def _test_function4(): - raise FileNotFoundError('Generic error') + raise FileNotFoundError("Generic error") def test_config_setting(tmpdir): tmpdir.chdir() - wf = pe.Workflow('config') + wf = pe.Workflow("config") wf.base_dir = os.getcwd() - crashdir = os.path.join(os.getcwd(), 'crashdir') + crashdir = os.path.join(os.getcwd(), "crashdir") os.mkdir(crashdir) wf.config = {"execution": {"crashdump_dir": crashdir}} - n1 = pe.Node(niu.Function(function=_test_function4), - name='errorfunc') + n1 = pe.Node(niu.Function(function=_test_function4), name="errorfunc") wf.add_nodes([n1]) try: wf.run() except RuntimeError: pass - fl = glob(os.path.join(crashdir, 'crash*')) + fl = glob(os.path.join(crashdir, "crash*")) assert len(fl) == 1 # Now test node overwrite - crashdir2 = os.path.join(os.getcwd(), 'crashdir2') + crashdir2 = os.path.join(os.getcwd(), "crashdir2") os.mkdir(crashdir2) - crashdir3 = os.path.join(os.getcwd(), 'crashdir3') + crashdir3 = os.path.join(os.getcwd(), "crashdir3") os.mkdir(crashdir3) wf.config = {"execution": {"crashdump_dir": crashdir3}} n1.config = {"execution": {"crashdump_dir": crashdir2}} @@ -269,7 +270,7 @@ def test_config_setting(tmpdir): except RuntimeError: pass - fl = glob(os.path.join(crashdir2, 'crash*')) + fl = glob(os.path.join(crashdir2, "crash*")) assert len(fl) == 1 - fl = glob(os.path.join(crashdir3, 'crash*')) + fl = glob(os.path.join(crashdir3, "crash*")) assert len(fl) == 0 diff --git a/nipype/pipeline/engine/workflows.py b/nipype/pipeline/engine/workflows.py index 195ebc6f69..d1fde0ba32 100644 --- a/nipype/pipeline/engine/workflows.py +++ b/nipype/pipeline/engine/workflows.py @@ -18,19 +18,26 @@ from ... 
import config, logging from ...utils.misc import str2bool -from ...utils.functions import (getsource, create_function_from_source) +from ...utils.functions import getsource, create_function_from_source -from ...interfaces.base import (traits, TraitedSpec, TraitDictObject, - TraitListObject) +from ...interfaces.base import traits, TraitedSpec, TraitDictObject, TraitListObject from ...utils.filemanip import save_json -from .utils import (generate_expanded_graph, export_graph, write_workflow_prov, - write_workflow_resources, format_dot, topological_sort, - get_print_name, merge_dict, format_node) +from .utils import ( + generate_expanded_graph, + export_graph, + write_workflow_prov, + write_workflow_resources, + format_dot, + topological_sort, + get_print_name, + merge_dict, + format_node, +) from .base import EngineBase from .nodes import MapNode -logger = logging.getLogger('nipype.workflow') +logger = logging.getLogger("nipype.workflow") class Workflow(EngineBase): @@ -48,6 +55,7 @@ def __init__(self, name, base_dir=None): """ import networkx as nx + super(Workflow, self).__init__(name, base_dir) self._graph = nx.DiGraph() @@ -120,12 +128,14 @@ def connect(self, *args, **kwargs): elif len(args) == 4: connection_list = [(args[0], args[2], [(args[1], args[3])])] else: - raise TypeError('connect() takes either 4 arguments, or 1 list of' - ' connection tuples (%d args given)' % len(args)) + raise TypeError( + "connect() takes either 4 arguments, or 1 list of" + " connection tuples (%d args given)" % len(args) + ) disconnect = False if kwargs: - disconnect = kwargs.get('disconnect', False) + disconnect = kwargs.get("disconnect", False) if disconnect: self.disconnect(connection_list) @@ -134,9 +144,10 @@ def connect(self, *args, **kwargs): newnodes = [] for srcnode, destnode, _ in connection_list: if self in [srcnode, destnode]: - msg = ('Workflow connect cannot contain itself as node:' - ' src[%s] dest[%s] workflow[%s]') % (srcnode, destnode, - self.name) + msg = ( + "Workflow connect cannot contain itself as node:" + " src[%s] dest[%s] workflow[%s]" + ) % (srcnode, destnode, self.name) raise IOError(msg) if (srcnode not in newnodes) and not self._has_node(srcnode): @@ -158,7 +169,7 @@ def connect(self, *args, **kwargs): if not disconnect and (destnode in self._graph.nodes()): for edge in self._graph.in_edges(destnode): data = self._graph.get_edge_data(*edge) - for sourceinfo, destname in data['connect']: + for sourceinfo, destname in data["connect"]: if destname not in connected_ports[destnode]: connected_ports[destnode] += [destname] for source, dest in connects: @@ -166,22 +177,39 @@ def connect(self, *args, **kwargs): # determine their inputs/outputs depending on # connection settings. Skip these modules in the check if dest in connected_ports[destnode]: - raise Exception("""\ + raise Exception( + """\ Trying to connect %s:%s to %s:%s but input '%s' of node '%s' is already connected. 
-""" % (srcnode, source, destnode, dest, dest, destnode)) - if not (hasattr(destnode, '_interface') and - ('.io' in str(destnode._interface.__class__) or any([ - '.io' in str(val) - for val in destnode._interface.__class__.__bases__ - ]))): +""" + % (srcnode, source, destnode, dest, dest, destnode) + ) + if not ( + hasattr(destnode, "_interface") + and ( + ".io" in str(destnode._interface.__class__) + or any( + [ + ".io" in str(val) + for val in destnode._interface.__class__.__bases__ + ] + ) + ) + ): if not destnode._check_inputs(dest): - not_found.append(['in', destnode.name, dest]) - if not (hasattr(srcnode, '_interface') and - ('.io' in str(srcnode._interface.__class__) or any([ - '.io' in str(val) - for val in srcnode._interface.__class__.__bases__ - ]))): + not_found.append(["in", destnode.name, dest]) + if not ( + hasattr(srcnode, "_interface") + and ( + ".io" in str(srcnode._interface.__class__) + or any( + [ + ".io" in str(val) + for val in srcnode._interface.__class__.__bases__ + ] + ) + ) + ): if isinstance(source, tuple): # handles the case that source is specified # with a function @@ -190,26 +218,27 @@ def connect(self, *args, **kwargs): sourcename = source else: raise Exception( - ('Unknown source specification in ' - 'connection from output of %s') % srcnode.name) + ( + "Unknown source specification in " + "connection from output of %s" + ) + % srcnode.name + ) if sourcename and not srcnode._check_outputs(sourcename): - not_found.append(['out', srcnode.name, sourcename]) + not_found.append(["out", srcnode.name, sourcename]) connected_ports[destnode] += [dest] infostr = [] for info in not_found: infostr += [ - "Module %s has no %sput called %s\n" % (info[1], info[0], - info[2]) + "Module %s has no %sput called %s\n" % (info[1], info[0], info[2]) ] if not_found: - raise Exception( - '\n'.join(['Some connections were not found'] + infostr)) + raise Exception("\n".join(["Some connections were not found"] + infostr)) # turn functions into strings for srcnode, destnode, connects in connection_list: for idx, (src, dest) in enumerate(connects): - if isinstance(src, - tuple) and not isinstance(src[1], (str, bytes)): + if isinstance(src, tuple) and not isinstance(src[1], (str, bytes)): function_source = getsource(src[1]) connects[idx] = ((src[0], function_source, src[2:]), dest) @@ -217,30 +246,28 @@ def connect(self, *args, **kwargs): for srcnode, destnode, connects in connection_list: edge_data = self._graph.get_edge_data(srcnode, destnode, None) if edge_data: - logger.debug('(%s, %s): Edge data exists: %s', srcnode, - destnode, str(edge_data)) + logger.debug( + "(%s, %s): Edge data exists: %s", srcnode, destnode, str(edge_data) + ) for data in connects: - if data not in edge_data['connect']: - edge_data['connect'].append(data) + if data not in edge_data["connect"]: + edge_data["connect"].append(data) if disconnect: - logger.debug('Removing connection: %s', str(data)) - edge_data['connect'].remove(data) - if edge_data['connect']: - self._graph.add_edges_from([(srcnode, destnode, - edge_data)]) + logger.debug("Removing connection: %s", str(data)) + edge_data["connect"].remove(data) + if edge_data["connect"]: + self._graph.add_edges_from([(srcnode, destnode, edge_data)]) else: # pass - logger.debug('Removing connection: %s->%s', srcnode, - destnode) + logger.debug("Removing connection: %s->%s", srcnode, destnode) self._graph.remove_edges_from([(srcnode, destnode)]) elif not disconnect: - logger.debug('(%s, %s): No edge data', srcnode, destnode) - self._graph.add_edges_from([(srcnode, 
destnode, { - 'connect': connects - })]) + logger.debug("(%s, %s): No edge data", srcnode, destnode) + self._graph.add_edges_from([(srcnode, destnode, {"connect": connects})]) edge_data = self._graph.get_edge_data(srcnode, destnode) - logger.debug('(%s, %s): new edge data: %s', srcnode, destnode, - str(edge_data)) + logger.debug( + "(%s, %s): new edge data: %s", srcnode, destnode, str(edge_data) + ) def disconnect(self, *args): """Disconnect nodes @@ -251,25 +278,25 @@ def disconnect(self, *args): elif len(args) == 4: connection_list = [(args[0], args[2], [(args[1], args[3])])] else: - raise TypeError('disconnect() takes either 4 arguments, or 1 list ' - 'of connection tuples (%d args given)' % len(args)) + raise TypeError( + "disconnect() takes either 4 arguments, or 1 list " + "of connection tuples (%d args given)" % len(args) + ) for srcnode, dstnode, conn in connection_list: - logger.debug('disconnect(): %s->%s %s', srcnode, dstnode, - str(conn)) + logger.debug("disconnect(): %s->%s %s", srcnode, dstnode, str(conn)) if self in [srcnode, dstnode]: raise IOError( - 'Workflow connect cannot contain itself as node: src[%s] ' - 'dest[%s] workflow[%s]') % (srcnode, dstnode, self.name) + "Workflow connect cannot contain itself as node: src[%s] " + "dest[%s] workflow[%s]" + ) % (srcnode, dstnode, self.name) # If node is not in the graph, not connected if not self._has_node(srcnode) or not self._has_node(dstnode): continue - edge_data = self._graph.get_edge_data(srcnode, dstnode, { - 'connect': [] - }) - ed_conns = [(c[0], c[1]) for c in edge_data['connect']] + edge_data = self._graph.get_edge_data(srcnode, dstnode, {"connect": []}) + ed_conns = [(c[0], c[1]) for c in edge_data["connect"]] remove = [] for edge in conn: @@ -277,12 +304,12 @@ def disconnect(self, *args): # idx = ed_conns.index(edge) remove.append((edge[0], edge[1])) - logger.debug('disconnect(): remove list %s', str(remove)) + logger.debug("disconnect(): remove list %s", str(remove)) for el in remove: - edge_data['connect'].remove(el) - logger.debug('disconnect(): removed connection %s', str(el)) + edge_data["connect"].remove(el) + logger.debug("disconnect(): removed connection %s", str(el)) - if not edge_data['connect']: + if not edge_data["connect"]: self._graph.remove_edge(srcnode, dstnode) else: self._graph.add_edges_from([(srcnode, dstnode, edge_data)]) @@ -299,20 +326,21 @@ def add_nodes(self, nodes): all_nodes = self._get_all_nodes() for node in nodes: if self._has_node(node): - raise IOError('Node %s already exists in the workflow' % node) + raise IOError("Node %s already exists in the workflow" % node) if isinstance(node, Workflow): for subnode in node._get_all_nodes(): if subnode in all_nodes: - raise IOError(('Subnode %s of node %s already exists ' - 'in the workflow') % (subnode, node)) + raise IOError( + ("Subnode %s of node %s already exists " "in the workflow") + % (subnode, node) + ) newnodes.append(node) if not newnodes: - logger.debug('no new nodes to add') + logger.debug("no new nodes to add") return for node in newnodes: if not issubclass(node.__class__, EngineBase): - raise Exception('Node %s must be a subclass of EngineBase', - node) + raise Exception("Node %s must be a subclass of EngineBase", node) self._check_nodes(newnodes) for node in newnodes: if node._hierarchy is None: @@ -341,16 +369,15 @@ def outputs(self): def get_node(self, name): """Return an internal node by name """ - nodenames = name.split('.') + nodenames = name.split(".") nodename = nodenames[0] outnode = [ - node for node in 
self._graph.nodes() - if str(node).endswith('.' + nodename) + node for node in self._graph.nodes() if str(node).endswith("." + nodename) ] if outnode: outnode = outnode[0] if nodenames[1:] and issubclass(outnode.__class__, Workflow): - outnode = outnode.get_node('.'.join(nodenames[1:])) + outnode = outnode.get_node(".".join(nodenames[1:])) else: outnode = None return outnode @@ -359,22 +386,27 @@ def list_node_names(self): """List names of all nodes in a workflow """ import networkx as nx + outlist = [] for node in nx.topological_sort(self._graph): if isinstance(node, Workflow): - outlist.extend([ - '.'.join((node.name, nodename)) - for nodename in node.list_node_names() - ]) + outlist.extend( + [ + ".".join((node.name, nodename)) + for nodename in node.list_node_names() + ] + ) else: outlist.append(node.name) return sorted(outlist) - def write_graph(self, - dotfilename='graph.dot', - graph2use='hierarchical', - format="png", - simple_form=True): + def write_graph( + self, + dotfilename="graph.dot", + graph2use="hierarchical", + format="png", + simple_form=True, + ): """Generates a graphviz dot file and a png file Parameters @@ -398,12 +430,13 @@ def write_graph(self, False. """ - graphtypes = ['orig', 'flat', 'hierarchical', 'exec', 'colored'] + graphtypes = ["orig", "flat", "hierarchical", "exec", "colored"] if graph2use not in graphtypes: - raise ValueError('Unknown graph2use keyword. Must be one of: ' + - str(graphtypes)) + raise ValueError( + "Unknown graph2use keyword. Must be one of: " + str(graphtypes) + ) base_dir, dotfilename = op.split(dotfilename) - if base_dir == '': + if base_dir == "": if self.base_dir: base_dir = self.base_dir if self.name: @@ -411,56 +444,58 @@ def write_graph(self, else: base_dir = os.getcwd() os.makedirs(base_dir, exist_ok=True) - if graph2use in ['hierarchical', 'colored']: + if graph2use in ["hierarchical", "colored"]: if self.name[:1].isdigit(): # these graphs break if int - raise ValueError('{} graph failed, workflow name cannot begin ' - 'with a number'.format(graph2use)) + raise ValueError( + "{} graph failed, workflow name cannot begin " + "with a number".format(graph2use) + ) dotfilename = op.join(base_dir, dotfilename) self.write_hierarchical_dotfile( dotfilename=dotfilename, colored=graph2use == "colored", - simple_form=simple_form) + simple_form=simple_form, + ) outfname = format_dot(dotfilename, format=format) else: graph = self._graph - if graph2use in ['flat', 'exec']: + if graph2use in ["flat", "exec"]: graph = self._create_flat_graph() - if graph2use == 'exec': + if graph2use == "exec": graph = generate_expanded_graph(deepcopy(graph)) outfname = export_graph( graph, base_dir, dotfilename=dotfilename, format=format, - simple_form=simple_form) + simple_form=simple_form, + ) logger.info( - 'Generated workflow graph: %s (graph2use=%s, simple_form=%s).' % - (outfname, graph2use, simple_form)) + "Generated workflow graph: %s (graph2use=%s, simple_form=%s)." 
+ % (outfname, graph2use, simple_form) + ) return outfname - def write_hierarchical_dotfile(self, - dotfilename=None, - colored=False, - simple_form=True): - dotlist = ['digraph %s{' % self.name] + def write_hierarchical_dotfile( + self, dotfilename=None, colored=False, simple_form=True + ): + dotlist = ["digraph %s{" % self.name] dotlist.append( - self._get_dot( - prefix=' ', colored=colored, simple_form=simple_form)) - dotlist.append('}') - dotstr = '\n'.join(dotlist) + self._get_dot(prefix=" ", colored=colored, simple_form=simple_form) + ) + dotlist.append("}") + dotstr = "\n".join(dotlist) if dotfilename: - fp = open(dotfilename, 'wt') + fp = open(dotfilename, "wt") fp.writelines(dotstr) fp.close() else: logger.info(dotstr) - def export(self, - filename=None, - prefix="output", - format="python", - include_config=False): + def export( + self, filename=None, prefix="output", format="python", include_config=False + ): """Export object into a different format Parameters @@ -476,41 +511,39 @@ def export(self, """ import networkx as nx + formats = ["python"] if format not in formats: - raise ValueError('format must be one of: %s' % '|'.join(formats)) + raise ValueError("format must be one of: %s" % "|".join(formats)) flatgraph = self._create_flat_graph() nodes = nx.topological_sort(flatgraph) all_lines = None - lines = ['# Workflow'] - importlines = [ - 'from nipype.pipeline.engine import Workflow, ' - 'Node, MapNode' - ] + lines = ["# Workflow"] + importlines = ["from nipype.pipeline.engine import Workflow, " "Node, MapNode"] functions = {} if format == "python": connect_template = '%s.connect(%%s, %%s, %%s, "%%s")' % self.name - connect_template2 = '%s.connect(%%s, "%%s", %%s, "%%s")' \ - % self.name + connect_template2 = '%s.connect(%%s, "%%s", %%s, "%%s")' % self.name wfdef = '%s = Workflow("%s")' % (self.name, self.name) lines.append(wfdef) if include_config: - lines.append('%s.config = %s' % (self.name, self.config)) + lines.append("%s.config = %s" % (self.name, self.config)) for idx, node in enumerate(nodes): - nodename = node.fullname.replace('.', '_') + nodename = node.fullname.replace(".", "_") # write nodes nodelines = format_node( - node, format='python', include_config=include_config) + node, format="python", include_config=include_config + ) for line in nodelines: - if line.startswith('from'): + if line.startswith("from"): if line not in importlines: importlines.append(line) else: lines.append(line) # write connections for u, _, d in flatgraph.in_edges(nbunch=node, data=True): - for cd in d['connect']: + for cd in d["connect"]: if isinstance(cd[0], tuple): args = list(cd[0]) if args[1] in functions: @@ -518,30 +551,39 @@ def export(self, else: func = create_function_from_source(args[1]) funcname = [ - name for name in func.__globals__ - if name != '__builtins__' + name + for name in func.__globals__ + if name != "__builtins__" ][0] functions[args[1]] = funcname args[1] = funcname args = tuple([arg for arg in args if arg]) - line_args = (u.fullname.replace('.', '_'), args, - nodename, cd[1]) + line_args = ( + u.fullname.replace(".", "_"), + args, + nodename, + cd[1], + ) line = connect_template % line_args line = line.replace("'%s'" % funcname, funcname) lines.append(line) else: - line_args = (u.fullname.replace('.', '_'), cd[0], - nodename, cd[1]) + line_args = ( + u.fullname.replace(".", "_"), + cd[0], + nodename, + cd[1], + ) lines.append(connect_template2 % line_args) - functionlines = ['# Functions'] + functionlines = ["# Functions"] for function in functions: 
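            # ``functions`` maps each connection function's serialized
            # source (captured with getsource() in connect()) to the name
            # recovered from its __globals__; this loop writes that source
            # out once so the exported script can call the function by
            # name. A hedged sketch of the round trip (``double`` is purely
            # illustrative):
            #
            #     src = getsource(double)               # serialize
            #     f = create_function_from_source(src)  # rebuild callable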
functionlines.append(pickle.loads(function).rstrip()) all_lines = importlines + functionlines + lines if not filename: - filename = '%s%s.py' % (prefix, self.name) - with open(filename, 'wt') as fp: - fp.writelines('\n'.join(all_lines)) + filename = "%s%s.py" % (prefix, self.name) + with open(filename, "wt") as fp: + fp.writelines("\n".join(all_lines)) return all_lines def run(self, plugin=None, plugin_args=None, updatehash=False): @@ -557,26 +599,25 @@ def run(self, plugin=None, plugin_args=None, updatehash=False): constructor. see individual plugin doc strings for details. """ if plugin is None: - plugin = config.get('execution', 'plugin') + plugin = config.get("execution", "plugin") if not isinstance(plugin, (str, bytes)): runner = plugin - plugin = runner.__class__.__name__[:-len('Plugin')] + plugin = runner.__class__.__name__[: -len("Plugin")] plugin_args = runner.plugin_args else: - name = '.'.join(__name__.split('.')[:-2] + ['plugins']) + name = ".".join(__name__.split(".")[:-2] + ["plugins"]) try: __import__(name) except ImportError: - msg = 'Could not import plugin module: %s' % name + msg = "Could not import plugin module: %s" % name logger.error(msg) raise ImportError(msg) else: - plugin_mod = getattr(sys.modules[name], '%sPlugin' % plugin) + plugin_mod = getattr(sys.modules[name], "%sPlugin" % plugin) runner = plugin_mod(plugin_args=plugin_args) flatgraph = self._create_flat_graph() self.config = merge_dict(deepcopy(config._sections), self.config) - logger.info('Workflow %s settings: %s', self.name, - str(sorted(self.config))) + logger.info("Workflow %s settings: %s", self.name, str(sorted(self.config))) self._set_needed_outputs(flatgraph) execgraph = generate_expanded_graph(deepcopy(flatgraph)) for index, node in enumerate(execgraph.nodes()): @@ -586,21 +627,21 @@ def run(self, plugin=None, plugin_args=None, updatehash=False): if isinstance(node, MapNode): node.use_plugin = (plugin, plugin_args) self._configure_exec_nodes(execgraph) - if str2bool(self.config['execution']['create_report']): + if str2bool(self.config["execution"]["create_report"]): self._write_report_info(self.base_dir, self.name, execgraph) runner.run(execgraph, updatehash=updatehash, config=self.config) - datestr = datetime.utcnow().strftime('%Y%m%dT%H%M%S') - if str2bool(self.config['execution']['write_provenance']): - prov_base = op.join(self.base_dir, - 'workflow_provenance_%s' % datestr) - logger.info('Provenance file prefix: %s' % prov_base) - write_workflow_prov(execgraph, prov_base, format='all') + datestr = datetime.utcnow().strftime("%Y%m%dT%H%M%S") + if str2bool(self.config["execution"]["write_provenance"]): + prov_base = op.join(self.base_dir, "workflow_provenance_%s" % datestr) + logger.info("Provenance file prefix: %s" % prov_base) + write_workflow_prov(execgraph, prov_base, format="all") if config.resource_monitor: base_dir = self.base_dir or os.getcwd() write_workflow_resources( execgraph, - filename=op.join(base_dir, self.name, 'resource_monitor.json')) + filename=op.join(base_dir, self.name, "resource_monitor.json"), + ) return execgraph # PRIVATE API AND FUNCTIONS @@ -611,50 +652,58 @@ def _write_report_info(self, workingdir, name, graph): report_dir = op.join(workingdir, name) os.makedirs(report_dir, exist_ok=True) shutil.copyfile( - op.join(op.dirname(__file__), 'report_template.html'), - op.join(report_dir, 'index.html')) + op.join(op.dirname(__file__), "report_template.html"), + op.join(report_dir, "index.html"), + ) shutil.copyfile( - op.join(op.dirname(__file__), '..', '..', 'external', 
'd3.js'), - op.join(report_dir, 'd3.js')) + op.join(op.dirname(__file__), "..", "..", "external", "d3.js"), + op.join(report_dir, "d3.js"), + ) nodes, groups = topological_sort(graph, depth_first=True) - graph_file = op.join(report_dir, 'graph1.json') - json_dict = {'nodes': [], 'links': [], 'groups': [], 'maxN': 0} + graph_file = op.join(report_dir, "graph1.json") + json_dict = {"nodes": [], "links": [], "groups": [], "maxN": 0} for i, node in enumerate(nodes): - report_file = "%s/_report/report.rst" % \ - node.output_dir().replace(report_dir, '') - result_file = "%s/result_%s.pklz" % \ - (node.output_dir().replace(report_dir, ''), - node.name) - json_dict['nodes'].append( + report_file = "%s/_report/report.rst" % node.output_dir().replace( + report_dir, "" + ) + result_file = "%s/result_%s.pklz" % ( + node.output_dir().replace(report_dir, ""), + node.name, + ) + json_dict["nodes"].append( dict( - name='%d_%s' % (i, node.name), + name="%d_%s" % (i, node.name), report=report_file, result=result_file, - group=groups[i])) + group=groups[i], + ) + ) maxN = 0 for gid in np.unique(groups): procs = [i for i, val in enumerate(groups) if val == gid] N = len(procs) if N > maxN: maxN = N - json_dict['groups'].append( - dict(procs=procs, total=N, name='Group_%05d' % gid)) - json_dict['maxN'] = maxN + json_dict["groups"].append( + dict(procs=procs, total=N, name="Group_%05d" % gid) + ) + json_dict["maxN"] = maxN for u, v in graph.in_edges(): - json_dict['links'].append( - dict(source=nodes.index(u), target=nodes.index(v), value=1)) + json_dict["links"].append( + dict(source=nodes.index(u), target=nodes.index(v), value=1) + ) save_json(graph_file, json_dict) - graph_file = op.join(report_dir, 'graph.json') + graph_file = op.join(report_dir, "graph.json") # Avoid RuntimeWarning: divide by zero encountered in log10 num_nodes = len(nodes) if num_nodes > 0: index_name = np.ceil(np.log10(num_nodes)).astype(int) else: index_name = 0 - template = '%%0%dd_' % index_name + template = "%%0%dd_" % index_name def getname(u, i): - name_parts = u.fullname.split('.') + name_parts = u.fullname.split(".") # return '.'.join(name_parts[:-1] + [template % i + name_parts[-1]]) return template % i + name_parts[-1] @@ -664,16 +713,13 @@ def getname(u, i): for u, v in graph.in_edges(nbunch=node): imports.append(getname(u, nodes.index(u))) json_dict.append( - dict( - name=getname(node, i), - size=1, - group=groups[i], - imports=imports)) + dict(name=getname(node, i), size=1, group=groups[i], imports=imports) + ) save_json(graph_file, json_dict) def _set_needed_outputs(self, graph): """Initialize node with list of which outputs are needed.""" - rm_outputs = self.config['execution']['remove_unnecessary_outputs'] + rm_outputs = self.config["execution"]["remove_unnecessary_outputs"] if not str2bool(rm_outputs): return for node in graph.nodes(): @@ -681,8 +727,7 @@ def _set_needed_outputs(self, graph): for edge in graph.out_edges(node): data = graph.get_edge_data(*edge) sourceinfo = [ - v1[0] if isinstance(v1, tuple) else v1 - for v1, v2 in data['connect'] + v1[0] if isinstance(v1, tuple) else v1 for v1, v2 in data["connect"] ] node.needed_outputs += [ v for v in sourceinfo if v not in node.needed_outputs @@ -697,11 +742,11 @@ def _configure_exec_nodes(self, graph): node.input_source = {} for edge in graph.in_edges(node): data = graph.get_edge_data(*edge) - for sourceinfo, field in data['connect']: - node.input_source[field] = \ - (op.join(edge[0].output_dir(), - 'result_%s.pklz' % edge[0].name), - sourceinfo) + for sourceinfo, 
field in data["connect"]: + node.input_source[field] = ( + op.join(edge[0].output_dir(), "result_%s.pklz" % edge[0].name), + sourceinfo, + ) def _check_nodes(self, nodes): """Checks if any of the nodes are already in the graph @@ -715,23 +760,21 @@ def _check_nodes(self, nodes): try: this_node_lineage = node_lineage[idx] except IndexError: - raise IOError( - 'Duplicate node name "%s" found.' % node.name) + raise IOError('Duplicate node name "%s" found.' % node.name) else: if this_node_lineage in [node._hierarchy, self.name]: - raise IOError( - 'Duplicate node name "%s" found.' % node.name) + raise IOError('Duplicate node name "%s" found.' % node.name) else: node_names.append(node.name) - def _has_attr(self, parameter, subtype='in'): + def _has_attr(self, parameter, subtype="in"): """Checks if a parameter is available as an input or output """ - if subtype == 'in': + if subtype == "in": subobject = self.inputs else: subobject = self.outputs - attrlist = parameter.split('.') + attrlist = parameter.split(".") cur_out = subobject for attr in attrlist: if not hasattr(cur_out, attr): @@ -739,25 +782,25 @@ def _has_attr(self, parameter, subtype='in'): cur_out = getattr(cur_out, attr) return True - def _get_parameter_node(self, parameter, subtype='in'): + def _get_parameter_node(self, parameter, subtype="in"): """Returns the underlying node corresponding to an input or output parameter """ - if subtype == 'in': + if subtype == "in": subobject = self.inputs else: subobject = self.outputs - attrlist = parameter.split('.') + attrlist = parameter.split(".") cur_out = subobject for attr in attrlist[:-1]: cur_out = getattr(cur_out, attr) return cur_out.traits()[attrlist[-1]].node def _check_outputs(self, parameter): - return self._has_attr(parameter, subtype='out') + return self._has_attr(parameter, subtype="out") def _check_inputs(self, parameter): - return self._has_attr(parameter, subtype='in') + return self._has_attr(parameter, subtype="in") def _get_inputs(self): """Returns the inputs of a workflow @@ -773,14 +816,12 @@ def _get_inputs(self): else: taken_inputs = [] for _, _, d in self._graph.in_edges(nbunch=node, data=True): - for cd in d['connect']: + for cd in d["connect"]: taken_inputs.append(cd[1]) unconnectedinputs = TraitedSpec() for key, trait in list(node.inputs.items()): if key not in taken_inputs: - unconnectedinputs.add_trait(key, - traits.Trait( - trait, node=node)) + unconnectedinputs.add_trait(key, traits.Trait(trait, node=node)) value = getattr(node.inputs, key) setattr(unconnectedinputs, key, value) setattr(inputdict, node.name, unconnectedinputs) @@ -814,14 +855,13 @@ def _set_node_input(self, node, param, source, sourceinfo): val = source.get_output(sourceinfo) elif isinstance(sourceinfo, tuple): if callable(sourceinfo[1]): - val = sourceinfo[1](source.get_output(sourceinfo[0]), - *sourceinfo[2:]) + val = sourceinfo[1](source.get_output(sourceinfo[0]), *sourceinfo[2:]) newval = val if isinstance(val, TraitDictObject): newval = dict(val) if isinstance(val, TraitListObject): newval = val[:] - logger.debug('setting node input: %s->%s', param, str(newval)) + logger.debug("setting node input: %s->%s", param, str(newval)) node.set_input(param, deepcopy(newval)) def _get_all_nodes(self): @@ -844,7 +884,7 @@ def _has_node(self, wanted_node): def _create_flat_graph(self): """Make a simple DAG where no node is a workflow.""" - logger.debug('Creating flat graph for workflow: %s', self.name) + logger.debug("Creating flat graph for workflow: %s", self.name) workflowcopy = deepcopy(self) 
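        # Flattening rewires the graph in place, so it operates on a deep
        # copy: the caller's nested workflow stays intact while the copy
        # has every inner Workflow node expanded until only Nodes and
        # MapNodes remain; run() and write_graph() then expand iterables
        # over this flat graph with generate_expanded_graph().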
workflowcopy._generate_flatgraph() return workflowcopy._graph @@ -856,8 +896,7 @@ def _reset_hierarchy(self): if isinstance(node, Workflow): node._reset_hierarchy() for innernode in node._graph.nodes(): - innernode._hierarchy = '.'.join((self.name, - innernode._hierarchy)) + innernode._hierarchy = ".".join((self.name, innernode._hierarchy)) else: node._hierarchy = self.name @@ -865,179 +904,184 @@ def _generate_flatgraph(self): """Generate a graph containing only Nodes or MapNodes """ import networkx as nx - logger.debug('expanding workflow: %s', self) + + logger.debug("expanding workflow: %s", self) nodes2remove = [] if not nx.is_directed_acyclic_graph(self._graph): - raise Exception(('Workflow: %s is not a directed acyclic graph ' - '(DAG)') % self.name) + raise Exception( + ("Workflow: %s is not a directed acyclic graph " "(DAG)") % self.name + ) nodes = list(nx.topological_sort(self._graph)) for node in nodes: - logger.debug('processing node: %s', node) + logger.debug("processing node: %s", node) if isinstance(node, Workflow): nodes2remove.append(node) # use in_edges instead of in_edges_iter to allow # disconnections to take place properly. otherwise, the # edge dict is modified. # dj: added list() for networkx ver.2 - for u, _, d in list( - self._graph.in_edges(nbunch=node, data=True)): - logger.debug('in: connections-> %s', str(d['connect'])) - for cd in deepcopy(d['connect']): + for u, _, d in list(self._graph.in_edges(nbunch=node, data=True)): + logger.debug("in: connections-> %s", str(d["connect"])) + for cd in deepcopy(d["connect"]): logger.debug("in: %s", str(cd)) - dstnode = node._get_parameter_node(cd[1], subtype='in') + dstnode = node._get_parameter_node(cd[1], subtype="in") srcnode = u srcout = cd[0] - dstin = cd[1].split('.')[-1] - logger.debug('in edges: %s %s %s %s', srcnode, srcout, - dstnode, dstin) + dstin = cd[1].split(".")[-1] + logger.debug( + "in edges: %s %s %s %s", srcnode, srcout, dstnode, dstin + ) self.disconnect(u, cd[0], node, cd[1]) self.connect(srcnode, srcout, dstnode, dstin) # do not use out_edges_iter for reasons stated in in_edges # dj: for ver 2 use list(out_edges) - for _, v, d in list( - self._graph.out_edges(nbunch=node, data=True)): - logger.debug('out: connections-> %s', str(d['connect'])) - for cd in deepcopy(d['connect']): + for _, v, d in list(self._graph.out_edges(nbunch=node, data=True)): + logger.debug("out: connections-> %s", str(d["connect"])) + for cd in deepcopy(d["connect"]): logger.debug("out: %s", str(cd)) dstnode = v if isinstance(cd[0], tuple): parameter = cd[0][0] else: parameter = cd[0] - srcnode = node._get_parameter_node( - parameter, subtype='out') + srcnode = node._get_parameter_node(parameter, subtype="out") if isinstance(cd[0], tuple): srcout = list(cd[0]) - srcout[0] = parameter.split('.')[-1] + srcout[0] = parameter.split(".")[-1] srcout = tuple(srcout) else: - srcout = parameter.split('.')[-1] + srcout = parameter.split(".")[-1] dstin = cd[1] - logger.debug('out edges: %s %s %s %s', srcnode, srcout, - dstnode, dstin) + logger.debug( + "out edges: %s %s %s %s", srcnode, srcout, dstnode, dstin + ) self.disconnect(node, cd[0], v, cd[1]) self.connect(srcnode, srcout, dstnode, dstin) # expand the workflow node # logger.debug('expanding workflow: %s', node) node._generate_flatgraph() for innernode in node._graph.nodes(): - innernode._hierarchy = '.'.join((self.name, - innernode._hierarchy)) + innernode._hierarchy = ".".join((self.name, innernode._hierarchy)) self._graph.add_nodes_from(node._graph.nodes()) 
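                # With the boundary connections rewired above (disconnect()
                # from the Workflow's pseudo-ports, connect() straight to
                # the real inner nodes), the inner graph can be merged
                # wholesale: its nodes and edges are copied up, and the
                # Workflow container itself is queued in nodes2remove for
                # removal below.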
self._graph.add_edges_from(node._graph.edges(data=True)) if nodes2remove: self._graph.remove_nodes_from(nodes2remove) - logger.debug('finished expanding workflow: %s', self) - - def _get_dot(self, - prefix=None, - hierarchy=None, - colored=False, - simple_form=True, - level=0): + logger.debug("finished expanding workflow: %s", self) + + def _get_dot( + self, prefix=None, hierarchy=None, colored=False, simple_form=True, level=0 + ): """Create a dot file with connection info """ import networkx as nx + if prefix is None: - prefix = ' ' + prefix = " " if hierarchy is None: hierarchy = [] colorset = [ - '#FFFFC8', # Y - '#0000FF', - '#B4B4FF', - '#E6E6FF', # B - '#FF0000', - '#FFB4B4', - '#FFE6E6', # R - '#00A300', - '#B4FFB4', - '#E6FFE6', # G - '#0000FF', - '#B4B4FF' + "#FFFFC8", # Y + "#0000FF", + "#B4B4FF", + "#E6E6FF", # B + "#FF0000", + "#FFB4B4", + "#FFE6E6", # R + "#00A300", + "#B4FFB4", + "#E6FFE6", # G + "#0000FF", + "#B4B4FF", ] # loop B if level > len(colorset) - 2: level = 3 # Loop back to blue dotlist = ['%slabel="%s";' % (prefix, self.name)] for node in nx.topological_sort(self._graph): - fullname = '.'.join(hierarchy + [node.fullname]) - nodename = fullname.replace('.', '_') + fullname = ".".join(hierarchy + [node.fullname]) + nodename = fullname.replace(".", "_") if not isinstance(node, Workflow): node_class_name = get_print_name(node, simple_form=simple_form) if not simple_form: - node_class_name = '.'.join(node_class_name.split('.')[1:]) - if hasattr(node, 'iterables') and node.iterables: - dotlist.append(('%s[label="%s", shape=box3d,' - 'style=filled, color=black, colorscheme' - '=greys7 fillcolor=2];') % - (nodename, node_class_name)) + node_class_name = ".".join(node_class_name.split(".")[1:]) + if hasattr(node, "iterables") and node.iterables: + dotlist.append( + ( + '%s[label="%s", shape=box3d,' + "style=filled, color=black, colorscheme" + "=greys7 fillcolor=2];" + ) + % (nodename, node_class_name) + ) else: if colored: dotlist.append( - ('%s[label="%s", style=filled,' - ' fillcolor="%s"];') % (nodename, node_class_name, - colorset[level])) + ('%s[label="%s", style=filled,' ' fillcolor="%s"];') + % (nodename, node_class_name, colorset[level]) + ) else: - dotlist.append(('%s[label="%s"];') % (nodename, - node_class_name)) + dotlist.append( + ('%s[label="%s"];') % (nodename, node_class_name) + ) for node in nx.topological_sort(self._graph): if isinstance(node, Workflow): - fullname = '.'.join(hierarchy + [node.fullname]) - nodename = fullname.replace('.', '_') - dotlist.append('subgraph cluster_%s {' % nodename) + fullname = ".".join(hierarchy + [node.fullname]) + nodename = fullname.replace(".", "_") + dotlist.append("subgraph cluster_%s {" % nodename) if colored: - dotlist.append(prefix + prefix + 'edge [color="%s"];' % - (colorset[level + 1])) - dotlist.append(prefix + prefix + 'style=filled;') - dotlist.append(prefix + prefix + 'fillcolor="%s";' % - (colorset[level + 2])) + dotlist.append( + prefix + prefix + 'edge [color="%s"];' % (colorset[level + 1]) + ) + dotlist.append(prefix + prefix + "style=filled;") + dotlist.append( + prefix + prefix + 'fillcolor="%s";' % (colorset[level + 2]) + ) dotlist.append( node._get_dot( prefix=prefix + prefix, hierarchy=hierarchy + [self.name], colored=colored, simple_form=simple_form, - level=level + 3)) - dotlist.append('}') + level=level + 3, + ) + ) + dotlist.append("}") else: for subnode in self._graph.successors(node): if node._hierarchy != subnode._hierarchy: continue if not isinstance(subnode, Workflow): - nodefullname = 
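_get_dot, reformatted above, accumulates plain Graphviz statements. A toy reconstruction of its two main moves, sanitizing dotted names into identifiers and emitting label/edge lines, with invented node names:

    nodes = ["wf.realign", "wf.smooth"]
    edges = [("wf.realign", "wf.smooth")]

    dotlist = ['label="wf";']
    for name in nodes:
        dotlist.append(
            '%s[label="%s"];' % (name.replace(".", "_"), name.split(".")[-1])
        )
    for src, dst in edges:
        dotlist.append("%s -> %s;" % (src.replace(".", "_"), dst.replace(".", "_")))
    print("\n  ".join(dotlist))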
'.'.join(hierarchy + [node.fullname]) - subnodefullname = '.'.join( - hierarchy + [subnode.fullname]) - nodename = nodefullname.replace('.', '_') - subnodename = subnodefullname.replace('.', '_') - for _ in self._graph.get_edge_data(node, - subnode)['connect']: - dotlist.append('%s -> %s;' % (nodename, - subnodename)) - logger.debug('connection: %s', dotlist[-1]) + nodefullname = ".".join(hierarchy + [node.fullname]) + subnodefullname = ".".join(hierarchy + [subnode.fullname]) + nodename = nodefullname.replace(".", "_") + subnodename = subnodefullname.replace(".", "_") + for _ in self._graph.get_edge_data(node, subnode)["connect"]: + dotlist.append("%s -> %s;" % (nodename, subnodename)) + logger.debug("connection: %s", dotlist[-1]) # add between workflow connections for u, v, d in self._graph.edges(data=True): - uname = '.'.join(hierarchy + [u.fullname]) - vname = '.'.join(hierarchy + [v.fullname]) - for src, dest in d['connect']: + uname = ".".join(hierarchy + [u.fullname]) + vname = ".".join(hierarchy + [v.fullname]) + for src, dest in d["connect"]: uname1 = uname vname1 = vname if isinstance(src, tuple): srcname = src[0] else: srcname = src - if '.' in srcname: - uname1 += '.' + '.'.join(srcname.split('.')[:-1]) - if '.' in dest and '@' not in dest: + if "." in srcname: + uname1 += "." + ".".join(srcname.split(".")[:-1]) + if "." in dest and "@" not in dest: if not isinstance(v, Workflow): - if 'datasink' not in \ - str(v._interface.__class__).lower(): - vname1 += '.' + '.'.join(dest.split('.')[:-1]) + if "datasink" not in str(v._interface.__class__).lower(): + vname1 += "." + ".".join(dest.split(".")[:-1]) else: - vname1 += '.' + '.'.join(dest.split('.')[:-1]) - if uname1.split('.')[:-1] != vname1.split('.')[:-1]: - dotlist.append('%s -> %s;' % (uname1.replace('.', '_'), - vname1.replace('.', '_'))) - logger.debug('cross connection: %s', dotlist[-1]) - return ('\n' + prefix).join(dotlist) + vname1 += "." 
+ ".".join(dest.split(".")[:-1]) + if uname1.split(".")[:-1] != vname1.split(".")[:-1]: + dotlist.append( + "%s -> %s;" + % (uname1.replace(".", "_"), vname1.replace(".", "_")) + ) + logger.debug("cross connection: %s", dotlist[-1]) + return ("\n" + prefix).join(dotlist) diff --git a/nipype/pipeline/plugins/base.py b/nipype/pipeline/plugins/base.py index 23c8faa5b9..f7fcb6dab1 100644 --- a/nipype/pipeline/plugins/base.py +++ b/nipype/pipeline/plugins/base.py @@ -19,7 +19,7 @@ from ..engine import MapNode from .tools import report_crash, report_nodes_not_run, create_pyscript -logger = logging.getLogger('nipype.workflow') +logger = logging.getLogger("nipype.workflow") class PluginBase(object): @@ -33,7 +33,7 @@ def __init__(self, plugin_args=None): plugin_args = {} self.plugin_args = plugin_args self._config = None - self._status_callback = plugin_args.get('status_callback') + self._status_callback = plugin_args.get("status_callback") def run(self, graph, config, updatehash=False): """ @@ -96,7 +96,7 @@ def __init__(self, plugin_args=None): self.proc_done = None self.proc_pending = None self.pending_tasks = [] - self.max_jobs = self.plugin_args.get('max_jobs', np.inf) + self.max_jobs = self.plugin_args.get("max_jobs", np.inf) def _prerun_check(self, graph): """Stub method to validate/massage graph and nodes before running""" @@ -110,7 +110,7 @@ def run(self, graph, config, updatehash=False): """ logger.info("Running in parallel.") self._config = config - poll_sleep_secs = float(config['execution']['poll_sleep_duration']) + poll_sleep_secs = float(config["execution"]["poll_sleep_duration"]) self._prerun_check(graph) # Generate appropriate structures for worker-manager model @@ -126,19 +126,24 @@ def run(self, graph, config, updatehash=False): loop_start = time() # Check if a job is available (jobs with all dependencies run) # https://github.com/nipy/nipype/pull/2200#discussion_r141605722 - jobs_ready = np.nonzero(~self.proc_done & - (self.depidx.sum(0) == 0))[1] - - progress_stats = (len(self.proc_done), - np.sum(self.proc_done ^ self.proc_pending), - np.sum(self.proc_done & self.proc_pending), - len(jobs_ready), len(self.pending_tasks), - np.sum(~self.proc_done & ~self.proc_pending)) + jobs_ready = np.nonzero(~self.proc_done & (self.depidx.sum(0) == 0))[1] + + progress_stats = ( + len(self.proc_done), + np.sum(self.proc_done ^ self.proc_pending), + np.sum(self.proc_done & self.proc_pending), + len(jobs_ready), + len(self.pending_tasks), + np.sum(~self.proc_done & ~self.proc_pending), + ) display_stats = progress_stats != old_progress_stats if display_stats: - logger.debug('Progress: %d jobs, %d/%d/%d ' - '(done/running/ready), %d/%d ' - '(pending_tasks/waiting).', *progress_stats) + logger.debug( + "Progress: %d jobs, %d/%d/%d " + "(done/running/ready), %d/%d " + "(pending_tasks/waiting).", + *progress_stats + ) old_progress_stats = progress_stats toappend = [] # trigger callbacks for any pending results @@ -147,37 +152,34 @@ def run(self, graph, config, updatehash=False): try: result = self._get_result(taskid) except Exception: - notrun.append( - self._clean_queue(jobid, graph)) + notrun.append(self._clean_queue(jobid, graph)) else: if result: - if result['traceback']: + if result["traceback"]: notrun.append( - self._clean_queue(jobid, graph, result=result)) + self._clean_queue(jobid, graph, result=result) + ) else: self._task_finished_cb(jobid) self._remove_node_dirs() self._clear_task(taskid) else: - assert self.proc_done[jobid] and \ - self.proc_pending[jobid] + assert 
self.proc_done[jobid] and self.proc_pending[jobid] toappend.insert(0, (taskid, jobid)) if toappend: self.pending_tasks.extend(toappend) num_jobs = len(self.pending_tasks) - presub_stats = (num_jobs, - np.sum(self.proc_done & self.proc_pending)) + presub_stats = (num_jobs, np.sum(self.proc_done & self.proc_pending)) display_stats = display_stats or presub_stats != old_presub_stats if display_stats: - logger.debug('Tasks currently running: %d. Pending: %d.', - *presub_stats) + logger.debug("Tasks currently running: %d. Pending: %d.", *presub_stats) old_presub_stats = presub_stats if num_jobs < self.max_jobs: self._send_procs_to_workers(updatehash=updatehash, graph=graph) elif display_stats: - logger.debug('Not submitting (max jobs reached)') + logger.debug("Not submitting (max jobs reached)") sleep_til = loop_start + poll_sleep_secs sleep(max(0, sleep_til - time())) @@ -197,8 +199,8 @@ def _submit_job(self, node, updatehash=False): def _report_crash(self, node, result=None): tb = None if result is not None: - node._result = result['result'] - tb = result['traceback'] + node._result = result["result"] + tb = result["traceback"] node._traceback = tb return report_crash(node, traceback=tb) @@ -206,17 +208,19 @@ def _clear_task(self, taskid): raise NotImplementedError def _clean_queue(self, jobid, graph, result=None): - logger.debug('Clearing %d from queue', jobid) + logger.debug("Clearing %d from queue", jobid) if self._status_callback: - self._status_callback(self.procs[jobid], 'exception') + self._status_callback(self.procs[jobid], "exception") if result is None: - result = {'result': None, - 'traceback': '\n'.join(format_exception(*sys.exc_info()))} + result = { + "result": None, + "traceback": "\n".join(format_exception(*sys.exc_info())), + } crashfile = self._report_crash(self.procs[jobid], result=result) - if str2bool(self._config['execution']['stop_on_first_crash']): - raise RuntimeError("".join(result['traceback'])) + if str2bool(self._config["execution"]["stop_on_first_crash"]): + raise RuntimeError("".join(result["traceback"])) if jobid in self.mapnodesubids: # remove current jobid self.proc_pending[jobid] = False @@ -230,29 +234,31 @@ def _clean_queue(self, jobid, graph, result=None): def _submit_mapnode(self, jobid): import scipy.sparse as ssp + if jobid in self.mapnodes: return True self.mapnodes.append(jobid) mapnodesubids = self.procs[jobid].get_subnodes() numnodes = len(mapnodesubids) - logger.debug('Adding %d jobs for mapnode %s', numnodes, - self.procs[jobid]) + logger.debug("Adding %d jobs for mapnode %s", numnodes, self.procs[jobid]) for i in range(numnodes): self.mapnodesubids[self.depidx.shape[0] + i] = jobid self.procs.extend(mapnodesubids) self.depidx = ssp.vstack( - (self.depidx, - ssp.lil_matrix(np.zeros( - (numnodes, self.depidx.shape[1])))), 'lil') + (self.depidx, ssp.lil_matrix(np.zeros((numnodes, self.depidx.shape[1])))), + "lil", + ) self.depidx = ssp.hstack( - (self.depidx, - ssp.lil_matrix(np.zeros( - (self.depidx.shape[0], numnodes)))), 'lil') + (self.depidx, ssp.lil_matrix(np.zeros((self.depidx.shape[0], numnodes)))), + "lil", + ) self.depidx[-numnodes:, jobid] = 1 - self.proc_done = np.concatenate((self.proc_done, - np.zeros(numnodes, dtype=bool))) - self.proc_pending = np.concatenate((self.proc_pending, - np.zeros(numnodes, dtype=bool))) + self.proc_done = np.concatenate( + (self.proc_done, np.zeros(numnodes, dtype=bool)) + ) + self.proc_pending = np.concatenate( + (self.proc_pending, np.zeros(numnodes, dtype=bool)) + ) return False def 
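The _submit_mapnode hunk grows the dependency matrix in place: pad with zero rows/columns for the subnodes, then make the original mapnode job wait on all of them. A runnable sketch with invented sizes, scipy assumed:

    import numpy as np
    import scipy.sparse as ssp

    depidx = ssp.lil_matrix(np.zeros((2, 2)))
    numnodes, jobid = 3, 1  # three subnodes expanded from mapnode job 1

    # pad with zero rows/columns for the subnodes...
    depidx = ssp.vstack(
        (depidx, ssp.lil_matrix(np.zeros((numnodes, depidx.shape[1])))), "lil"
    )
    depidx = ssp.hstack(
        (depidx, ssp.lil_matrix(np.zeros((depidx.shape[0], numnodes)))), "lil"
    )
    # ...then record that the mapnode job depends on every subnode
    depidx[-numnodes:, jobid] = 1
    print(depidx.toarray())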
_send_procs_to_workers(self, updatehash=False, graph=None): @@ -266,7 +272,7 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): slots = None else: slots = max(0, self.max_jobs - num_jobs) - logger.debug('Slots available: %s', slots) + logger.debug("Slots available: %s", slots) if (num_jobs >= self.max_jobs) or (slots == 0): break @@ -276,8 +282,12 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): if len(jobids) > 0: # send all available jobs - logger.info('Pending[%d] Submitting[%d] jobs Slots[%s]', - num_jobs, len(jobids[:slots]), slots or 'inf') + logger.info( + "Pending[%d] Submitting[%d] jobs Slots[%s]", + num_jobs, + len(jobids[:slots]), + slots or "inf", + ) for jobid in jobids[:slots]: if isinstance(self.procs[jobid], MapNode): @@ -295,15 +305,15 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): self.proc_done[jobid] = True self.proc_pending[jobid] = True # Send job to task manager and add to pending tasks - logger.info('Submitting: %s ID: %d', - self.procs[jobid], jobid) + logger.info("Submitting: %s ID: %d", self.procs[jobid], jobid) if self._status_callback: - self._status_callback(self.procs[jobid], 'start') + self._status_callback(self.procs[jobid], "start") if not self._local_hash_check(jobid, graph): if self.procs[jobid].run_without_submitting: - logger.debug('Running node %s on master thread', - self.procs[jobid]) + logger.debug( + "Running node %s on master thread", self.procs[jobid] + ) try: self.procs[jobid].run() except Exception: @@ -312,55 +322,66 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): self._remove_node_dirs() else: tid = self._submit_job( - deepcopy(self.procs[jobid]), - updatehash=updatehash) + deepcopy(self.procs[jobid]), updatehash=updatehash + ) if tid is None: self.proc_done[jobid] = False self.proc_pending[jobid] = False else: self.pending_tasks.insert(0, (tid, jobid)) - logger.info('Finished submitting: %s ID: %d', - self.procs[jobid], jobid) + logger.info( + "Finished submitting: %s ID: %d", self.procs[jobid], jobid + ) else: break def _local_hash_check(self, jobid, graph): - if not str2bool( - self.procs[jobid].config['execution']['local_hash_check']): + if not str2bool(self.procs[jobid].config["execution"]["local_hash_check"]): return False try: cached, updated = self.procs[jobid].is_cached() except Exception: logger.warning( - 'Error while checking node hash, forcing re-run. ' - 'Although this error may not prevent the workflow from running, ' - 'it could indicate a major problem. Please report a new issue ' - 'at https://github.com/nipy/nipype/issues adding the following ' - 'information:\n\n\tNode: %s\n\tInterface: %s.%s\n\tTraceback:\n%s', + "Error while checking node hash, forcing re-run. " + "Although this error may not prevent the workflow from running, " + "it could indicate a major problem. 
Please report a new issue " + "at https://github.com/nipy/nipype/issues adding the following " + "information:\n\n\tNode: %s\n\tInterface: %s.%s\n\tTraceback:\n%s", self.procs[jobid], self.procs[jobid].interface.__module__, self.procs[jobid].interface.__class__.__name__, - '\n'.join(format_exception(*sys.exc_info())) + "\n".join(format_exception(*sys.exc_info())), ) return False - logger.debug('Checking hash "%s" locally: cached=%s, updated=%s.', - self.procs[jobid], cached, updated) + logger.debug( + 'Checking hash "%s" locally: cached=%s, updated=%s.', + self.procs[jobid], + cached, + updated, + ) overwrite = self.procs[jobid].overwrite always_run = self.procs[jobid].interface.always_run - if cached and updated and (overwrite is False or - overwrite is None and not always_run): - logger.debug('Skipping cached node %s with ID %s.', - self.procs[jobid], jobid) + if ( + cached + and updated + and (overwrite is False or overwrite is None and not always_run) + ): + logger.debug( + "Skipping cached node %s with ID %s.", self.procs[jobid], jobid + ) try: self._task_finished_cb(jobid, cached=True) self._remove_node_dirs() except Exception: - logger.debug('Error skipping cached node %s (%s).\n\n%s', - self.procs[jobid], jobid, - '\n'.join(format_exception(*sys.exc_info()))) + logger.debug( + "Error skipping cached node %s (%s).\n\n%s", + self.procs[jobid], + jobid, + "\n".join(format_exception(*sys.exc_info())), + ) self._clean_queue(jobid, graph) self.proc_pending[jobid] = False return True @@ -371,10 +392,14 @@ def _task_finished_cb(self, jobid, cached=False): This is called when a job is completed. """ - logger.info('[Job %d] %s (%s).', jobid, 'Cached' - if cached else 'Completed', self.procs[jobid]) + logger.info( + "[Job %d] %s (%s).", + jobid, + "Cached" if cached else "Completed", + self.procs[jobid], + ) if self._status_callback: - self._status_callback(self.procs[jobid], 'end') + self._status_callback(self.procs[jobid], "end") # Update job and worker queues self.proc_pending[jobid] = False # update the job dependency structure @@ -391,7 +416,8 @@ def _generate_dependency_list(self, graph): self.procs, _ = topological_sort(graph) try: self.depidx = nx.to_scipy_sparse_matrix( - graph, nodelist=self.procs, format='lil') + graph, nodelist=self.procs, format="lil" + ) except: self.depidx = nx.to_scipy_sparse_matrix(graph, nodelist=self.procs) self.refidx = deepcopy(self.depidx) @@ -401,6 +427,7 @@ def _generate_dependency_list(self, graph): def _remove_node_deps(self, jobid, crashfile, graph): import networkx as nx + try: dfs_preorder = nx.dfs_preorder except AttributeError: @@ -410,13 +437,12 @@ def _remove_node_deps(self, jobid, crashfile, graph): idx = self.procs.index(node) self.proc_done[idx] = True self.proc_pending[idx] = False - return dict( - node=self.procs[jobid], dependents=subnodes, crashfile=crashfile) + return dict(node=self.procs[jobid], dependents=subnodes, crashfile=crashfile) def _remove_node_dirs(self): """Removes directories whose outputs have already been used up """ - if str2bool(self._config['execution']['remove_node_directories']): + if str2bool(self._config["execution"]["remove_node_directories"]): indices = np.nonzero((self.refidx.sum(axis=1) == 0).__array__())[0] for idx in indices: if idx in self.mapnodesubids: @@ -424,9 +450,13 @@ def _remove_node_dirs(self): if self.proc_done[idx] and (not self.proc_pending[idx]): self.refidx[idx, idx] = -1 outdir = self.procs[idx].output_dir() - logger.info(('[node dependencies finished] ' - 'removing node: %s from directory 
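_generate_dependency_list above reuses the graph's adjacency matrix as the dependency matrix, so entry [i, j] == 1 means job j waits for job i. A hedged sketch; to_scipy_sparse_matrix was removed in networkx 3.x, so the fallback branch here is an assumption for newer installs:

    import networkx as nx

    g = nx.DiGraph()
    g.add_edges_from([("a", "b"), ("b", "c")])
    procs = list(nx.topological_sort(g))
    try:
        depidx = nx.to_scipy_sparse_matrix(g, nodelist=procs, format="lil")
    except AttributeError:  # networkx >= 3.0
        depidx = nx.to_scipy_sparse_array(g, nodelist=procs, format="lil")
    print(procs)
    print(depidx.toarray())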
%s') % - (self.procs[idx]._id, outdir)) + logger.info( + ( + "[node dependencies finished] " + "removing node: %s from directory %s" + ) + % (self.procs[idx]._id, outdir) + ) shutil.rmtree(outdir) @@ -439,13 +469,13 @@ def __init__(self, template, plugin_args=None): self._template = template self._qsub_args = None if plugin_args: - if 'template' in plugin_args: - self._template = plugin_args['template'] + if "template" in plugin_args: + self._template = plugin_args["template"] if os.path.isfile(self._template): with open(self._template) as tpl_file: self._template = tpl_file.read() - if 'qsub_args' in plugin_args: - self._qsub_args = plugin_args['qsub_args'] + if "qsub_args" in plugin_args: + self._qsub_args = plugin_args["qsub_args"] self._pending = {} def _is_pending(self, taskid): @@ -460,7 +490,7 @@ def _submit_batchtask(self, scriptfile, node): def _get_result(self, taskid): if taskid not in self._pending: - raise Exception('Task %d not found' % taskid) + raise Exception("Task %d not found" % taskid) if self._is_pending(taskid): return None node_dir = self._pending[taskid] @@ -470,46 +500,43 @@ def _get_result(self, taskid): # is a disconnect when the queueing engine knows a job is # finished to when the directories become statable. t = time() - timeout = float(self._config['execution']['job_finished_timeout']) + timeout = float(self._config["execution"]["job_finished_timeout"]) timed_out = True while (time() - t) < timeout: try: - glob(os.path.join(node_dir, 'result_*.pklz')).pop() + glob(os.path.join(node_dir, "result_*.pklz")).pop() timed_out = False break except Exception as e: logger.debug(e) sleep(2) if timed_out: - result_data = { - 'hostname': 'unknown', - 'result': None, - 'traceback': None - } + result_data = {"hostname": "unknown", "result": None, "traceback": None} results_file = None try: - error_message = ('Job id ({0}) finished or terminated, but ' - 'results file does not exist after ({1}) ' - 'seconds. Batch dir contains crashdump file ' - 'if node raised an exception.\n' - 'Node working directory: ({2}) '.format( - taskid, timeout, node_dir)) + error_message = ( + "Job id ({0}) finished or terminated, but " + "results file does not exist after ({1}) " + "seconds. 
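The _get_result code above polls because a queueing engine can report a job finished before its result_*.pklz becomes statable on a shared filesystem. The same pattern factored out, with invented paths and timeouts:

    import os
    from glob import glob
    from time import sleep, time

    def wait_for_result(node_dir, timeout=5.0, poll=2.0):
        start = time()
        while (time() - start) < timeout:
            matches = glob(os.path.join(node_dir, "result_*.pklz"))
            if matches:
                return matches[0]
            sleep(poll)
        return None  # caller treats this as a timeout and reports an IOError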
Batch dir contains crashdump file " + "if node raised an exception.\n" + "Node working directory: ({2}) ".format(taskid, timeout, node_dir) + ) raise IOError(error_message) except IOError as e: - result_data['traceback'] = '\n'.join(format_exception(*sys.exc_info())) + result_data["traceback"] = "\n".join(format_exception(*sys.exc_info())) else: - results_file = glob(os.path.join(node_dir, 'result_*.pklz'))[0] + results_file = glob(os.path.join(node_dir, "result_*.pklz"))[0] result_data = load_resultfile(results_file) result_out = dict(result=None, traceback=None) if isinstance(result_data, dict): - result_out['result'] = result_data['result'] - result_out['traceback'] = result_data['traceback'] - result_out['hostname'] = result_data['hostname'] + result_out["result"] = result_data["result"] + result_out["traceback"] = result_data["traceback"] + result_out["hostname"] = result_data["hostname"] if results_file: - crash_file = os.path.join(node_dir, 'crashstore.pklz') + crash_file = os.path.join(node_dir, "crashstore.pklz") os.rename(results_file, crash_file) else: - result_out['result'] = result_data + result_out["result"] = result_data return result_out def _submit_job(self, node, updatehash=False): @@ -517,11 +544,10 @@ def _submit_job(self, node, updatehash=False): """ pyscript = create_pyscript(node, updatehash=updatehash) batch_dir, name = os.path.split(pyscript) - name = '.'.join(name.split('.')[:-1]) - batchscript = '\n'.join((self._template, '%s %s' % (sys.executable, - pyscript))) - batchscriptfile = os.path.join(batch_dir, 'batchscript_%s.sh' % name) - with open(batchscriptfile, 'wt') as fp: + name = ".".join(name.split(".")[:-1]) + batchscript = "\n".join((self._template, "%s %s" % (sys.executable, pyscript))) + batchscriptfile = os.path.join(batch_dir, "batchscript_%s.sh" % name) + with open(batchscriptfile, "wt") as fp: fp.writelines(batchscript) return self._submit_batchtask(batchscriptfile, node) @@ -534,25 +560,27 @@ class GraphPluginBase(PluginBase): """ def __init__(self, plugin_args=None): - if plugin_args and plugin_args.get('status_callback'): - logger.warning('status_callback not supported for Graph submission' - ' plugins') + if plugin_args and plugin_args.get("status_callback"): + logger.warning( + "status_callback not supported for Graph submission" " plugins" + ) super(GraphPluginBase, self).__init__(plugin_args=plugin_args) def run(self, graph, config, updatehash=False): import networkx as nx + pyfiles = [] dependencies = {} self._config = config nodes = list(nx.topological_sort(graph)) - logger.debug('Creating executable python files for each node') + logger.debug("Creating executable python files for each node") for idx, node in enumerate(nodes): pyfiles.append( - create_pyscript( - node, updatehash=updatehash, store_exception=False)) + create_pyscript(node, updatehash=updatehash, store_exception=False) + ) dependencies[idx] = [ - nodes.index(prevnode) - for prevnode in list(graph.predecessors(node))] + nodes.index(prevnode) for prevnode in list(graph.predecessors(node)) + ] self._submit_graph(pyfiles, dependencies, nodes) def _get_args(self, node, keywords): @@ -562,22 +590,22 @@ def _get_args(self, node, keywords): if keyword == "template" and os.path.isfile(value): with open(value) as f: value = f.read() - if (hasattr(node, "plugin_args") - and isinstance(node.plugin_args, dict) - and keyword in node.plugin_args): - if (keyword == "template" - and os.path.isfile(node.plugin_args[keyword])): + if ( + hasattr(node, "plugin_args") + and 
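A standalone rendering of the batch-script assembly in _submit_job above; the template and pyscript path are hypothetical, and the script is printed rather than written so the sketch has no side effects:

    import os
    import sys

    template = "#!/bin/bash"                    # plugin-supplied template
    pyscript = "/tmp/batch/pyscript_mynode.py"  # hypothetical generated script

    batch_dir, name = os.path.split(pyscript)
    name = ".".join(name.split(".")[:-1])
    batchscript = "\n".join((template, "%s %s" % (sys.executable, pyscript)))
    batchscriptfile = os.path.join(batch_dir, "batchscript_%s.sh" % name)
    print(batchscriptfile)
    print(batchscript)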
isinstance(node.plugin_args, dict) + and keyword in node.plugin_args + ): + if keyword == "template" and os.path.isfile(node.plugin_args[keyword]): with open(node.plugin_args[keyword]) as f: tmp_value = f.read() else: tmp_value = node.plugin_args[keyword] - if ('overwrite' in node.plugin_args - and node.plugin_args['overwrite']): + if "overwrite" in node.plugin_args and node.plugin_args["overwrite"]: value = tmp_value else: value += tmp_value - values += (value, ) + values += (value,) return values def _submit_graph(self, pyfiles, dependencies, nodes): @@ -589,25 +617,25 @@ def _submit_graph(self, pyfiles, dependencies, nodes): def _get_result(self, taskid): if taskid not in self._pending: - raise Exception('Task %d not found' % taskid) + raise Exception("Task %d not found" % taskid) if self._is_pending(taskid): return None node_dir = self._pending[taskid] - glob(os.path.join(node_dir, 'result_*.pklz')).pop() + glob(os.path.join(node_dir, "result_*.pklz")).pop() - results_file = glob(os.path.join(node_dir, 'result_*.pklz'))[0] + results_file = glob(os.path.join(node_dir, "result_*.pklz"))[0] result_data = load_resultfile(results_file) result_out = dict(result=None, traceback=None) if isinstance(result_data, dict): - result_out['result'] = result_data['result'] - result_out['traceback'] = result_data['traceback'] - result_out['hostname'] = result_data['hostname'] + result_out["result"] = result_data["result"] + result_out["traceback"] = result_data["traceback"] + result_out["hostname"] = result_data["hostname"] if results_file: - crash_file = os.path.join(node_dir, 'crashstore.pklz') + crash_file = os.path.join(node_dir, "crashstore.pklz") os.rename(results_file, crash_file) else: - result_out['result'] = result_data + result_out["result"] = result_data return result_out diff --git a/nipype/pipeline/plugins/condor.py b/nipype/pipeline/plugins/condor.py index bdf598c5f6..cd0ad985e2 100644 --- a/nipype/pipeline/plugins/condor.py +++ b/nipype/pipeline/plugins/condor.py @@ -8,7 +8,8 @@ from ...interfaces.base import CommandLine from ... 
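The _get_args hunk above implements a small override protocol: a node-level plugin_args value replaces the plugin-wide one when overwrite is truthy and is appended to it otherwise. Distilled into a sketch with an invented keyword and values:

    def merge_arg(plugin_value, node_plugin_args, keyword):
        if keyword not in node_plugin_args:
            return plugin_value
        if node_plugin_args.get("overwrite"):
            return node_plugin_args[keyword]                 # replace
        return plugin_value + node_plugin_args[keyword]      # append

    print(merge_arg("-l h_vmem=2G", {"qsub_args": " -q long"}, "qsub_args"))
    print(merge_arg("-l h_vmem=2G", {"qsub_args": "-q long", "overwrite": True}, "qsub_args"))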
import logging from .base import SGELikeBatchManagerBase, logger -iflogger = logging.getLogger('nipype.interface') + +iflogger = logging.getLogger("nipype.interface") class CondorPlugin(SGELikeBatchManagerBase): @@ -38,59 +39,59 @@ def __init__(self, **kwargs): """ self._retry_timeout = 2 self._max_tries = 2 - if 'plugin_args' in kwargs and kwargs['plugin_args']: - if 'retry_timeout' in kwargs['plugin_args']: - self._retry_timeout = kwargs['plugin_args']['retry_timeout'] - if 'max_tries' in kwargs['plugin_args']: - self._max_tries = kwargs['plugin_args']['max_tries'] + if "plugin_args" in kwargs and kwargs["plugin_args"]: + if "retry_timeout" in kwargs["plugin_args"]: + self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] + if "max_tries" in kwargs["plugin_args"]: + self._max_tries = kwargs["plugin_args"]["max_tries"] super(CondorPlugin, self).__init__(template, **kwargs) def _is_pending(self, taskid): cmd = CommandLine( - 'condor_q', resource_monitor=False, terminal_output='allatonce') - cmd.inputs.args = '%d' % taskid + "condor_q", resource_monitor=False, terminal_output="allatonce" + ) + cmd.inputs.args = "%d" % taskid # check condor cluster oldlevel = iflogger.level - iflogger.setLevel(logging.getLevelName('CRITICAL')) + iflogger.setLevel(logging.getLevelName("CRITICAL")) result = cmd.run(ignore_exception=True) iflogger.setLevel(oldlevel) - if result.runtime.stdout.count('\n%d' % taskid): + if result.runtime.stdout.count("\n%d" % taskid): return True return False def _submit_batchtask(self, scriptfile, node): cmd = CommandLine( - 'condor_qsub', + "condor_qsub", environ=dict(os.environ), resource_monitor=False, - terminal_output='allatonce') + terminal_output="allatonce", + ) path = os.path.dirname(scriptfile) - qsubargs = '' + qsubargs = "" if self._qsub_args: qsubargs = self._qsub_args - if 'qsub_args' in node.plugin_args: - if 'overwrite' in node.plugin_args and\ - node.plugin_args['overwrite']: - qsubargs = node.plugin_args['qsub_args'] + if "qsub_args" in node.plugin_args: + if "overwrite" in node.plugin_args and node.plugin_args["overwrite"]: + qsubargs = node.plugin_args["qsub_args"] else: - qsubargs += (" " + node.plugin_args['qsub_args']) + qsubargs += " " + node.plugin_args["qsub_args"] if self._qsub_args: qsubargs = self._qsub_args - if '-o' not in qsubargs: - qsubargs = '%s -o %s' % (qsubargs, path) - if '-e' not in qsubargs: - qsubargs = '%s -e %s' % (qsubargs, path) + if "-o" not in qsubargs: + qsubargs = "%s -o %s" % (qsubargs, path) + if "-e" not in qsubargs: + qsubargs = "%s -e %s" % (qsubargs, path) if node._hierarchy: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._hierarchy, - node._id)) + jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._id)) - jobnameitems = jobname.split('.') + jobname = ".".join((dict(os.environ)["LOGNAME"], node._id)) + jobnameitems = jobname.split(".") jobnameitems.reverse() - jobname = '.'.join(jobnameitems) - cmd.inputs.args = '%s -N %s %s' % (qsubargs, jobname, scriptfile) + jobname = ".".join(jobnameitems) + cmd.inputs.args = "%s -N %s %s" % (qsubargs, jobname, scriptfile) oldlevel = iflogger.level - iflogger.setLevel(logging.getLevelName('CRITICAL')) + iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 while True: try: @@ -101,16 +102,20 @@ def _submit_batchtask(self, scriptfile, node): sleep(self._retry_timeout) # sleep 2 seconds and try again else: iflogger.setLevel(oldlevel) - raise RuntimeError('\n'.join((('Could not 
submit condor ' - 'cluster' - ' for node %s') % node._id, - str(e)))) + raise RuntimeError( + "\n".join( + ( + ("Could not submit condor " "cluster" " for node %s") + % node._id, + str(e), + ) + ) + ) else: break iflogger.setLevel(oldlevel) # retrieve condor clusterid - taskid = int(result.runtime.stdout.split(' ')[2]) + taskid = int(result.runtime.stdout.split(" ")[2]) self._pending[taskid] = node.output_dir() - logger.debug('submitted condor cluster: %d for node %s' % (taskid, - node._id)) + logger.debug("submitted condor cluster: %d for node %s" % (taskid, node._id)) return taskid diff --git a/nipype/pipeline/plugins/dagman.py b/nipype/pipeline/plugins/dagman.py index 9db1f70fe5..98b07eeb10 100644 --- a/nipype/pipeline/plugins/dagman.py +++ b/nipype/pipeline/plugins/dagman.py @@ -65,35 +65,37 @@ def _get_str_or_file(self, arg): # actually have to run. would be good to be able to decide whether they # actually have to be scheduled (i.e. output already exist). def __init__(self, **kwargs): - for var, id_, val in \ - (('_template', 'submit_template', self.default_submit_template), - ('_initial_specs', 'template', ''), - ('_initial_specs', 'initial_specs', ''), - ('_override_specs', 'submit_specs', ''), - ('_override_specs', 'override_specs', ''), - ('_wrapper_cmd', 'wrapper_cmd', None), - ('_wrapper_args', 'wrapper_args', ''), - ('_block', 'block', False), - ('_dagman_args', 'dagman_args', '')): - if 'plugin_args' in kwargs \ - and not kwargs['plugin_args'] is None \ - and id_ in kwargs['plugin_args']: - if id_ == 'wrapper_cmd': - val = os.path.abspath(kwargs['plugin_args'][id_]) - elif id_ == 'block': - val = kwargs['plugin_args'][id_] + for var, id_, val in ( + ("_template", "submit_template", self.default_submit_template), + ("_initial_specs", "template", ""), + ("_initial_specs", "initial_specs", ""), + ("_override_specs", "submit_specs", ""), + ("_override_specs", "override_specs", ""), + ("_wrapper_cmd", "wrapper_cmd", None), + ("_wrapper_args", "wrapper_args", ""), + ("_block", "block", False), + ("_dagman_args", "dagman_args", ""), + ): + if ( + "plugin_args" in kwargs + and not kwargs["plugin_args"] is None + and id_ in kwargs["plugin_args"] + ): + if id_ == "wrapper_cmd": + val = os.path.abspath(kwargs["plugin_args"][id_]) + elif id_ == "block": + val = kwargs["plugin_args"][id_] else: - val = self._get_str_or_file(kwargs['plugin_args'][id_]) + val = self._get_str_or_file(kwargs["plugin_args"][id_]) setattr(self, var, val) # TODO remove after some time - if 'plugin_args' in kwargs \ - and not kwargs['plugin_args'] is None: - plugin_args = kwargs['plugin_args'] - if 'template' in plugin_args: + if "plugin_args" in kwargs and not kwargs["plugin_args"] is None: + plugin_args = kwargs["plugin_args"] + if "template" in plugin_args: warn( "the 'template' argument is deprecated, use 'initial_specs' instead" ) - if 'submit_specs' in plugin_args: + if "submit_specs" in plugin_args: warn( "the 'submit_specs' argument is deprecated, use 'override_specs' instead" ) @@ -103,73 +105,89 @@ def _submit_graph(self, pyfiles, dependencies, nodes): # location of all scripts, place dagman output in here too batch_dir, _ = os.path.split(pyfiles[0]) # DAG description filename - dagfilename = os.path.join(batch_dir, 'workflow-%s.dag' % uuid.uuid4()) - with open(dagfilename, 'wt') as dagfileptr: + dagfilename = os.path.join(batch_dir, "workflow-%s.dag" % uuid.uuid4()) + with open(dagfilename, "wt") as dagfileptr: # loop over all scripts, create submit files, and define them # as jobs in the DAG for idx, 
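The condor submission above wraps cmd.run() in a bounded retry loop. The shape of that loop as a generic sketch, where submit is any zero-argument callable (an assumption of this illustration, not a nipype API):

    from time import sleep

    def submit_with_retries(submit, node_id, max_tries=2, retry_timeout=2):
        tries = 0
        while True:
            try:
                return submit()
            except Exception as e:
                if tries < max_tries:
                    tries += 1
                    sleep(retry_timeout)  # wait, then try again
                else:
                    raise RuntimeError(
                        "\n".join(
                            ("Could not submit job for node %s" % node_id, str(e))
                        )
                    )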
pyscript in enumerate(pyfiles): node = nodes[idx] # XXX redundant with previous value? or could it change between # scripts? - template, initial_specs, override_specs, wrapper_cmd, wrapper_args = \ - self._get_args(node, - ["template", "initial_specs", - "override_specs", "wrapper_cmd", - "wrapper_args"]) + ( + template, + initial_specs, + override_specs, + wrapper_cmd, + wrapper_args, + ) = self._get_args( + node, + [ + "template", + "initial_specs", + "override_specs", + "wrapper_cmd", + "wrapper_args", + ], + ) # add required slots to the template - template = '%s\n%s\n%s\nqueue\n' % ('%(initial_specs)s', - template, - '%(override_specs)s') + template = "%s\n%s\n%s\nqueue\n" % ( + "%(initial_specs)s", + template, + "%(override_specs)s", + ) batch_dir, name = os.path.split(pyscript) - name = '.'.join(name.split('.')[:-1]) + name = ".".join(name.split(".")[:-1]) specs = dict( # TODO make parameter for this, initial_specs=initial_specs, executable=sys.executable, nodescript=pyscript, basename=os.path.join(batch_dir, name), - override_specs=override_specs) + override_specs=override_specs, + ) if wrapper_cmd is not None: - specs['executable'] = wrapper_cmd - specs['nodescript'] = \ - '%s %s %s' % (wrapper_args % specs, # give access to variables - sys.executable, - pyscript) + specs["executable"] = wrapper_cmd + specs["nodescript"] = "%s %s %s" % ( + wrapper_args % specs, # give access to variables + sys.executable, + pyscript, + ) submitspec = template % specs # write submit spec for this job - submitfile = os.path.join(batch_dir, '%s.submit' % name) - with open(submitfile, 'wt') as submitfileprt: + submitfile = os.path.join(batch_dir, "%s.submit" % name) + with open(submitfile, "wt") as submitfileprt: submitfileprt.writelines(submitspec) submitfileprt.close() # define job in DAG - dagfileptr.write('JOB %i %s\n' % (idx, submitfile)) + dagfileptr.write("JOB %i %s\n" % (idx, submitfile)) # define dependencies in DAG for child in dependencies: parents = dependencies[child] if len(parents): - dagfileptr.write('PARENT %s CHILD %i\n' % - (' '.join([str(i) for i in parents]), - child)) + dagfileptr.write( + "PARENT %s CHILD %i\n" + % (" ".join([str(i) for i in parents]), child) + ) # hand over DAG to condor_dagman cmd = CommandLine( - 'condor_submit_dag', + "condor_submit_dag", environ=dict(os.environ), resource_monitor=False, - terminal_output='allatonce') + terminal_output="allatonce", + ) # needs -update_submit or re-running a workflow will fail - cmd.inputs.args = '%s -update_submit %s' % (self._dagman_args, - dagfilename) + cmd.inputs.args = "%s -update_submit %s" % (self._dagman_args, dagfilename) cmd.run() - logger.info('submitted all jobs to Condor DAGMan') + logger.info("submitted all jobs to Condor DAGMan") if self._block: # wait for DAGMan to settle down, no time wasted it is already running time.sleep(10) - if not os.path.exists('%s.condor.sub' % dagfilename): + if not os.path.exists("%s.condor.sub" % dagfilename): raise EnvironmentError( "DAGMan did not create its submit file, please check the logs" ) # wait for completion - logger.info('waiting for DAGMan to finish') - lockfilename = '%s.lock' % dagfilename + logger.info("waiting for DAGMan to finish") + lockfilename = "%s.lock" % dagfilename while os.path.exists(lockfilename): time.sleep(5) diff --git a/nipype/pipeline/plugins/debug.py b/nipype/pipeline/plugins/debug.py index 0b9b009c2f..16ea8f44ee 100644 --- a/nipype/pipeline/plugins/debug.py +++ b/nipype/pipeline/plugins/debug.py @@ -14,11 +14,14 @@ class DebugPlugin(PluginBase): 
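_submit_graph above serializes the workflow into DAGMan's plain-text format: one JOB line per generated submit file and one PARENT ... CHILD line per node that has parents. A tiny reconstruction with an invented three-node dependency map:

    dependencies = {0: [], 1: [0], 2: [0, 1]}  # child -> parents
    submitfiles = {i: "node%d.submit" % i for i in dependencies}

    lines = ["JOB %i %s" % (i, submitfiles[i]) for i in sorted(dependencies)]
    for child, parents in dependencies.items():
        if parents:
            lines.append(
                "PARENT %s CHILD %i" % (" ".join(str(i) for i in parents), child)
            )
    print("\n".join(lines))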
def __init__(self, plugin_args=None): super(DebugPlugin, self).__init__(plugin_args=plugin_args) - if plugin_args and "callable" in plugin_args and \ - hasattr(plugin_args['callable'], '__call__'): - self._callable = plugin_args['callable'] + if ( + plugin_args + and "callable" in plugin_args + and hasattr(plugin_args["callable"], "__call__") + ): + self._callable = plugin_args["callable"] else: - raise ValueError('plugin_args must contain a callable function') + raise ValueError("plugin_args must contain a callable function") def run(self, graph, config, updatehash=False): """Executes a pre-defined pipeline in a serial order. @@ -31,7 +34,7 @@ def run(self, graph, config, updatehash=False): """ if not isinstance(graph, nx.DiGraph): - raise ValueError('Input must be a networkx digraph object') + raise ValueError("Input must be a networkx digraph object") logger.info("Executing debug plugin") for node in nx.topological_sort(graph): self._callable(node, graph) diff --git a/nipype/pipeline/plugins/ipython.py b/nipype/pipeline/plugins/ipython.py index eafeb904e3..8a786a16f1 100644 --- a/nipype/pipeline/plugins/ipython.py +++ b/nipype/pipeline/plugins/ipython.py @@ -6,7 +6,7 @@ from pickle import dumps import sys -from .base import (DistributedPluginBase, logger, report_crash) +from .base import DistributedPluginBase, logger, report_crash IPython_not_loaded = False try: @@ -20,19 +20,23 @@ def execute_task(pckld_task, node_config, updatehash): from socket import gethostname from traceback import format_exc from nipype import config, logging + traceback = None result = None import os + cwd = os.getcwd() try: config.update_config(node_config) logging.update_logging(config) from pickle import loads + task = loads(pckld_task) result = task.run(updatehash=updatehash) except: traceback = format_exc() from pickle import loads + task = loads(pckld_task) result = task.result os.chdir(cwd) @@ -45,14 +49,24 @@ class IPythonPlugin(DistributedPluginBase): def __init__(self, plugin_args=None): if IPython_not_loaded: - raise ImportError('Please install ipyparallel to use this plugin.') + raise ImportError("Please install ipyparallel to use this plugin.") super(IPythonPlugin, self).__init__(plugin_args=plugin_args) - valid_args = ('url_file', 'profile', 'cluster_id', 'context', 'debug', - 'timeout', 'config', 'username', 'sshserver', 'sshkey', - 'password', 'paramiko') + valid_args = ( + "url_file", + "profile", + "cluster_id", + "context", + "debug", + "timeout", + "config", + "username", + "sshserver", + "sshkey", + "password", + "paramiko", + ) self.client_args = { - arg: plugin_args[arg] - for arg in valid_args if arg in plugin_args + arg: plugin_args[arg] for arg in valid_args if arg in plugin_args } self.iparallel = None self.taskclient = None @@ -65,36 +79,35 @@ def run(self, graph, config, updatehash=False): """ # retrieve clients again try: - name = 'ipyparallel' + name = "ipyparallel" __import__(name) self.iparallel = sys.modules[name] except ImportError as e: - raise ImportError("ipyparallel not found. Parallel execution " - "will be unavailable") from e + raise ImportError( + "ipyparallel not found. 
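The IPython plugin's execute_task above depends on a pickle round-trip: the node is serialized with protocol 2 on the submitting side and rebuilt inside the engine before .run() is called. The round-trip in miniature, where FakeNode is a stand-in class, not part of nipype:

    from pickle import dumps, loads

    class FakeNode:
        def run(self, updatehash=False):
            return "done (updatehash=%s)" % updatehash

    pckld_task = dumps(FakeNode(), 2)  # protocol 2, as in _submit_job
    task = loads(pckld_task)
    print(task.run(updatehash=False))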
Parallel execution " "will be unavailable" + ) from e try: self.taskclient = self.iparallel.Client(**self.client_args) except Exception as e: if isinstance(e, TimeoutError): raise Exception("No IPython clients found.") from e if isinstance(e, IOError): - raise Exception("ipcluster/ipcontroller has not been started") \ - from e + raise Exception("ipcluster/ipcontroller has not been started") from e if isinstance(e, ValueError): raise Exception("Ipython kernel not installed") from e else: raise e - return super(IPythonPlugin, self).run( - graph, config, updatehash=updatehash) + return super(IPythonPlugin, self).run(graph, config, updatehash=updatehash) def _get_result(self, taskid): if taskid not in self.taskmap: - raise ValueError('Task %d not in pending list' % taskid) + raise ValueError("Task %d not in pending list" % taskid) if self.taskmap[taskid].ready(): result, traceback, hostname = self.taskmap[taskid].get() result_out = dict(result=None, traceback=None) - result_out['result'] = result - result_out['traceback'] = traceback - result_out['hostname'] = hostname + result_out["result"] = result + result_out["traceback"] = traceback + result_out["hostname"] = hostname return result_out else: return None @@ -102,21 +115,22 @@ def _get_result(self, taskid): def _submit_job(self, node, updatehash=False): pckld_node = dumps(node, 2) result_object = self.taskclient.load_balanced_view().apply( - execute_task, pckld_node, node.config, updatehash) + execute_task, pckld_node, node.config, updatehash + ) self._taskid += 1 self.taskmap[self._taskid] = result_object return self._taskid def _report_crash(self, node, result=None): - if result and result['traceback']: - node._result = result['result'] - node._traceback = result['traceback'] - return report_crash(node, traceback=result['traceback']) + if result and result["traceback"]: + node._result = result["result"] + node._traceback = result["traceback"] + return report_crash(node, traceback=result["traceback"]) else: return report_crash(node) def _clear_task(self, taskid): - if IPyversion >= '0.11': + if IPyversion >= "0.11": logger.debug("Clearing id: %d" % taskid) self.taskclient.purge_results(self.taskmap[taskid]) del self.taskmap[taskid] diff --git a/nipype/pipeline/plugins/legacymultiproc.py b/nipype/pipeline/plugins/legacymultiproc.py index 451770e2bd..620aadb422 100644 --- a/nipype/pipeline/plugins/legacymultiproc.py +++ b/nipype/pipeline/plugins/legacymultiproc.py @@ -36,7 +36,7 @@ def indent(text, prefix): # Init logger -logger = logging.getLogger('nipype.workflow') +logger = logging.getLogger("nipype.workflow") # Run node @@ -64,14 +64,15 @@ def run_node(node, updatehash, taskid): # Try and execute the node via node.run() try: - result['result'] = node.run(updatehash=updatehash) + result["result"] = node.run(updatehash=updatehash) except: # noqa: E722, intendedly catch all here - result['traceback'] = format_exception(*sys.exc_info()) - result['result'] = node.result + result["traceback"] = format_exception(*sys.exc_info()) + result["result"] = node.result # Return the result dictionary return result + # Pythons 2.7, 3.4-3.7.0, and 3.7.1 have three different implementations of # pool.Pool().Process(), and the type of the result varies based on the default # multiprocessing context, so we need to dynamically patch the daemon property @@ -84,51 +85,70 @@ def daemon(self): def daemon(self, val): pass + try: from multiprocessing import context + # Exists on all platforms class NonDaemonSpawnProcess(NonDaemonMixin, context.SpawnProcess): pass + 
class NonDaemonSpawnContext(context.SpawnContext): Process = NonDaemonSpawnProcess - _nondaemon_context_mapper = { - 'spawn': NonDaemonSpawnContext() - } + + _nondaemon_context_mapper = {"spawn": NonDaemonSpawnContext()} # POSIX only try: + class NonDaemonForkProcess(NonDaemonMixin, context.ForkProcess): pass + class NonDaemonForkContext(context.ForkContext): Process = NonDaemonForkProcess - _nondaemon_context_mapper['fork'] = NonDaemonForkContext() + + _nondaemon_context_mapper["fork"] = NonDaemonForkContext() except AttributeError: pass # POSIX only try: + class NonDaemonForkServerProcess(NonDaemonMixin, context.ForkServerProcess): pass + class NonDaemonForkServerContext(context.ForkServerContext): Process = NonDaemonForkServerProcess - _nondaemon_context_mapper['forkserver'] = NonDaemonForkServerContext() + + _nondaemon_context_mapper["forkserver"] = NonDaemonForkServerContext() except AttributeError: pass class NonDaemonPool(pool.Pool): - def __init__(self, processes=None, initializer=None, initargs=(), - maxtasksperchild=None, context=None): + def __init__( + self, + processes=None, + initializer=None, + initargs=(), + maxtasksperchild=None, + context=None, + ): if context is None: context = mp.get_context() context = _nondaemon_context_mapper[context._name] - super(NonDaemonPool, self).__init__(processes=processes, - initializer=initializer, - initargs=initargs, - maxtasksperchild=maxtasksperchild, - context=context) + super(NonDaemonPool, self).__init__( + processes=processes, + initializer=initializer, + initargs=initargs, + maxtasksperchild=maxtasksperchild, + context=context, + ) + except ImportError: + class NonDaemonProcess(NonDaemonMixin, mp.Process): pass + class NonDaemonPool(pool.Pool): Process = NonDaemonProcess @@ -179,19 +199,24 @@ def __init__(self, plugin_args=None): self._cwd = os.getcwd() # Read in options or set defaults. 
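The context classes above all exist to carry one trick, visible in the mixin: pool workers are daemonic by default and daemonic processes may not have children, so the daemon property is forced to read False and its setter swallows assignments. The mixin on its own, safe to run since nothing is spawned:

    import multiprocessing as mp

    class NonDaemonMixin(object):
        @property
        def daemon(self):
            return False  # never report as a daemon...

        @daemon.setter
        def daemon(self, val):
            pass          # ...and ignore the pool's attempt to set it

    class NonDaemonProcess(NonDaemonMixin, mp.Process):
        pass

    print(NonDaemonProcess().daemon)  # False, even though pools set daemon=True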
- non_daemon = self.plugin_args.get('non_daemon', True) - maxtasks = self.plugin_args.get('maxtasksperchild', 10) - self.processors = self.plugin_args.get('n_procs', cpu_count()) + non_daemon = self.plugin_args.get("non_daemon", True) + maxtasks = self.plugin_args.get("maxtasksperchild", 10) + self.processors = self.plugin_args.get("n_procs", cpu_count()) self.memory_gb = self.plugin_args.get( - 'memory_gb', # Allocate 90% of system memory - get_system_total_memory_gb() * 0.9) - self.raise_insufficient = self.plugin_args.get('raise_insufficient', - True) + "memory_gb", # Allocate 90% of system memory + get_system_total_memory_gb() * 0.9, + ) + self.raise_insufficient = self.plugin_args.get("raise_insufficient", True) # Instantiate different thread pools for non-daemon processes - logger.debug('[LegacyMultiProc] Starting in "%sdaemon" mode (n_procs=%d, ' - 'mem_gb=%0.2f, cwd=%s)', 'non' * int(non_daemon), - self.processors, self.memory_gb, self._cwd) + logger.debug( + '[LegacyMultiProc] Starting in "%sdaemon" mode (n_procs=%d, ' + "mem_gb=%0.2f, cwd=%s)", + "non" * int(non_daemon), + self.processors, + self.memory_gb, + self._cwd, + ) NipypePool = NonDaemonPool if non_daemon else Pool try: @@ -199,7 +224,7 @@ def __init__(self, plugin_args=None): processes=self.processors, maxtasksperchild=maxtasks, initializer=os.chdir, - initargs=(self._cwd,) + initargs=(self._cwd,), ) except TypeError: # Python < 3.2 does not have maxtasksperchild @@ -212,7 +237,7 @@ def __init__(self, plugin_args=None): def _async_callback(self, args): # Make sure runtime is not left at a dubious working directory os.chdir(self._cwd) - self._taskresult[args['taskid']] = args + self._taskresult[args["taskid"]] = args def _get_result(self, taskid): return self._taskresult.get(taskid) @@ -224,15 +249,18 @@ def _submit_job(self, node, updatehash=False): self._taskid += 1 # Don't allow streaming outputs - if getattr(node.interface, 'terminal_output', '') == 'stream': - node.interface.terminal_output = 'allatonce' + if getattr(node.interface, "terminal_output", "") == "stream": + node.interface.terminal_output = "allatonce" self._task_obj[self._taskid] = self.pool.apply_async( - run_node, (node, updatehash, self._taskid), - callback=self._async_callback) - - logger.debug('[LegacyMultiProc] Submitted task %s (taskid=%d).', - node.fullname, self._taskid) + run_node, (node, updatehash, self._taskid), callback=self._async_callback + ) + + logger.debug( + "[LegacyMultiProc] Submitted task %s (taskid=%d).", + node.fullname, + self._taskid, + ) return self._taskid def _prerun_check(self, graph): @@ -245,17 +273,19 @@ def _prerun_check(self, graph): if np.any(np.array(tasks_mem_gb) > self.memory_gb): logger.warning( - 'Some nodes exceed the total amount of memory available ' - '(%0.2fGB).', self.memory_gb) + "Some nodes exceed the total amount of memory available " "(%0.2fGB).", + self.memory_gb, + ) if self.raise_insufficient: - raise RuntimeError('Insufficient resources available for job') + raise RuntimeError("Insufficient resources available for job") if np.any(np.array(tasks_num_th) > self.processors): logger.warning( - 'Some nodes demand for more threads than available (%d).', - self.processors) + "Some nodes demand for more threads than available (%d).", + self.processors, + ) if self.raise_insufficient: - raise RuntimeError('Insufficient resources available for job') + raise RuntimeError("Insufficient resources available for job") def _postrun_check(self): self.pool.close() @@ -280,46 +310,58 @@ def 
_send_procs_to_workers(self, updatehash=False, graph=None): # Check to see if a job is available (jobs with all dependencies run) # See https://github.com/nipy/nipype/pull/2200#discussion_r141605722 # See also https://github.com/nipy/nipype/issues/2372 - jobids = np.flatnonzero(~self.proc_done & - (self.depidx.sum(axis=0) == 0).__array__()) + jobids = np.flatnonzero( + ~self.proc_done & (self.depidx.sum(axis=0) == 0).__array__() + ) # Check available resources by summing all threads and memory used - free_memory_gb, free_processors = self._check_resources( - self.pending_tasks) - - stats = (len(self.pending_tasks), len(jobids), free_memory_gb, - self.memory_gb, free_processors, self.processors) + free_memory_gb, free_processors = self._check_resources(self.pending_tasks) + + stats = ( + len(self.pending_tasks), + len(jobids), + free_memory_gb, + self.memory_gb, + free_processors, + self.processors, + ) if self._stats != stats: - tasks_list_msg = '' + tasks_list_msg = "" if logger.level <= INFO: running_tasks = [ - ' * %s' % self.procs[jobid].fullname + " * %s" % self.procs[jobid].fullname for _, jobid in self.pending_tasks ] if running_tasks: - tasks_list_msg = '\nCurrently running:\n' - tasks_list_msg += '\n'.join(running_tasks) - tasks_list_msg = indent(tasks_list_msg, ' ' * 21) + tasks_list_msg = "\nCurrently running:\n" + tasks_list_msg += "\n".join(running_tasks) + tasks_list_msg = indent(tasks_list_msg, " " * 21) logger.info( - '[LegacyMultiProc] Running %d tasks, and %d jobs ready. Free ' - 'memory (GB): %0.2f/%0.2f, Free processors: %d/%d.%s', - len(self.pending_tasks), len(jobids), free_memory_gb, - self.memory_gb, free_processors, self.processors, - tasks_list_msg) + "[LegacyMultiProc] Running %d tasks, and %d jobs ready. Free " + "memory (GB): %0.2f/%0.2f, Free processors: %d/%d.%s", + len(self.pending_tasks), + len(jobids), + free_memory_gb, + self.memory_gb, + free_processors, + self.processors, + tasks_list_msg, + ) self._stats = stats if free_memory_gb < 0.01 or free_processors == 0: - logger.debug('No resources available') + logger.debug("No resources available") return if len(jobids) + len(self.pending_tasks) == 0: - logger.debug('No tasks are being run, and no jobs can ' - 'be submitted to the queue. Potential deadlock') + logger.debug( + "No tasks are being run, and no jobs can " + "be submitted to the queue. Potential deadlock" + ) return - jobids = self._sort_jobs( - jobids, scheduler=self.plugin_args.get('scheduler')) + jobids = self._sort_jobs(jobids, scheduler=self.plugin_args.get("scheduler")) # Run garbage collector before potentially submitting jobs gc.collect() @@ -333,12 +375,8 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): except Exception: traceback = format_exception(*sys.exc_info()) self._clean_queue( - jobid, - graph, - result={ - 'result': None, - 'traceback': traceback - }) + jobid, graph, result={"result": None, "traceback": traceback} + ) self.proc_pending[jobid] = False continue if num_subnodes > 1: @@ -352,16 +390,26 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # If node does not fit, skip at this moment if next_job_th > free_processors or next_job_gb > free_memory_gb: - logger.debug('Cannot allocate job %d (%0.2fGB, %d threads).', - jobid, next_job_gb, next_job_th) + logger.debug( + "Cannot allocate job %d (%0.2fGB, %d threads).", + jobid, + next_job_gb, + next_job_th, + ) continue free_memory_gb -= next_job_gb free_processors -= next_job_th - logger.debug('Allocating %s ID=%d (%0.2fGB, %d threads). 
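The resource-aware submission above reduces to a greedy fit test over the ready queue: skip any job whose (memory, threads) demand exceeds the free budget, and debit the budget for the ones that start. The accounting in isolation, with invented demands:

    jobs = {0: (1.5, 2), 1: (8.0, 4), 2: (0.5, 1)}  # jobid -> (mem_gb, threads)
    free_memory_gb, free_processors = 4.0, 4

    for jobid, (gb, th) in jobs.items():
        if gb > free_memory_gb or th > free_processors:
            print("Cannot allocate job %d (%0.2fGB, %d threads)." % (jobid, gb, th))
            continue
        free_memory_gb -= gb
        free_processors -= th
        print(
            "Allocating job %d. Free: %0.2fGB, %d threads."
            % (jobid, free_memory_gb, free_processors)
        )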
Free: ' - '%0.2fGB, %d threads.', self.procs[jobid].fullname, - jobid, next_job_gb, next_job_th, free_memory_gb, - free_processors) + logger.debug( + "Allocating %s ID=%d (%0.2fGB, %d threads). Free: " + "%0.2fGB, %d threads.", + self.procs[jobid].fullname, + jobid, + next_job_gb, + next_job_th, + free_memory_gb, + free_processors, + ) # change job status in appropriate queues self.proc_done[jobid] = True @@ -373,19 +421,14 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # updatehash and run_without_submitting are also run locally if updatehash or self.procs[jobid].run_without_submitting: - logger.debug('Running node %s on master thread', - self.procs[jobid]) + logger.debug("Running node %s on master thread", self.procs[jobid]) try: self.procs[jobid].run(updatehash=updatehash) except Exception: traceback = format_exception(*sys.exc_info()) self._clean_queue( - jobid, - graph, - result={ - 'result': None, - 'traceback': traceback - }) + jobid, graph, result={"result": None, "traceback": traceback} + ) # Release resources self._task_finished_cb(jobid) @@ -402,9 +445,8 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # Task should be submitted to workers # Send job to task manager and add to pending tasks if self._status_callback: - self._status_callback(self.procs[jobid], 'start') - tid = self._submit_job( - deepcopy(self.procs[jobid]), updatehash=updatehash) + self._status_callback(self.procs[jobid], "start") + tid = self._submit_job(deepcopy(self.procs[jobid]), updatehash=updatehash) if tid is None: self.proc_done[jobid] = False self.proc_pending[jobid] = False @@ -413,10 +455,10 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # Display stats next loop self._stats = None - def _sort_jobs(self, jobids, scheduler='tsort'): - if scheduler == 'mem_thread': + def _sort_jobs(self, jobids, scheduler="tsort"): + if scheduler == "mem_thread": return sorted( jobids, - key=lambda item: (self.procs[item].mem_gb, self.procs[item].n_procs) + key=lambda item: (self.procs[item].mem_gb, self.procs[item].n_procs), ) return jobids diff --git a/nipype/pipeline/plugins/linear.py b/nipype/pipeline/plugins/linear.py index 732636ca12..650bff280f 100644 --- a/nipype/pipeline/plugins/linear.py +++ b/nipype/pipeline/plugins/linear.py @@ -5,8 +5,7 @@ """ import os -from .base import (PluginBase, logger, report_crash, report_nodes_not_run, - str2bool) +from .base import PluginBase, logger, report_crash, report_nodes_not_run, str2bool from ..engine.utils import topological_sort @@ -24,40 +23,42 @@ def run(self, graph, config, updatehash=False): defines order of execution """ import networkx as nx + try: dfs_preorder = nx.dfs_preorder except AttributeError: dfs_preorder = nx.dfs_preorder_nodes if not isinstance(graph, nx.DiGraph): - raise ValueError('Input must be a networkx digraph object') + raise ValueError("Input must be a networkx digraph object") logger.info("Running serially.") old_wd = os.getcwd() notrun = [] donotrun = [] nodes, _ = topological_sort(graph) for node in nodes: - endstatus = 'end' + endstatus = "end" try: if node in donotrun: continue if self._status_callback: - self._status_callback(node, 'start') + self._status_callback(node, "start") node.run(updatehash=updatehash) except: - endstatus = 'exception' + endstatus = "exception" # bare except, but i really don't know where a # node might fail crashfile = report_crash(node) - if str2bool(config['execution']['stop_on_first_crash']): + if str2bool(config["execution"]["stop_on_first_crash"]): raise 
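The mem_thread scheduler in _sort_jobs above orders ready jobs by ascending memory demand, then thread demand, so light jobs fill gaps before heavy ones block the queue. The sort key by itself, with invented demands:

    procs = {0: (8.0, 4), 1: (0.5, 1), 2: (2.0, 2)}  # jobid -> (mem_gb, n_procs)
    jobids = [0, 1, 2]
    print(sorted(jobids, key=lambda item: procs[item]))  # [1, 2, 0]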
# remove dependencies from queue subnodes = [s for s in dfs_preorder(graph, node)] - notrun.append({'node': node, 'dependents': subnodes, - 'crashfile': crashfile}) + notrun.append( + {"node": node, "dependents": subnodes, "crashfile": crashfile} + ) donotrun.extend(subnodes) # Delay raising the crash until we cleaned the house - if str2bool(config['execution']['stop_on_first_crash']): + if str2bool(config["execution"]["stop_on_first_crash"]): os.chdir(old_wd) # Return wherever we were before report_nodes_not_run(notrun) # report before raising raise diff --git a/nipype/pipeline/plugins/lsf.py b/nipype/pipeline/plugins/lsf.py index 866529d8bb..a88fbb6675 100644 --- a/nipype/pipeline/plugins/lsf.py +++ b/nipype/pipeline/plugins/lsf.py @@ -9,7 +9,8 @@ from ... import logging from ...interfaces.base import CommandLine from .base import SGELikeBatchManagerBase, logger -iflogger = logging.getLogger('nipype.interface') + +iflogger = logging.getLogger("nipype.interface") class LSFPlugin(SGELikeBatchManagerBase): @@ -30,14 +31,14 @@ def __init__(self, **kwargs): """ self._retry_timeout = 2 self._max_tries = 2 - self._bsub_args = '' - if 'plugin_args' in kwargs and kwargs['plugin_args']: - if 'retry_timeout' in kwargs['plugin_args']: - self._retry_timeout = kwargs['plugin_args']['retry_timeout'] - if 'max_tries' in kwargs['plugin_args']: - self._max_tries = kwargs['plugin_args']['max_tries'] - if 'bsub_args' in kwargs['plugin_args']: - self._bsub_args = kwargs['plugin_args']['bsub_args'] + self._bsub_args = "" + if "plugin_args" in kwargs and kwargs["plugin_args"]: + if "retry_timeout" in kwargs["plugin_args"]: + self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] + if "max_tries" in kwargs["plugin_args"]: + self._max_tries = kwargs["plugin_args"]["max_tries"] + if "bsub_args" in kwargs["plugin_args"]: + self._bsub_args = kwargs["plugin_args"]["bsub_args"] super(LSFPlugin, self).__init__(template, **kwargs) def _is_pending(self, taskid): @@ -46,53 +47,54 @@ def _is_pending(self, taskid): But _is_pending should return True until a job has finished and is ready to be checked for completeness. 
So return True if status is either 'PEND' or 'RUN'""" - cmd = CommandLine( - 'bjobs', resource_monitor=False, terminal_output='allatonce') - cmd.inputs.args = '%d' % taskid + cmd = CommandLine("bjobs", resource_monitor=False, terminal_output="allatonce") + cmd.inputs.args = "%d" % taskid # check lsf task oldlevel = iflogger.level - iflogger.setLevel(logging.getLevelName('CRITICAL')) + iflogger.setLevel(logging.getLevelName("CRITICAL")) result = cmd.run(ignore_exception=True) iflogger.setLevel(oldlevel) # logger.debug(result.runtime.stdout) - if 'DONE' in result.runtime.stdout or 'EXIT' in result.runtime.stdout: + if "DONE" in result.runtime.stdout or "EXIT" in result.runtime.stdout: return False else: return True def _submit_batchtask(self, scriptfile, node): cmd = CommandLine( - 'bsub', + "bsub", environ=dict(os.environ), resource_monitor=False, - terminal_output='allatonce') - bsubargs = '' + terminal_output="allatonce", + ) + bsubargs = "" if self._bsub_args: bsubargs = self._bsub_args - if 'bsub_args' in node.plugin_args: - if 'overwrite' in node.plugin_args and\ - node.plugin_args['overwrite']: - bsubargs = node.plugin_args['bsub_args'] + if "bsub_args" in node.plugin_args: + if "overwrite" in node.plugin_args and node.plugin_args["overwrite"]: + bsubargs = node.plugin_args["bsub_args"] else: - bsubargs += (" " + node.plugin_args['bsub_args']) - if '-o' not in bsubargs: # -o outfile - bsubargs = '%s -o %s' % (bsubargs, scriptfile + ".log") - if '-e' not in bsubargs: + bsubargs += " " + node.plugin_args["bsub_args"] + if "-o" not in bsubargs: # -o outfile + bsubargs = "%s -o %s" % (bsubargs, scriptfile + ".log") + if "-e" not in bsubargs: # -e error file - bsubargs = '%s -e %s' % (bsubargs, scriptfile + ".log") + bsubargs = "%s -e %s" % (bsubargs, scriptfile + ".log") if node._hierarchy: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._hierarchy, - node._id)) + jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._id)) - jobnameitems = jobname.split('.') + jobname = ".".join((dict(os.environ)["LOGNAME"], node._id)) + jobnameitems = jobname.split(".") jobnameitems.reverse() - jobname = '.'.join(jobnameitems) - cmd.inputs.args = '%s -J %s sh %s' % (bsubargs, jobname, - scriptfile) # -J job_name_spec - logger.debug('bsub ' + cmd.inputs.args) + jobname = ".".join(jobnameitems) + cmd.inputs.args = "%s -J %s sh %s" % ( + bsubargs, + jobname, + scriptfile, + ) # -J job_name_spec + logger.debug("bsub " + cmd.inputs.args) oldlevel = iflogger.level - iflogger.setLevel(logging.getLevelName('CRITICAL')) + iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 while True: try: @@ -100,23 +102,28 @@ def _submit_batchtask(self, scriptfile, node): except Exception as e: if tries < self._max_tries: tries += 1 - sleep( - self._retry_timeout) # sleep 2 seconds and try again. + sleep(self._retry_timeout) # sleep 2 seconds and try again. 
else: iflogger.setLevel(oldlevel) - raise RuntimeError('\n'.join((('Could not submit lsf task' - ' for node %s') % node._id, - str(e)))) + raise RuntimeError( + "\n".join( + ( + ("Could not submit lsf task" " for node %s") % node._id, + str(e), + ) + ) + ) else: break iflogger.setLevel(oldlevel) # retrieve lsf taskid - match = re.search(r'<(\d*)>', result.runtime.stdout) + match = re.search(r"<(\d*)>", result.runtime.stdout) if match: taskid = int(match.groups()[0]) else: - raise IOError("Can't parse submission job output id: %s" % - result.runtime.stdout) + raise IOError( + "Can't parse submission job output id: %s" % result.runtime.stdout + ) self._pending[taskid] = node.output_dir() - logger.debug('submitted lsf task: %d for node %s' % (taskid, node._id)) + logger.debug("submitted lsf task: %d for node %s" % (taskid, node._id)) return taskid diff --git a/nipype/pipeline/plugins/multiproc.py b/nipype/pipeline/plugins/multiproc.py index f310d97912..dc950385b1 100644 --- a/nipype/pipeline/plugins/multiproc.py +++ b/nipype/pipeline/plugins/multiproc.py @@ -36,7 +36,7 @@ def indent(text, prefix): # Init logger -logger = logging.getLogger('nipype.workflow') +logger = logging.getLogger("nipype.workflow") # Run node @@ -64,10 +64,10 @@ def run_node(node, updatehash, taskid): # Try and execute the node via node.run() try: - result['result'] = node.run(updatehash=updatehash) + result["result"] = node.run(updatehash=updatehash) except: # noqa: E722, intendedly catch all here - result['traceback'] = format_exception(*sys.exc_info()) - result['result'] = node.result + result["traceback"] = format_exception(*sys.exc_info()) + result["result"] = node.result # Return the result dictionary return result @@ -118,25 +118,29 @@ def __init__(self, plugin_args=None): self._cwd = os.getcwd() # Read in options or set defaults. 
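A usage sketch for the plugin_args consumed just below; the workflow is an invented placeholder and the resource figures are illustrative:

    import nipype.pipeline.engine as pe

    wf = pe.Workflow(name="demo")  # placeholder workflow; real nodes omitted
    wf.run(
        plugin="MultiProc",
        plugin_args={
            "n_procs": 4,                # default: mp.cpu_count()
            "memory_gb": 8,              # default: 90% of system memory
            "raise_insufficient": True,  # abort if any single node can never fit
        },
    )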
- self.processors = self.plugin_args.get('n_procs', mp.cpu_count()) + self.processors = self.plugin_args.get("n_procs", mp.cpu_count()) self.memory_gb = self.plugin_args.get( - 'memory_gb', # Allocate 90% of system memory - get_system_total_memory_gb() * 0.9) - self.raise_insufficient = self.plugin_args.get('raise_insufficient', - True) + "memory_gb", # Allocate 90% of system memory + get_system_total_memory_gb() * 0.9, + ) + self.raise_insufficient = self.plugin_args.get("raise_insufficient", True) # Instantiate different thread pools for non-daemon processes - logger.debug('[MultiProc] Starting (n_procs=%d, ' - 'mem_gb=%0.2f, cwd=%s)', - self.processors, self.memory_gb, self._cwd) + logger.debug( + "[MultiProc] Starting (n_procs=%d, " "mem_gb=%0.2f, cwd=%s)", + self.processors, + self.memory_gb, + self._cwd, + ) try: - mp_context = mp.context.get_context( - self.plugin_args.get('mp_context')) - self.pool = ProcessPoolExecutor(max_workers=self.processors, - initializer=os.chdir, - initargs=(self._cwd,), - mp_context=mp_context) + mp_context = mp.context.get_context(self.plugin_args.get("mp_context")) + self.pool = ProcessPoolExecutor( + max_workers=self.processors, + initializer=os.chdir, + initargs=(self._cwd,), + mp_context=mp_context, + ) except (AttributeError, TypeError): # Python < 3.7 does not support initialization or contexts self.pool = ProcessPoolExecutor(max_workers=self.processors) @@ -145,7 +149,7 @@ def __init__(self, plugin_args=None): def _async_callback(self, args): result = args.result() - self._taskresult[result['taskid']] = result + self._taskresult[result["taskid"]] = result def _get_result(self, taskid): return self._taskresult.get(taskid) @@ -157,15 +161,16 @@ def _submit_job(self, node, updatehash=False): self._taskid += 1 # Don't allow streaming outputs - if getattr(node.interface, 'terminal_output', '') == 'stream': - node.interface.terminal_output = 'allatonce' + if getattr(node.interface, "terminal_output", "") == "stream": + node.interface.terminal_output = "allatonce" result_future = self.pool.submit(run_node, node, updatehash, self._taskid) result_future.add_done_callback(self._async_callback) self._task_obj[self._taskid] = result_future - logger.debug('[MultiProc] Submitted task %s (taskid=%d).', - node.fullname, self._taskid) + logger.debug( + "[MultiProc] Submitted task %s (taskid=%d).", node.fullname, self._taskid + ) return self._taskid def _prerun_check(self, graph): @@ -178,17 +183,19 @@ def _prerun_check(self, graph): if np.any(np.array(tasks_mem_gb) > self.memory_gb): logger.warning( - 'Some nodes exceed the total amount of memory available ' - '(%0.2fGB).', self.memory_gb) + "Some nodes exceed the total amount of memory available " "(%0.2fGB).", + self.memory_gb, + ) if self.raise_insufficient: - raise RuntimeError('Insufficient resources available for job') + raise RuntimeError("Insufficient resources available for job") if np.any(np.array(tasks_num_th) > self.processors): logger.warning( - 'Some nodes demand for more threads than available (%d).', - self.processors) + "Some nodes demand for more threads than available (%d).", + self.processors, + ) if self.raise_insufficient: - raise RuntimeError('Insufficient resources available for job') + raise RuntimeError("Insufficient resources available for job") def _postrun_check(self): self.pool.shutdown() @@ -213,46 +220,58 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # Check to see if a job is available (jobs with all dependencies run) # See 
https://github.com/nipy/nipype/pull/2200#discussion_r141605722 # See also https://github.com/nipy/nipype/issues/2372 - jobids = np.flatnonzero(~self.proc_done & - (self.depidx.sum(axis=0) == 0).__array__()) + jobids = np.flatnonzero( + ~self.proc_done & (self.depidx.sum(axis=0) == 0).__array__() + ) # Check available resources by summing all threads and memory used - free_memory_gb, free_processors = self._check_resources( - self.pending_tasks) - - stats = (len(self.pending_tasks), len(jobids), free_memory_gb, - self.memory_gb, free_processors, self.processors) + free_memory_gb, free_processors = self._check_resources(self.pending_tasks) + + stats = ( + len(self.pending_tasks), + len(jobids), + free_memory_gb, + self.memory_gb, + free_processors, + self.processors, + ) if self._stats != stats: - tasks_list_msg = '' + tasks_list_msg = "" if logger.level <= INFO: running_tasks = [ - ' * %s' % self.procs[jobid].fullname + " * %s" % self.procs[jobid].fullname for _, jobid in self.pending_tasks ] if running_tasks: - tasks_list_msg = '\nCurrently running:\n' - tasks_list_msg += '\n'.join(running_tasks) - tasks_list_msg = indent(tasks_list_msg, ' ' * 21) + tasks_list_msg = "\nCurrently running:\n" + tasks_list_msg += "\n".join(running_tasks) + tasks_list_msg = indent(tasks_list_msg, " " * 21) logger.info( - '[MultiProc] Running %d tasks, and %d jobs ready. Free ' - 'memory (GB): %0.2f/%0.2f, Free processors: %d/%d.%s', - len(self.pending_tasks), len(jobids), free_memory_gb, - self.memory_gb, free_processors, self.processors, - tasks_list_msg) + "[MultiProc] Running %d tasks, and %d jobs ready. Free " + "memory (GB): %0.2f/%0.2f, Free processors: %d/%d.%s", + len(self.pending_tasks), + len(jobids), + free_memory_gb, + self.memory_gb, + free_processors, + self.processors, + tasks_list_msg, + ) self._stats = stats if free_memory_gb < 0.01 or free_processors == 0: - logger.debug('No resources available') + logger.debug("No resources available") return if len(jobids) + len(self.pending_tasks) == 0: - logger.debug('No tasks are being run, and no jobs can ' - 'be submitted to the queue. Potential deadlock') + logger.debug( + "No tasks are being run, and no jobs can " + "be submitted to the queue. Potential deadlock" + ) return - jobids = self._sort_jobs( - jobids, scheduler=self.plugin_args.get('scheduler')) + jobids = self._sort_jobs(jobids, scheduler=self.plugin_args.get("scheduler")) # Run garbage collector before potentially submitting jobs gc.collect() @@ -266,12 +285,8 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): except Exception: traceback = format_exception(*sys.exc_info()) self._clean_queue( - jobid, - graph, - result={ - 'result': None, - 'traceback': traceback - }) + jobid, graph, result={"result": None, "traceback": traceback} + ) self.proc_pending[jobid] = False continue if num_subnodes > 1: @@ -285,16 +300,26 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # If node does not fit, skip at this moment if next_job_th > free_processors or next_job_gb > free_memory_gb: - logger.debug('Cannot allocate job %d (%0.2fGB, %d threads).', - jobid, next_job_gb, next_job_th) + logger.debug( + "Cannot allocate job %d (%0.2fGB, %d threads).", + jobid, + next_job_gb, + next_job_th, + ) continue free_memory_gb -= next_job_gb free_processors -= next_job_th - logger.debug('Allocating %s ID=%d (%0.2fGB, %d threads). 
Free: ' - '%0.2fGB, %d threads.', self.procs[jobid].fullname, - jobid, next_job_gb, next_job_th, free_memory_gb, - free_processors) + logger.debug( + "Allocating %s ID=%d (%0.2fGB, %d threads). Free: " + "%0.2fGB, %d threads.", + self.procs[jobid].fullname, + jobid, + next_job_gb, + next_job_th, + free_memory_gb, + free_processors, + ) # change job status in appropriate queues self.proc_done[jobid] = True @@ -306,19 +331,14 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # updatehash and run_without_submitting are also run locally if updatehash or self.procs[jobid].run_without_submitting: - logger.debug('Running node %s on master thread', - self.procs[jobid]) + logger.debug("Running node %s on master thread", self.procs[jobid]) try: self.procs[jobid].run(updatehash=updatehash) except Exception: traceback = format_exception(*sys.exc_info()) self._clean_queue( - jobid, - graph, - result={ - 'result': None, - 'traceback': traceback - }) + jobid, graph, result={"result": None, "traceback": traceback} + ) # Release resources self._task_finished_cb(jobid) @@ -335,9 +355,8 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # Task should be submitted to workers # Send job to task manager and add to pending tasks if self._status_callback: - self._status_callback(self.procs[jobid], 'start') - tid = self._submit_job( - deepcopy(self.procs[jobid]), updatehash=updatehash) + self._status_callback(self.procs[jobid], "start") + tid = self._submit_job(deepcopy(self.procs[jobid]), updatehash=updatehash) if tid is None: self.proc_done[jobid] = False self.proc_pending[jobid] = False @@ -346,10 +365,10 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # Display stats next loop self._stats = None - def _sort_jobs(self, jobids, scheduler='tsort'): - if scheduler == 'mem_thread': + def _sort_jobs(self, jobids, scheduler="tsort"): + if scheduler == "mem_thread": return sorted( jobids, - key=lambda item: (self.procs[item].mem_gb, self.procs[item].n_procs) + key=lambda item: (self.procs[item].mem_gb, self.procs[item].n_procs), ) return jobids diff --git a/nipype/pipeline/plugins/oar.py b/nipype/pipeline/plugins/oar.py index 5ce718c250..4ce64305eb 100644 --- a/nipype/pipeline/plugins/oar.py +++ b/nipype/pipeline/plugins/oar.py @@ -10,7 +10,8 @@ from ... 
import logging from ...interfaces.base import CommandLine from .base import SGELikeBatchManagerBase, logger -iflogger = logging.getLogger('nipype.interface') + +iflogger = logging.getLogger("nipype.interface") class OARPlugin(SGELikeBatchManagerBase): @@ -28,7 +29,7 @@ class OARPlugin(SGELikeBatchManagerBase): # Addtional class variables _max_jobname_len = 15 - _oarsub_args = '' + _oarsub_args = "" def __init__(self, **kwargs): template = """ @@ -37,71 +38,75 @@ def __init__(self, **kwargs): self._retry_timeout = 2 self._max_tries = 2 self._max_jobname_length = 15 - if 'plugin_args' in kwargs and kwargs['plugin_args']: - if 'oarsub_args' in kwargs['plugin_args']: - self._oarsub_args = kwargs['plugin_args']['oarsub_args'] - if 'retry_timeout' in kwargs['plugin_args']: - self._retry_timeout = kwargs['plugin_args']['retry_timeout'] - if 'max_tries' in kwargs['plugin_args']: - self._max_tries = kwargs['plugin_args']['max_tries'] - if 'max_jobname_len' in kwargs['plugin_args']: - self._max_jobname_len = \ - kwargs['plugin_args']['max_jobname_len'] + if "plugin_args" in kwargs and kwargs["plugin_args"]: + if "oarsub_args" in kwargs["plugin_args"]: + self._oarsub_args = kwargs["plugin_args"]["oarsub_args"] + if "retry_timeout" in kwargs["plugin_args"]: + self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] + if "max_tries" in kwargs["plugin_args"]: + self._max_tries = kwargs["plugin_args"]["max_tries"] + if "max_jobname_len" in kwargs["plugin_args"]: + self._max_jobname_len = kwargs["plugin_args"]["max_jobname_len"] super(OARPlugin, self).__init__(template, **kwargs) def _is_pending(self, taskid): # subprocess.Popen requires taskid to be a string proc = subprocess.Popen( - ['oarstat', '-J', '-s', '-j', taskid], + ["oarstat", "-J", "-s", "-j", taskid], stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + stderr=subprocess.PIPE, + ) o, e = proc.communicate() parsed_result = json.loads(o)[taskid].lower() - is_pending = (('error' not in parsed_result) - and ('terminated' not in parsed_result)) + is_pending = ("error" not in parsed_result) and ( + "terminated" not in parsed_result + ) return is_pending def _submit_batchtask(self, scriptfile, node): cmd = CommandLine( - 'oarsub', + "oarsub", environ=dict(os.environ), resource_monitor=False, - terminal_output='allatonce') + terminal_output="allatonce", + ) path = os.path.dirname(scriptfile) - oarsubargs = '' + oarsubargs = "" if self._oarsub_args: oarsubargs = self._oarsub_args - if 'oarsub_args' in node.plugin_args: - if ('overwrite' in node.plugin_args - and node.plugin_args['overwrite']): - oarsubargs = node.plugin_args['oarsub_args'] + if "oarsub_args" in node.plugin_args: + if "overwrite" in node.plugin_args and node.plugin_args["overwrite"]: + oarsubargs = node.plugin_args["oarsub_args"] else: - oarsubargs += (" " + node.plugin_args['oarsub_args']) + oarsubargs += " " + node.plugin_args["oarsub_args"] if node._hierarchy: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._hierarchy, - node._id)) + jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._id)) - jobnameitems = jobname.split('.') + jobname = ".".join((dict(os.environ)["LOGNAME"], node._id)) + jobnameitems = jobname.split(".") jobnameitems.reverse() - jobname = '.'.join(jobnameitems) - jobname = jobname[0:self._max_jobname_len] - - if '-O' not in oarsubargs: - oarsubargs = '%s -O %s' % (oarsubargs, - os.path.join(path, jobname + '.stdout')) - if '-E' not in oarsubargs: - oarsubargs = '%s -E 
%s' % (oarsubargs, - os.path.join(path, jobname + '.stderr')) - if '-J' not in oarsubargs: - oarsubargs = '%s -J' % (oarsubargs) + jobname = ".".join(jobnameitems) + jobname = jobname[0 : self._max_jobname_len] + + if "-O" not in oarsubargs: + oarsubargs = "%s -O %s" % ( + oarsubargs, + os.path.join(path, jobname + ".stdout"), + ) + if "-E" not in oarsubargs: + oarsubargs = "%s -E %s" % ( + oarsubargs, + os.path.join(path, jobname + ".stderr"), + ) + if "-J" not in oarsubargs: + oarsubargs = "%s -J" % (oarsubargs) os.chmod(scriptfile, stat.S_IEXEC | stat.S_IREAD | stat.S_IWRITE) - cmd.inputs.args = '%s -n %s -S %s' % (oarsubargs, jobname, scriptfile) + cmd.inputs.args = "%s -n %s -S %s" % (oarsubargs, jobname, scriptfile) oldlevel = iflogger.level - iflogger.setLevel(logging.getLevelName('CRITICAL')) + iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 while True: try: @@ -113,24 +118,29 @@ def _submit_batchtask(self, scriptfile, node): # sleep 2 seconds and try again. else: iflogger.setLevel(oldlevel) - raise RuntimeError('\n'.join((('Could not submit OAR task' - ' for node %s') % node._id, - str(e)))) + raise RuntimeError( + "\n".join( + ( + ("Could not submit OAR task" " for node %s") % node._id, + str(e), + ) + ) + ) else: break iflogger.setLevel(oldlevel) # retrieve OAR taskid - o = '' + o = "" add = False for line in result.runtime.stdout.splitlines(): - if line.strip().startswith('{'): + if line.strip().startswith("{"): add = True if add: - o += line + '\n' - if line.strip().startswith('}'): + o += line + "\n" + if line.strip().startswith("}"): break - taskid = json.loads(o)['job_id'] + taskid = json.loads(o)["job_id"] self._pending[taskid] = node.output_dir() - logger.debug('submitted OAR task: %s for node %s' % (taskid, node._id)) + logger.debug("submitted OAR task: %s for node %s" % (taskid, node._id)) return taskid diff --git a/nipype/pipeline/plugins/pbs.py b/nipype/pipeline/plugins/pbs.py index cc5bbed93b..b322d88743 100644 --- a/nipype/pipeline/plugins/pbs.py +++ b/nipype/pipeline/plugins/pbs.py @@ -8,7 +8,7 @@ from ...interfaces.base import CommandLine from .base import SGELikeBatchManagerBase, logger -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") class PBSPlugin(SGELikeBatchManagerBase): @@ -34,65 +34,65 @@ def __init__(self, **kwargs): self._retry_timeout = 2 self._max_tries = 2 self._max_jobname_length = 15 - if 'plugin_args' in kwargs and kwargs['plugin_args']: - if 'retry_timeout' in kwargs['plugin_args']: - self._retry_timeout = kwargs['plugin_args']['retry_timeout'] - if 'max_tries' in kwargs['plugin_args']: - self._max_tries = kwargs['plugin_args']['max_tries'] - if 'max_jobname_len' in kwargs['plugin_args']: - self._max_jobname_len = kwargs['plugin_args'][ - 'max_jobname_len'] + if "plugin_args" in kwargs and kwargs["plugin_args"]: + if "retry_timeout" in kwargs["plugin_args"]: + self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] + if "max_tries" in kwargs["plugin_args"]: + self._max_tries = kwargs["plugin_args"]["max_tries"] + if "max_jobname_len" in kwargs["plugin_args"]: + self._max_jobname_len = kwargs["plugin_args"]["max_jobname_len"] super(PBSPlugin, self).__init__(template, **kwargs) def _is_pending(self, taskid): - result = CommandLine('qstat -f {}'.format(taskid), - environ=dict(os.environ), - terminal_output='file_split', - resource_monitor=False, - ignore_exception=True).run() + result = CommandLine( + "qstat -f {}".format(taskid), + environ=dict(os.environ), + 
terminal_output="file_split", + resource_monitor=False, + ignore_exception=True, + ).run() stdout = result.runtime.stdout stderr = result.runtime.stderr - errmsg = 'Unknown Job Id' - success = 'Job has finished' - if (success in stderr) or ('job_state = C' in stdout): + errmsg = "Unknown Job Id" + success = "Job has finished" + if (success in stderr) or ("job_state = C" in stdout): return False else: return errmsg not in stderr def _submit_batchtask(self, scriptfile, node): cmd = CommandLine( - 'qsub', + "qsub", environ=dict(os.environ), resource_monitor=False, - terminal_output='allatonce') + terminal_output="allatonce", + ) path = os.path.dirname(scriptfile) - qsubargs = '' + qsubargs = "" if self._qsub_args: qsubargs = self._qsub_args - if 'qsub_args' in node.plugin_args: - if 'overwrite' in node.plugin_args and \ - node.plugin_args['overwrite']: - qsubargs = node.plugin_args['qsub_args'] + if "qsub_args" in node.plugin_args: + if "overwrite" in node.plugin_args and node.plugin_args["overwrite"]: + qsubargs = node.plugin_args["qsub_args"] else: - qsubargs += (" " + node.plugin_args['qsub_args']) - if '-o' not in qsubargs: - qsubargs = '%s -o %s' % (qsubargs, path) - if '-e' not in qsubargs: - qsubargs = '%s -e %s' % (qsubargs, path) + qsubargs += " " + node.plugin_args["qsub_args"] + if "-o" not in qsubargs: + qsubargs = "%s -o %s" % (qsubargs, path) + if "-e" not in qsubargs: + qsubargs = "%s -e %s" % (qsubargs, path) if node._hierarchy: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._hierarchy, - node._id)) + jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._id)) - jobnameitems = jobname.split('.') + jobname = ".".join((dict(os.environ)["LOGNAME"], node._id)) + jobnameitems = jobname.split(".") jobnameitems.reverse() - jobname = '.'.join(jobnameitems) - jobname = jobname[0:self._max_jobname_len] - cmd.inputs.args = '%s -N %s %s' % (qsubargs, jobname, scriptfile) + jobname = ".".join(jobnameitems) + jobname = jobname[0 : self._max_jobname_len] + cmd.inputs.args = "%s -N %s %s" % (qsubargs, jobname, scriptfile) oldlevel = iflogger.level - iflogger.setLevel(logging.getLevelName('CRITICAL')) + iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 while True: try: @@ -105,15 +105,14 @@ def _submit_batchtask(self, scriptfile, node): else: iflogger.setLevel(oldlevel) raise RuntimeError( - 'Could not submit pbs task for node {}\n{}'.format( - node._id, e)) + "Could not submit pbs task for node {}\n{}".format(node._id, e) + ) else: break iflogger.setLevel(oldlevel) # retrieve pbs taskid - taskid = result.runtime.stdout.split('.')[0] + taskid = result.runtime.stdout.split(".")[0] self._pending[taskid] = node.output_dir() - logger.debug('submitted pbs task: {} for node {}'.format( - taskid, node._id)) + logger.debug("submitted pbs task: {} for node {}".format(taskid, node._id)) return taskid diff --git a/nipype/pipeline/plugins/pbsgraph.py b/nipype/pipeline/plugins/pbsgraph.py index 9b6e9da755..6304e715b7 100644 --- a/nipype/pipeline/plugins/pbsgraph.py +++ b/nipype/pipeline/plugins/pbsgraph.py @@ -19,43 +19,44 @@ class PBSGraphPlugin(SGEGraphPlugin): qsub call """ + _template = """ #PBS -V """ def _submit_graph(self, pyfiles, dependencies, nodes): batch_dir, _ = os.path.split(pyfiles[0]) - submitjobsfile = os.path.join(batch_dir, 'submit_jobs.sh') - with open(submitjobsfile, 'wt') as fp: - fp.writelines('#!/usr/bin/env sh\n') + submitjobsfile = os.path.join(batch_dir, "submit_jobs.sh") + 
with open(submitjobsfile, "wt") as fp: + fp.writelines("#!/usr/bin/env sh\n") for idx, pyscript in enumerate(pyfiles): node = nodes[idx] - template, qsub_args = self._get_args(node, - ["template", "qsub_args"]) + template, qsub_args = self._get_args(node, ["template", "qsub_args"]) batch_dir, name = os.path.split(pyscript) - name = '.'.join(name.split('.')[:-1]) - batchscript = '\n'.join((template, '%s %s' % (sys.executable, - pyscript))) - batchscriptfile = os.path.join(batch_dir, - 'batchscript_%s.sh' % name) - with open(batchscriptfile, 'wt') as batchfp: + name = ".".join(name.split(".")[:-1]) + batchscript = "\n".join( + (template, "%s %s" % (sys.executable, pyscript)) + ) + batchscriptfile = os.path.join(batch_dir, "batchscript_%s.sh" % name) + with open(batchscriptfile, "wt") as batchfp: batchfp.writelines(batchscript) batchfp.close() - deps = '' + deps = "" if idx in dependencies: - values = [ - '$job%05d' % jobid for jobid in dependencies[idx] - ] + values = ["$job%05d" % jobid for jobid in dependencies[idx]] if len(values): - deps = '-W depend=afterok:%s' % ':'.join(values) - fp.writelines('job%05d=`qsub %s %s %s`\n' % - (idx, deps, qsub_args, batchscriptfile)) + deps = "-W depend=afterok:%s" % ":".join(values) + fp.writelines( + "job%05d=`qsub %s %s %s`\n" + % (idx, deps, qsub_args, batchscriptfile) + ) cmd = CommandLine( - 'sh', + "sh", environ=dict(os.environ), resource_monitor=False, - terminal_output='allatonce') - cmd.inputs.args = '%s' % submitjobsfile + terminal_output="allatonce", + ) + cmd.inputs.args = "%s" % submitjobsfile cmd.run() - logger.info('submitted all jobs to queue') + logger.info("submitted all jobs to queue") diff --git a/nipype/pipeline/plugins/semaphore_singleton.py b/nipype/pipeline/plugins/semaphore_singleton.py index fe25cf2c61..12fa7c7777 100644 --- a/nipype/pipeline/plugins/semaphore_singleton.py +++ b/nipype/pipeline/plugins/semaphore_singleton.py @@ -1,3 +1,4 @@ # -*- coding: utf-8 -*- import threading + semaphore = threading.Semaphore(0) diff --git a/nipype/pipeline/plugins/sge.py b/nipype/pipeline/plugins/sge.py index 61b127e188..17a5093ae2 100644 --- a/nipype/pipeline/plugins/sge.py +++ b/nipype/pipeline/plugins/sge.py @@ -14,7 +14,8 @@ from ... import logging from ...interfaces.base import CommandLine from .base import SGELikeBatchManagerBase, logger -iflogger = logging.getLogger('nipype.interface') + +iflogger = logging.getLogger("nipype.interface") DEBUGGING_PREFIX = str(int(random.uniform(100, 999))) @@ -31,28 +32,39 @@ class QJobInfo(object): :author Hans J. Johnson """ - def __init__(self, job_num, job_queue_state, job_time, job_queue_name, - job_slots, qsub_command_line): + def __init__( + self, + job_num, + job_queue_state, + job_time, + job_queue_name, + job_slots, + qsub_command_line, + ): # self._jobName = None # Ascii text name of job not unique self._job_num = int( job_num ) # The primary unique identifier for this job, must be an integer! # self._jobOwn = None # Who owns this job - self._job_queue_state = str( - job_queue_state) # ["running","zombie",...??] + self._job_queue_state = str(job_queue_state) # ["running","zombie",...??] # self._jobActionState = str(jobActionState) # ['r','qw','S',...??] 
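For orientation, a record as assembled by this constructor; every value is invented and nothing contacts a real grid engine (the import path is an assumption based on this file's location):

    import time
    from nipype.pipeline.plugins.sge import QJobInfo

    job = QJobInfo(
        12345, "running", time.time(), "all.q@node01", 1,
        "qsub -N j0_demo script.sh",  # invented qsub command line
    )
    print(job)                  # one fixed-width summary line via __repr__ below
    job.is_job_state_pending()  # True until the cached state turns "zombie"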
self._job_time = job_time # The job start time - self._job_info_creation_time = time.time( + self._job_info_creation_time = ( + time.time() ) # When this job was created (for comparing against initalization) self._job_queue_name = job_queue_name # Where the job is running self._job_slots = int(job_slots) # How many slots are being used self._qsub_command_line = qsub_command_line def __repr__(self): - return '{:<8d}{:12}{:<3d}{:20}{:8}{}'.format( - self._job_num, self._job_queue_state, self._job_slots, + return "{:<8d}{:12}{:<3d}{:20}{:8}{}".format( + self._job_num, + self._job_queue_state, + self._job_slots, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(self._job_time)), - self._job_queue_name, self._qsub_command_line) + self._job_queue_name, + self._qsub_command_line, + ) def is_initializing(self): return self._job_queue_state == "initializing" @@ -69,26 +81,29 @@ def is_pending(self): def is_job_state_pending(self): """ Return True, unless job is in the "zombie" status """ - time_diff = (time.time() - self._job_info_creation_time) + time_diff = time.time() - self._job_info_creation_time if self.is_zombie(): sge_debug_print( - "DONE! QJobInfo.IsPending found in 'zombie' list, returning False so claiming done!\n{0}". - format(self)) + "DONE! QJobInfo.IsPending found in 'zombie' list, returning False so claiming done!\n{0}".format( + self + ) + ) is_pending_status = False # Job explicitly found as being completed! elif self.is_initializing() and (time_diff > 600): # if initializing for more than 5 minute, failure due to # initialization and completion before registration sge_debug_print( "FAILURE! QJobInfo.IsPending found long running at {1} seconds" - "'initializing' returning False for to break loop!\n{0}". - format(self, time_diff)) + "'initializing' returning False for to break loop!\n{0}".format( + self, time_diff + ) + ) is_pending_status = True # Job initialization took too long, so report! else: # self.is_running() || self.is_pending(): is_pending_status = True # Job cache last listed as running return is_pending_status # The job is in one of the hold states - def update_info(self, job_queue_state, job_time, job_queue_name, - job_slots): + def update_info(self, job_queue_state, job_time, job_queue_name, job_slots): self._job_queue_state = job_queue_state self._job_time = job_time self._job_queue_name = job_queue_name @@ -102,9 +117,9 @@ class QstatSubstitute(object): """A wrapper for Qstat to avoid overloading the SGE/OGS server with rapid continuous qstat requests""" - def __init__(self, - qstat_instant_executable='qstat', - qstat_cached_executable='qstat'): + def __init__( + self, qstat_instant_executable="qstat", qstat_cached_executable="qstat" + ): """ :param qstat_instant_executable: :param qstat_cached_executable: @@ -112,7 +127,8 @@ def __init__(self, self._qstat_instant_executable = qstat_instant_executable self._qstat_cached_executable = qstat_cached_executable self._out_of_scope_jobs = list() # Initialize first - self._task_dictionary = dict( + self._task_dictionary = ( + dict() ) # {'taskid': QJobInfo(), .... 
} The dictionaryObject self._remove_old_jobs() @@ -132,20 +148,22 @@ def add_startup_job(self, taskid, qsub_command_line): :return: NONE """ taskid = int(taskid) # Ensure that it is an integer - self._task_dictionary[taskid] = QJobInfo(taskid, "initializing", - time.time(), "noQueue", 1, - qsub_command_line) + self._task_dictionary[taskid] = QJobInfo( + taskid, "initializing", time.time(), "noQueue", 1, qsub_command_line + ) @staticmethod def _qacct_verified_complete(taskid): """ request definitive job completion information for the current job from the qacct report """ - sge_debug_print("WARNING: " - "CONTACTING qacct for finished jobs, " - "{0}: {1}".format(time.time(), "Verifying Completion")) + sge_debug_print( + "WARNING: " + "CONTACTING qacct for finished jobs, " + "{0}: {1}".format(time.time(), "Verifying Completion") + ) - this_command = 'qacct' + this_command = "qacct" qacct_retries = 10 is_complete = False while qacct_retries > 0: @@ -153,17 +171,19 @@ def _qacct_verified_complete(taskid): try: proc = subprocess.Popen( [ - this_command, '-o', - pwd.getpwuid(os.getuid())[0], '-j', - str(taskid) + this_command, + "-o", + pwd.getpwuid(os.getuid())[0], + "-j", + str(taskid), ], stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + stderr=subprocess.PIPE, + ) qacct_result, _ = proc.communicate() if qacct_result.find(str(taskid)): is_complete = True - sge_debug_print( - "NOTE: qacct for jobs\n{0}".format(qacct_result)) + sge_debug_print("NOTE: qacct for jobs\n{0}".format(qacct_result)) break except: sge_debug_print("NOTE: qacct call failed") @@ -178,26 +198,36 @@ def _parse_qstat_job_list(self, xml_job_list): # jobown = # current_job_element.getElementsByTagName('JB_owner')[0].childNodes[0].data try: - job_queue_name = current_job_element.getElementsByTagName( - 'queue_name')[0].childNodes[0].data + job_queue_name = ( + current_job_element.getElementsByTagName("queue_name")[0] + .childNodes[0] + .data + ) except: job_queue_name = "unknown" try: job_slots = int( - current_job_element.getElementsByTagName('slots')[0] - .childNodes[0].data) + current_job_element.getElementsByTagName("slots")[0] + .childNodes[0] + .data + ) except: job_slots = -1 - job_queue_state = current_job_element.getAttribute('state') + job_queue_state = current_job_element.getAttribute("state") job_num = int( - current_job_element.getElementsByTagName('JB_job_number')[0] - .childNodes[0].data) + current_job_element.getElementsByTagName("JB_job_number")[0] + .childNodes[0] + .data + ) try: - job_time_text = current_job_element.getElementsByTagName( - 'JAT_start_time')[0].childNodes[0].data + job_time_text = ( + current_job_element.getElementsByTagName("JAT_start_time")[0] + .childNodes[0] + .data + ) job_time = float( - time.mktime( - time.strptime(job_time_text, "%Y-%m-%dT%H:%M:%S"))) + time.mktime(time.strptime(job_time_text, "%Y-%m-%dT%H:%M:%S")) + ) except: job_time = float(0.0) # Make job entry @@ -205,9 +235,11 @@ def _parse_qstat_job_list(self, xml_job_list): task_id = int(job_num) if task_id in self._task_dictionary: self._task_dictionary[task_id].update_info( - job_queue_state, job_time, job_queue_name, job_slots) - sge_debug_print("Updating job: {0}".format( - self._task_dictionary[task_id])) + job_queue_state, job_time, job_queue_name, job_slots + ) + sge_debug_print( + "Updating job: {0}".format(self._task_dictionary[task_id]) + ) current_jobs_parsed.append(task_id) # Changed from job_num as "in" is used to check which does not cast else: @@ -227,10 +259,12 @@ def _parse_qstat_job_list(self, xml_job_list): 
if is_completed: self._task_dictionary[dictionary_job].set_state("zombie") else: - sge_debug_print("ERROR: Job not in current parselist, " - "and not in done list {0}: {1}".format( - dictionary_job, - self._task_dictionary[dictionary_job])) + sge_debug_print( + "ERROR: Job not in current parselist, " + "and not in done list {0}: {1}".format( + dictionary_job, self._task_dictionary[dictionary_job] + ) + ) pass if self._task_dictionary[dictionary_job].is_initializing(): is_completed = self._qacct_verified_complete(dictionary_job) @@ -240,8 +274,9 @@ def _parse_qstat_job_list(self, xml_job_list): sge_debug_print( "ERROR: Job not in still in intializing mode, " "and not in done list {0}: {1}".format( - dictionary_job, - self._task_dictionary[dictionary_job])) + dictionary_job, self._task_dictionary[dictionary_job] + ) + ) pass def _run_qstat(self, reason_for_qstat, force_instant=True): @@ -252,8 +287,10 @@ def _run_qstat(self, reason_for_qstat, force_instant=True): -s z gives recently completed jobs (**recently** is very ambiguous) -s s suspended jobs """ - sge_debug_print("WARNING: CONTACTING qmaster for jobs, " - "{0}: {1}".format(time.time(), reason_for_qstat)) + sge_debug_print( + "WARNING: CONTACTING qmaster for jobs, " + "{0}: {1}".format(time.time(), reason_for_qstat) + ) if force_instant: this_command = self._qstat_instant_executable else: @@ -265,22 +302,27 @@ def _run_qstat(self, reason_for_qstat, force_instant=True): try: proc = subprocess.Popen( [ - this_command, '-u', - pwd.getpwuid(os.getuid())[0], '-xml', '-s', 'psrz' + this_command, + "-u", + pwd.getpwuid(os.getuid())[0], + "-xml", + "-s", + "psrz", ], stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + stderr=subprocess.PIPE, + ) qstat_xml_result, _ = proc.communicate() dom = xml.dom.minidom.parseString(qstat_xml_result) - jobs = dom.getElementsByTagName('job_info') + jobs = dom.getElementsByTagName("job_info") run = jobs[0] - runjobs = run.getElementsByTagName('job_list') + runjobs = run.getElementsByTagName("job_list") self._parse_qstat_job_list(runjobs) break except Exception as inst: exception_message = "QstatParsingError:\n\t{0}\n\t{1}\n".format( type(inst), # the exception instance - inst # __str__ allows args to printed directly + inst, # __str__ allows args to printed directly ) sge_debug_print(exception_message) time.sleep(5) @@ -296,36 +338,37 @@ def is_job_pending(self, task_id): # Check if the task is in the dictionary first (before running qstat) if task_id in self._task_dictionary: # Trust the cache, only False if state='zombie' - job_is_pending = self._task_dictionary[ - task_id].is_job_state_pending() + job_is_pending = self._task_dictionary[task_id].is_job_state_pending() # Double check pending jobs in case of change (since we don't check at the beginning) if job_is_pending: self._run_qstat( - "checking job pending status {0}".format(task_id), False) - job_is_pending = self._task_dictionary[ - task_id].is_job_state_pending() + "checking job pending status {0}".format(task_id), False + ) + job_is_pending = self._task_dictionary[task_id].is_job_state_pending() else: - self._run_qstat("checking job pending status {0}".format(task_id), - True) + self._run_qstat("checking job pending status {0}".format(task_id), True) if task_id in self._task_dictionary: # Trust the cache, only False if state='zombie' - job_is_pending = self._task_dictionary[ - task_id].is_job_state_pending() + job_is_pending = self._task_dictionary[task_id].is_job_state_pending() else: - sge_debug_print("ERROR: Job {0} not in task list, " - "even 
after forced qstat!".format(task_id)) + sge_debug_print( + "ERROR: Job {0} not in task list, " + "even after forced qstat!".format(task_id) + ) job_is_pending = False if not job_is_pending: - sge_debug_print( - "DONE! Returning for {0} claiming done!".format(task_id)) + sge_debug_print("DONE! Returning for {0} claiming done!".format(task_id)) if task_id in self._task_dictionary: sge_debug_print( - "NOTE: Adding {0} to OutOfScopeJobs list!".format(task_id)) + "NOTE: Adding {0} to OutOfScopeJobs list!".format(task_id) + ) self._out_of_scope_jobs.append(int(task_id)) self._task_dictionary.pop(task_id) else: - sge_debug_print("ERROR: Job {0} not in task list, " - "but attempted to be removed!".format(task_id)) + sge_debug_print( + "ERROR: Job {0} not in task list, " + "but attempted to be removed!".format(task_id) + ) return job_is_pending @@ -342,7 +385,7 @@ def qsub_sanitize_job_name(testjobname): if testjobname[0].isalpha(): return testjobname else: - return 'J' + testjobname + return "J" + testjobname class SGEPlugin(SGELikeBatchManagerBase): @@ -364,18 +407,18 @@ def __init__(self, **kwargs): """ self._retry_timeout = 2 self._max_tries = 2 - instant_qstat = 'qstat' - cached_qstat = 'qstat' - - if 'plugin_args' in kwargs and kwargs['plugin_args']: - if 'retry_timeout' in kwargs['plugin_args']: - self._retry_timeout = kwargs['plugin_args']['retry_timeout'] - if 'max_tries' in kwargs['plugin_args']: - self._max_tries = kwargs['plugin_args']['max_tries'] - if 'qstatProgramPath' in kwargs['plugin_args']: - instant_qstat = kwargs['plugin_args']['qstatProgramPath'] - if 'qstatCachedProgramPath' in kwargs['plugin_args']: - cached_qstat = kwargs['plugin_args']['qstatCachedProgramPath'] + instant_qstat = "qstat" + cached_qstat = "qstat" + + if "plugin_args" in kwargs and kwargs["plugin_args"]: + if "retry_timeout" in kwargs["plugin_args"]: + self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] + if "max_tries" in kwargs["plugin_args"]: + self._max_tries = kwargs["plugin_args"]["max_tries"] + if "qstatProgramPath" in kwargs["plugin_args"]: + instant_qstat = kwargs["plugin_args"]["qstatProgramPath"] + if "qstatCachedProgramPath" in kwargs["plugin_args"]: + cached_qstat = kwargs["plugin_args"]["qstatCachedProgramPath"] self._refQstatSubstitute = QstatSubstitute(instant_qstat, cached_qstat) super(SGEPlugin, self).__init__(template, **kwargs) @@ -385,36 +428,35 @@ def _is_pending(self, taskid): def _submit_batchtask(self, scriptfile, node): cmd = CommandLine( - 'qsub', + "qsub", environ=dict(os.environ), resource_monitor=False, - terminal_output='allatonce') + terminal_output="allatonce", + ) path = os.path.dirname(scriptfile) - qsubargs = '' + qsubargs = "" if self._qsub_args: qsubargs = self._qsub_args - if 'qsub_args' in node.plugin_args: - if 'overwrite' in node.plugin_args and \ - node.plugin_args['overwrite']: - qsubargs = node.plugin_args['qsub_args'] + if "qsub_args" in node.plugin_args: + if "overwrite" in node.plugin_args and node.plugin_args["overwrite"]: + qsubargs = node.plugin_args["qsub_args"] else: - qsubargs += (" " + node.plugin_args['qsub_args']) - if '-o' not in qsubargs: - qsubargs = '%s -o %s' % (qsubargs, path) - if '-e' not in qsubargs: - qsubargs = '%s -e %s' % (qsubargs, path) + qsubargs += " " + node.plugin_args["qsub_args"] + if "-o" not in qsubargs: + qsubargs = "%s -o %s" % (qsubargs, path) + if "-e" not in qsubargs: + qsubargs = "%s -e %s" % (qsubargs, path) if node._hierarchy: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._hierarchy, - node._id)) + 
jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._id)) - jobnameitems = jobname.split('.') + jobname = ".".join((dict(os.environ)["LOGNAME"], node._id)) + jobnameitems = jobname.split(".") jobnameitems.reverse() - jobname = '.'.join(jobnameitems) + jobname = ".".join(jobnameitems) jobname = qsub_sanitize_job_name(jobname) - cmd.inputs.args = '%s -N %s %s' % (qsubargs, jobname, scriptfile) + cmd.inputs.args = "%s -N %s %s" % (qsubargs, jobname, scriptfile) oldlevel = iflogger.level - iflogger.setLevel(logging.getLevelName('CRITICAL')) + iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 result = list() while True: @@ -423,23 +465,29 @@ def _submit_batchtask(self, scriptfile, node): except Exception as e: if tries < self._max_tries: tries += 1 - time.sleep( - self._retry_timeout) # sleep 2 seconds and try again. + time.sleep(self._retry_timeout) # sleep 2 seconds and try again. else: iflogger.setLevel(oldlevel) - raise RuntimeError('\n'.join((('Could not submit sge task' - ' for node %s') % node._id, - str(e)))) + raise RuntimeError( + "\n".join( + ( + ("Could not submit sge task" " for node %s") % node._id, + str(e), + ) + ) + ) else: break iflogger.setLevel(oldlevel) # retrieve sge taskid - lines = [line for line in result.runtime.stdout.split('\n') if line] + lines = [line for line in result.runtime.stdout.split("\n") if line] taskid = int( - re.match("Your job ([0-9]*) .* has been submitted", - lines[-1]).groups()[0]) + re.match("Your job ([0-9]*) .* has been submitted", lines[-1]).groups()[0] + ) self._pending[taskid] = node.output_dir() self._refQstatSubstitute.add_startup_job(taskid, cmd.cmdline) - logger.debug('submitted sge task: %d for node %s with %s' % - (taskid, node._id, cmd.cmdline)) + logger.debug( + "submitted sge task: %d for node %s with %s" + % (taskid, node._id, cmd.cmdline) + ) return taskid diff --git a/nipype/pipeline/plugins/sgegraph.py b/nipype/pipeline/plugins/sgegraph.py index 06e5719654..82d1cc0e58 100644 --- a/nipype/pipeline/plugins/sgegraph.py +++ b/nipype/pipeline/plugins/sgegraph.py @@ -5,7 +5,7 @@ import sys from ...interfaces.base import CommandLine -from .base import (GraphPluginBase, logger) +from .base import GraphPluginBase, logger def node_completed_status(checknode): @@ -15,15 +15,15 @@ def node_completed_status(checknode): :return: boolean value True indicates that the node does not need to be run. 
""" """ TODO: place this in the base.py file and refactor """ - node_state_does_not_require_overwrite = ( - checknode.overwrite is False or - (checknode.overwrite is None and not checknode._interface.always_run)) + node_state_does_not_require_overwrite = checknode.overwrite is False or ( + checknode.overwrite is None and not checknode._interface.always_run + ) hash_exists = False try: hash_exists, _, _, _ = checknode.hash_exists() except Exception: hash_exists = False - return (hash_exists and node_state_does_not_require_overwrite) + return hash_exists and node_state_does_not_require_overwrite class SGEGraphPlugin(GraphPluginBase): @@ -37,6 +37,7 @@ class SGEGraphPlugin(GraphPluginBase): qsub call """ + _template = """ #!/bin/bash #$ -V @@ -44,19 +45,20 @@ class SGEGraphPlugin(GraphPluginBase): """ def __init__(self, **kwargs): - self._qsub_args = '' + self._qsub_args = "" self._dont_resubmit_completed_jobs = False - if 'plugin_args' in kwargs and kwargs['plugin_args']: - plugin_args = kwargs['plugin_args'] - if 'template' in plugin_args: - self._template = plugin_args['template'] + if "plugin_args" in kwargs and kwargs["plugin_args"]: + plugin_args = kwargs["plugin_args"] + if "template" in plugin_args: + self._template = plugin_args["template"] if os.path.isfile(self._template): self._template = open(self._template).read() - if 'qsub_args' in plugin_args: - self._qsub_args = plugin_args['qsub_args'] - if 'dont_resubmit_completed_jobs' in plugin_args: + if "qsub_args" in plugin_args: + self._qsub_args = plugin_args["qsub_args"] + if "dont_resubmit_completed_jobs" in plugin_args: self._dont_resubmit_completed_jobs = plugin_args[ - 'dont_resubmit_completed_jobs'] + "dont_resubmit_completed_jobs" + ] super(SGEGraphPlugin, self).__init__(**kwargs) def _submit_graph(self, pyfiles, dependencies, nodes): @@ -66,17 +68,18 @@ def make_job_name(jobnumber, nodeslist): - nodeslist: The name of the node being processed - return: A string representing this job to be displayed by SGE """ - job_name = 'j{0}_{1}'.format(jobnumber, nodeslist[jobnumber]._id) + job_name = "j{0}_{1}".format(jobnumber, nodeslist[jobnumber]._id) # Condition job_name to be a valid bash identifier (i.e. 
- is invalid) - job_name = job_name.replace('-', '_').replace('.', '_').replace( - ':', '_') + job_name = job_name.replace("-", "_").replace(".", "_").replace(":", "_") return job_name batch_dir, _ = os.path.split(pyfiles[0]) - submitjobsfile = os.path.join(batch_dir, 'submit_jobs.sh') + submitjobsfile = os.path.join(batch_dir, "submit_jobs.sh") cache_doneness_per_node = dict() - if self._dont_resubmit_completed_jobs: # A future parameter for controlling this behavior could be added here + if ( + self._dont_resubmit_completed_jobs + ): # A future parameter for controlling this behavior could be added here for idx, pyscript in enumerate(pyfiles): node = nodes[idx] node_status_done = node_completed_status(node) @@ -86,73 +89,80 @@ def make_job_name(jobnumber, nodeslist): if node_status_done and idx in dependencies: for child_idx in dependencies[idx]: if child_idx in cache_doneness_per_node: - child_status_done = cache_doneness_per_node[ - child_idx] + child_status_done = cache_doneness_per_node[child_idx] else: - child_status_done = node_completed_status( - nodes[child_idx]) + child_status_done = node_completed_status(nodes[child_idx]) node_status_done = node_status_done and child_status_done cache_doneness_per_node[idx] = node_status_done - with open(submitjobsfile, 'wt') as fp: - fp.writelines('#!/usr/bin/env bash\n') - fp.writelines('# Condense format attempted\n') + with open(submitjobsfile, "wt") as fp: + fp.writelines("#!/usr/bin/env bash\n") + fp.writelines("# Condense format attempted\n") for idx, pyscript in enumerate(pyfiles): node = nodes[idx] if cache_doneness_per_node.get(idx, False): continue else: template, qsub_args = self._get_args( - node, ["template", "qsub_args"]) + node, ["template", "qsub_args"] + ) batch_dir, name = os.path.split(pyscript) - name = '.'.join(name.split('.')[:-1]) - batchscript = '\n'.join( - (template, '%s %s' % (sys.executable, pyscript))) - batchscriptfile = os.path.join(batch_dir, - 'batchscript_%s.sh' % name) - - batchscriptoutfile = batchscriptfile + '.o' - batchscripterrfile = batchscriptfile + '.e' - - with open(batchscriptfile, 'wt') as batchfp: + name = ".".join(name.split(".")[:-1]) + batchscript = "\n".join( + (template, "%s %s" % (sys.executable, pyscript)) + ) + batchscriptfile = os.path.join( + batch_dir, "batchscript_%s.sh" % name + ) + + batchscriptoutfile = batchscriptfile + ".o" + batchscripterrfile = batchscriptfile + ".e" + + with open(batchscriptfile, "wt") as batchfp: batchfp.writelines(batchscript) batchfp.close() - deps = '' + deps = "" if idx in dependencies: - values = ' ' + values = " " for jobid in dependencies[idx]: # Avoid dependancies of done jobs - if not self._dont_resubmit_completed_jobs or not cache_doneness_per_node[jobid]: + if ( + not self._dont_resubmit_completed_jobs + or not cache_doneness_per_node[jobid] + ): values += "${{{0}}},".format( - make_job_name(jobid, nodes)) - if values != ' ': # i.e. if some jobs were added to dependency list - values = values.rstrip(',') - deps = '-hold_jid%s' % values + make_job_name(jobid, nodes) + ) + if ( + values != " " + ): # i.e. 
if some jobs were added to dependency list + values = values.rstrip(",") + deps = "-hold_jid%s" % values jobname = make_job_name(idx, nodes) # Do not use default output locations if they are set in self._qsub_args - stderrFile = '' - if self._qsub_args.count('-e ') == 0: - stderrFile = '-e {errFile}'.format( - errFile=batchscripterrfile) - stdoutFile = '' - if self._qsub_args.count('-o ') == 0: - stdoutFile = '-o {outFile}'.format( - outFile=batchscriptoutfile) - full_line = '{jobNm}=$(qsub {outFileOption} {errFileOption} {extraQSubArgs} {dependantIndex} -N {jobNm} {batchscript} | awk \'{{print $3}}\')\n'.format( + stderrFile = "" + if self._qsub_args.count("-e ") == 0: + stderrFile = "-e {errFile}".format(errFile=batchscripterrfile) + stdoutFile = "" + if self._qsub_args.count("-o ") == 0: + stdoutFile = "-o {outFile}".format(outFile=batchscriptoutfile) + full_line = "{jobNm}=$(qsub {outFileOption} {errFileOption} {extraQSubArgs} {dependantIndex} -N {jobNm} {batchscript} | awk '{{print $3}}')\n".format( jobNm=jobname, outFileOption=stdoutFile, errFileOption=stderrFile, extraQSubArgs=qsub_args, dependantIndex=deps, - batchscript=batchscriptfile) + batchscript=batchscriptfile, + ) fp.writelines(full_line) cmd = CommandLine( - 'bash', + "bash", environ=dict(os.environ), resource_monitor=False, - terminal_output='allatonce') - cmd.inputs.args = '%s' % submitjobsfile + terminal_output="allatonce", + ) + cmd.inputs.args = "%s" % submitjobsfile cmd.run() - logger.info('submitted all jobs to queue') + logger.info("submitted all jobs to queue") diff --git a/nipype/pipeline/plugins/slurm.py b/nipype/pipeline/plugins/slurm.py index ffcbc42b63..44cdac70d5 100644 --- a/nipype/pipeline/plugins/slurm.py +++ b/nipype/pipeline/plugins/slurm.py @@ -1,10 +1,10 @@ -''' +""" Created on Aug 2, 2013 @author: chadcumba Parallel workflow execution with SLURM -''' +""" import os import re from time import sleep @@ -13,11 +13,11 @@ from ...interfaces.base import CommandLine from .base import SGELikeBatchManagerBase, logger -iflogger = logging.getLogger('nipype.interface') +iflogger = logging.getLogger("nipype.interface") class SLURMPlugin(SGELikeBatchManagerBase): - ''' + """ Execute using SLURM The plugin_args input to run can be used to control the SLURM execution. 
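A usage sketch for the plugin_args this docstring describes and that __init__ below reads; the workflow and the sbatch flags are illustrative, not prescriptive:

    import nipype.pipeline.engine as pe

    wf = pe.Workflow(name="demo")  # placeholder workflow; real nodes omitted
    wf.run(
        plugin="SLURM",
        plugin_args={
            "sbatch_args": "--time=02:00:00 --mem=4G",  # prepended to each sbatch call
            "retry_timeout": 5,  # seconds to sleep between failed submissions
            "max_tries": 3,      # submission attempts before giving up
        },
    )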
@@ -28,7 +28,7 @@ class SLURMPlugin(SGELikeBatchManagerBase): - sbatch_args: arguments to pass prepend to the sbatch call - ''' + """ def __init__(self, **kwargs): @@ -40,42 +40,46 @@ def __init__(self, **kwargs): self._sbatch_args = None self._jobid_re = "Submitted batch job ([0-9]*)" - if 'plugin_args' in kwargs and kwargs['plugin_args']: - if 'retry_timeout' in kwargs['plugin_args']: - self._retry_timeout = kwargs['plugin_args']['retry_timeout'] - if 'max_tries' in kwargs['plugin_args']: - self._max_tries = kwargs['plugin_args']['max_tries'] - if 'jobid_re' in kwargs['plugin_args']: - self._jobid_re = kwargs['plugin_args']['jobid_re'] - if 'template' in kwargs['plugin_args']: - self._template = kwargs['plugin_args']['template'] + if "plugin_args" in kwargs and kwargs["plugin_args"]: + if "retry_timeout" in kwargs["plugin_args"]: + self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] + if "max_tries" in kwargs["plugin_args"]: + self._max_tries = kwargs["plugin_args"]["max_tries"] + if "jobid_re" in kwargs["plugin_args"]: + self._jobid_re = kwargs["plugin_args"]["jobid_re"] + if "template" in kwargs["plugin_args"]: + self._template = kwargs["plugin_args"]["template"] if os.path.isfile(self._template): with open(self._template) as f: self._template = f.read() - if 'sbatch_args' in kwargs['plugin_args']: - self._sbatch_args = kwargs['plugin_args']['sbatch_args'] + if "sbatch_args" in kwargs["plugin_args"]: + self._sbatch_args = kwargs["plugin_args"]["sbatch_args"] self._pending = {} super(SLURMPlugin, self).__init__(self._template, **kwargs) def _is_pending(self, taskid): try: res = CommandLine( - 'squeue', - args=' '.join(['-j', '%s' % taskid]), + "squeue", + args=" ".join(["-j", "%s" % taskid]), resource_monitor=False, - terminal_output='allatonce').run() + terminal_output="allatonce", + ).run() return res.runtime.stdout.find(str(taskid)) > -1 except RuntimeError as e: - if any(ss in str(e) for ss - in ['Socket timed out', 'not available at the moment']): + if any( + ss in str(e) + for ss in ["Socket timed out", "not available at the moment"] + ): # do not raise error and allow recheck logger.warning( "SLURM timeout encountered while checking job status," - " treating job %d as pending", taskid + " treating job %d as pending", + taskid, ) return True - if 'Invalid job id' not in str(e): - raise(e) + if "Invalid job id" not in str(e): + raise (e) return False def _submit_batchtask(self, scriptfile, node): @@ -85,38 +89,35 @@ def _submit_batchtask(self, scriptfile, node): formatting/processing """ cmd = CommandLine( - 'sbatch', + "sbatch", environ=dict(os.environ), resource_monitor=False, - terminal_output='allatonce') + terminal_output="allatonce", + ) path = os.path.dirname(scriptfile) - sbatch_args = '' + sbatch_args = "" if self._sbatch_args: sbatch_args = self._sbatch_args - if 'sbatch_args' in node.plugin_args: - if 'overwrite' in node.plugin_args and\ - node.plugin_args['overwrite']: - sbatch_args = node.plugin_args['sbatch_args'] + if "sbatch_args" in node.plugin_args: + if "overwrite" in node.plugin_args and node.plugin_args["overwrite"]: + sbatch_args = node.plugin_args["sbatch_args"] else: - sbatch_args += (" " + node.plugin_args['sbatch_args']) - if '-o' not in sbatch_args: - sbatch_args = '%s -o %s' % (sbatch_args, - os.path.join(path, 'slurm-%j.out')) - if '-e' not in sbatch_args: - sbatch_args = '%s -e %s' % (sbatch_args, - os.path.join(path, 'slurm-%j.out')) + sbatch_args += " " + node.plugin_args["sbatch_args"] + if "-o" not in sbatch_args: + sbatch_args = "%s -o 
%s" % (sbatch_args, os.path.join(path, "slurm-%j.out")) + if "-e" not in sbatch_args: + sbatch_args = "%s -e %s" % (sbatch_args, os.path.join(path, "slurm-%j.out")) if node._hierarchy: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._hierarchy, - node._id)) + jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: - jobname = '.'.join((dict(os.environ)['LOGNAME'], node._id)) - jobnameitems = jobname.split('.') + jobname = ".".join((dict(os.environ)["LOGNAME"], node._id)) + jobnameitems = jobname.split(".") jobnameitems.reverse() - jobname = '.'.join(jobnameitems) - cmd.inputs.args = '%s -J %s %s' % (sbatch_args, jobname, scriptfile) + jobname = ".".join(jobnameitems) + cmd.inputs.args = "%s -J %s %s" % (sbatch_args, jobname, scriptfile) oldlevel = iflogger.level - iflogger.setLevel(logging.getLevelName('CRITICAL')) + iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 while True: try: @@ -128,17 +129,22 @@ def _submit_batchtask(self, scriptfile, node): sleep(self._retry_timeout) else: iflogger.setLevel(oldlevel) - raise RuntimeError('\n'.join( - (('Could not submit sbatch task' - ' for node %s') % node._id, str(e)))) + raise RuntimeError( + "\n".join( + ( + ("Could not submit sbatch task" " for node %s") + % node._id, + str(e), + ) + ) + ) else: break - logger.debug('Ran command ({0})'.format(cmd.cmdline)) + logger.debug("Ran command ({0})".format(cmd.cmdline)) iflogger.setLevel(oldlevel) # retrieve taskid - lines = [line for line in result.runtime.stdout.split('\n') if line] + lines = [line for line in result.runtime.stdout.split("\n") if line] taskid = int(re.match(self._jobid_re, lines[-1]).groups()[0]) self._pending[taskid] = node.output_dir() - logger.debug('submitted sbatch task: %d for node %s' % (taskid, - node._id)) + logger.debug("submitted sbatch task: %d for node %s" % (taskid, node._id)) return taskid diff --git a/nipype/pipeline/plugins/slurmgraph.py b/nipype/pipeline/plugins/slurmgraph.py index 1b62177457..9468c76ba1 100644 --- a/nipype/pipeline/plugins/slurmgraph.py +++ b/nipype/pipeline/plugins/slurmgraph.py @@ -5,7 +5,7 @@ import sys from ...interfaces.base import CommandLine -from .base import (GraphPluginBase, logger) +from .base import GraphPluginBase, logger def node_completed_status(checknode): @@ -15,15 +15,15 @@ def node_completed_status(checknode): :return: boolean value True indicates that the node does not need to be run. 
""" """ TODO: place this in the base.py file and refactor """ - node_state_does_not_require_overwrite = ( - checknode.overwrite is False or - (checknode.overwrite is None and not checknode._interface.always_run)) + node_state_does_not_require_overwrite = checknode.overwrite is False or ( + checknode.overwrite is None and not checknode._interface.always_run + ) hash_exists = False try: hash_exists, _, _, _ = checknode.hash_exists() except Exception: hash_exists = False - return (hash_exists and node_state_does_not_require_overwrite) + return hash_exists and node_state_does_not_require_overwrite class SLURMGraphPlugin(GraphPluginBase): @@ -37,24 +37,26 @@ class SLURMGraphPlugin(GraphPluginBase): qsub call """ + _template = "#!/bin/bash" def __init__(self, **kwargs): - self._sbatch_args = '' - if 'plugin_args' in kwargs and kwargs['plugin_args']: - if 'retry_timeout' in kwargs['plugin_args']: - self._retry_timeout = kwargs['plugin_args']['retry_timeout'] - if 'max_tries' in kwargs['plugin_args']: - self._max_tries = kwargs['plugin_args']['max_tries'] - if 'template' in kwargs['plugin_args']: - self._template = kwargs['plugin_args']['template'] + self._sbatch_args = "" + if "plugin_args" in kwargs and kwargs["plugin_args"]: + if "retry_timeout" in kwargs["plugin_args"]: + self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] + if "max_tries" in kwargs["plugin_args"]: + self._max_tries = kwargs["plugin_args"]["max_tries"] + if "template" in kwargs["plugin_args"]: + self._template = kwargs["plugin_args"]["template"] if os.path.isfile(self._template): self._template = open(self._template).read() - if 'sbatch_args' in kwargs['plugin_args']: - self._sbatch_args = kwargs['plugin_args']['sbatch_args'] - if 'dont_resubmit_completed_jobs' in kwargs['plugin_args']: - self._dont_resubmit_completed_jobs = kwargs['plugin_args'][ - 'dont_resubmit_completed_jobs'] + if "sbatch_args" in kwargs["plugin_args"]: + self._sbatch_args = kwargs["plugin_args"]["sbatch_args"] + if "dont_resubmit_completed_jobs" in kwargs["plugin_args"]: + self._dont_resubmit_completed_jobs = kwargs["plugin_args"][ + "dont_resubmit_completed_jobs" + ] else: self._dont_resubmit_completed_jobs = False super(SLURMGraphPlugin, self).__init__(**kwargs) @@ -66,17 +68,18 @@ def make_job_name(jobnumber, nodeslist): - nodeslist: The name of the node being processed - return: A string representing this job to be displayed by SLURM """ - job_name = 'j{0}_{1}'.format(jobnumber, nodeslist[jobnumber]._id) + job_name = "j{0}_{1}".format(jobnumber, nodeslist[jobnumber]._id) # Condition job_name to be a valid bash identifier (i.e. 
- is invalid) - job_name = job_name.replace('-', '_').replace('.', '_').replace( - ':', '_') + job_name = job_name.replace("-", "_").replace(".", "_").replace(":", "_") return job_name batch_dir, _ = os.path.split(pyfiles[0]) - submitjobsfile = os.path.join(batch_dir, 'submit_jobs.sh') + submitjobsfile = os.path.join(batch_dir, "submit_jobs.sh") cache_doneness_per_node = dict() - if self._dont_resubmit_completed_jobs: # A future parameter for controlling this behavior could be added here + if ( + self._dont_resubmit_completed_jobs + ): # A future parameter for controlling this behavior could be added here for idx, pyscript in enumerate(pyfiles): node = nodes[idx] node_status_done = node_completed_status(node) @@ -86,73 +89,80 @@ def make_job_name(jobnumber, nodeslist): if node_status_done and idx in dependencies: for child_idx in dependencies[idx]: if child_idx in cache_doneness_per_node: - child_status_done = cache_doneness_per_node[ - child_idx] + child_status_done = cache_doneness_per_node[child_idx] else: - child_status_done = node_completed_status( - nodes[child_idx]) + child_status_done = node_completed_status(nodes[child_idx]) node_status_done = node_status_done and child_status_done cache_doneness_per_node[idx] = node_status_done - with open(submitjobsfile, 'wt') as fp: - fp.writelines('#!/usr/bin/env bash\n') - fp.writelines('# Condense format attempted\n') + with open(submitjobsfile, "wt") as fp: + fp.writelines("#!/usr/bin/env bash\n") + fp.writelines("# Condense format attempted\n") for idx, pyscript in enumerate(pyfiles): node = nodes[idx] if cache_doneness_per_node.get(idx, False): continue else: template, sbatch_args = self._get_args( - node, ["template", "sbatch_args"]) + node, ["template", "sbatch_args"] + ) batch_dir, name = os.path.split(pyscript) - name = '.'.join(name.split('.')[:-1]) - batchscript = '\n'.join( - (template, '%s %s' % (sys.executable, pyscript))) - batchscriptfile = os.path.join(batch_dir, - 'batchscript_%s.sh' % name) - - batchscriptoutfile = batchscriptfile + '.o' - batchscripterrfile = batchscriptfile + '.e' - - with open(batchscriptfile, 'wt') as batchfp: + name = ".".join(name.split(".")[:-1]) + batchscript = "\n".join( + (template, "%s %s" % (sys.executable, pyscript)) + ) + batchscriptfile = os.path.join( + batch_dir, "batchscript_%s.sh" % name + ) + + batchscriptoutfile = batchscriptfile + ".o" + batchscripterrfile = batchscriptfile + ".e" + + with open(batchscriptfile, "wt") as batchfp: batchfp.writelines(batchscript) batchfp.close() - deps = '' + deps = "" if idx in dependencies: - values = '' + values = "" for jobid in dependencies[idx]: # Avoid dependancies of done jobs - if not self._dont_resubmit_completed_jobs or not cache_doneness_per_node[jobid]: + if ( + not self._dont_resubmit_completed_jobs + or not cache_doneness_per_node[jobid] + ): values += "${{{0}}}:".format( - make_job_name(jobid, nodes)) - if values != '': # i.e. if some jobs were added to dependency list - values = values.rstrip(':') - deps = '--dependency=afterok:%s' % values + make_job_name(jobid, nodes) + ) + if ( + values != "" + ): # i.e. 
if some jobs were added to dependency list + values = values.rstrip(":") + deps = "--dependency=afterok:%s" % values jobname = make_job_name(idx, nodes) # Do not use default output locations if they are set in self._sbatch_args - stderrFile = '' - if self._sbatch_args.count('-e ') == 0: - stderrFile = '-e {errFile}'.format( - errFile=batchscripterrfile) - stdoutFile = '' - if self._sbatch_args.count('-o ') == 0: - stdoutFile = '-o {outFile}'.format( - outFile=batchscriptoutfile) - full_line = '{jobNm}=$(sbatch {outFileOption} {errFileOption} {extraSBatchArgs} {dependantIndex} -J {jobNm} {batchscript} | awk \'/^Submitted/ {{print $4}}\')\n'.format( + stderrFile = "" + if self._sbatch_args.count("-e ") == 0: + stderrFile = "-e {errFile}".format(errFile=batchscripterrfile) + stdoutFile = "" + if self._sbatch_args.count("-o ") == 0: + stdoutFile = "-o {outFile}".format(outFile=batchscriptoutfile) + full_line = "{jobNm}=$(sbatch {outFileOption} {errFileOption} {extraSBatchArgs} {dependantIndex} -J {jobNm} {batchscript} | awk '/^Submitted/ {{print $4}}')\n".format( jobNm=jobname, outFileOption=stdoutFile, errFileOption=stderrFile, extraSBatchArgs=sbatch_args, dependantIndex=deps, - batchscript=batchscriptfile) + batchscript=batchscriptfile, + ) fp.writelines(full_line) cmd = CommandLine( - 'bash', + "bash", environ=dict(os.environ), resource_monitor=False, - terminal_output='allatonce') - cmd.inputs.args = '%s' % submitjobsfile + terminal_output="allatonce", + ) + cmd.inputs.args = "%s" % submitjobsfile cmd.run() - logger.info('submitted all jobs to queue') + logger.info("submitted all jobs to queue") diff --git a/nipype/pipeline/plugins/somaflow.py b/nipype/pipeline/plugins/somaflow.py index e31a901169..d621c7967a 100644 --- a/nipype/pipeline/plugins/somaflow.py +++ b/nipype/pipeline/plugins/somaflow.py @@ -5,12 +5,11 @@ import os import sys -from .base import (GraphPluginBase, logger) +from .base import GraphPluginBase, logger soma_not_loaded = False try: - from soma.workflow.client import (Job, Workflow, WorkflowController, - Helper) + from soma.workflow.client import Job, Workflow, WorkflowController, Helper except: soma_not_loaded = True @@ -21,7 +20,7 @@ class SomaFlowPlugin(GraphPluginBase): def __init__(self, plugin_args=None): if soma_not_loaded: - raise ImportError('SomaFlow could not be imported') + raise ImportError("SomaFlow could not be imported") super(SomaFlowPlugin, self).__init__(plugin_args=plugin_args) def _submit_graph(self, pyfiles, dependencies, nodes): @@ -35,9 +34,9 @@ def _submit_graph(self, pyfiles, dependencies, nodes): soma_deps.append((jobs[val], jobs[key])) wf = Workflow(jobs, soma_deps) - logger.info('serializing workflow') - Helper.serialize('workflow', wf) + logger.info("serializing workflow") + Helper.serialize("workflow", wf) controller = WorkflowController() - logger.info('submitting workflow') + logger.info("submitting workflow") wf_id = controller.submit_workflow(wf) Helper.wait_workflow(wf_id, controller) diff --git a/nipype/pipeline/plugins/tests/test_base.py b/nipype/pipeline/plugins/tests/test_base.py index cdc55b668b..fddcfa2368 100644 --- a/nipype/pipeline/plugins/tests/test_base.py +++ b/nipype/pipeline/plugins/tests/test_base.py @@ -14,7 +14,7 @@ def test_scipy_sparse(): assert foo[0, 1] == 0 -''' +""" Can use the following code to test that a mapnode crash continues successfully Need to put this into a unit-test with a timeout @@ -38,4 +38,4 @@ def func(arg1): wf.base_dir = '/tmp' wf.run(plugin='MultiProc') -''' +""" diff --git 
a/nipype/pipeline/plugins/tests/test_callback.py b/nipype/pipeline/plugins/tests/test_callback.py index b7eb827b74..8baa356fdd 100644 --- a/nipype/pipeline/plugins/tests/test_callback.py +++ b/nipype/pipeline/plugins/tests/test_callback.py @@ -25,42 +25,39 @@ def callback(self, node, status, result=None): self.statuses.append((node.name, status)) -@pytest.mark.parametrize("plugin", ['Linear', 'MultiProc', 'LegacyMultiProc']) +@pytest.mark.parametrize("plugin", ["Linear", "MultiProc", "LegacyMultiProc"]) def test_callback_normal(tmpdir, plugin): tmpdir.chdir() so = Status() - wf = pe.Workflow(name='test', base_dir=tmpdir.strpath) + wf = pe.Workflow(name="test", base_dir=tmpdir.strpath) f_node = pe.Node( - niu.Function(function=func, input_names=[], output_names=[]), - name='f_node') + niu.Function(function=func, input_names=[], output_names=[]), name="f_node" + ) wf.add_nodes([f_node]) - wf.config['execution'] = { - 'crashdump_dir': wf.base_dir, - 'poll_sleep_duration': 2 - } - wf.run(plugin=plugin, plugin_args={'status_callback': so.callback}) - assert so.statuses == [('f_node', 'start'), ('f_node', 'end')] + wf.config["execution"] = {"crashdump_dir": wf.base_dir, "poll_sleep_duration": 2} + wf.run(plugin=plugin, plugin_args={"status_callback": so.callback}) + assert so.statuses == [("f_node", "start"), ("f_node", "end")] -@pytest.mark.parametrize("plugin", ['Linear', 'MultiProc', 'LegacyMultiProc']) +@pytest.mark.parametrize("plugin", ["Linear", "MultiProc", "LegacyMultiProc"]) @pytest.mark.parametrize("stop_on_first_crash", [False, True]) def test_callback_exception(tmpdir, plugin, stop_on_first_crash): tmpdir.chdir() so = Status() - wf = pe.Workflow(name='test', base_dir=tmpdir.strpath) + wf = pe.Workflow(name="test", base_dir=tmpdir.strpath) f_node = pe.Node( - niu.Function(function=bad_func, input_names=[], output_names=[]), - name='f_node') + niu.Function(function=bad_func, input_names=[], output_names=[]), name="f_node" + ) wf.add_nodes([f_node]) - wf.config['execution'] = { - 'crashdump_dir': wf.base_dir, - 'stop_on_first_crash': stop_on_first_crash, - 'poll_sleep_duration': 2 + wf.config["execution"] = { + "crashdump_dir": wf.base_dir, + "stop_on_first_crash": stop_on_first_crash, + "poll_sleep_duration": 2, } with pytest.raises(Exception): - wf.run(plugin=plugin, plugin_args={'status_callback': so.callback}) + wf.run(plugin=plugin, plugin_args={"status_callback": so.callback}) sleep(0.5) # Wait for callback to be called (python 2.7) - assert so.statuses == [('f_node', 'start'), ('f_node', 'exception')] + assert so.statuses == [("f_node", "start"), ("f_node", "exception")] diff --git a/nipype/pipeline/plugins/tests/test_debug.py b/nipype/pipeline/plugins/tests/test_debug.py index bd06ecb775..82361a0228 100644 --- a/nipype/pipeline/plugins/tests/test_debug.py +++ b/nipype/pipeline/plugins/tests/test_debug.py @@ -7,12 +7,12 @@ class InputSpec(nib.TraitedSpec): - input1 = nib.traits.Int(desc='a random int') - input2 = nib.traits.Int(desc='a random int') + input1 = nib.traits.Int(desc="a random int") + input2 = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): - output1 = nib.traits.List(nib.traits.Int, desc='outputs') + output1 = nib.traits.List(nib.traits.Int, desc="outputs") class DebugTestInterface(nib.BaseInterface): @@ -25,7 +25,7 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['output1'] = [1, self.inputs.input1] + outputs["output1"] = [1, self.inputs.input1] return outputs @@ -36,11 +36,11 @@ def 
callme(node, graph): def test_debug(tmpdir): tmpdir.chdir() - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(DebugTestInterface(), name='mod1') - mod2 = pe.MapNode(DebugTestInterface(), iterfield=['input1'], name='mod2') + pipe = pe.Workflow(name="pipe") + mod1 = pe.Node(DebugTestInterface(), name="mod1") + mod2 = pe.MapNode(DebugTestInterface(), iterfield=["input1"], name="mod2") - pipe.connect([(mod1, mod2, [('output1', 'input1')])]) + pipe.connect([(mod1, mod2, [("output1", "input1")])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 @@ -50,8 +50,8 @@ def test_debug(tmpdir): exc = None try: - pipe.run(plugin="Debug", plugin_args={'callable': callme}) + pipe.run(plugin="Debug", plugin_args={"callable": callme}) except Exception as e: exc = e - assert exc is None, 'unexpected exception caught' + assert exc is None, "unexpected exception caught" diff --git a/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py b/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py index 3c4e673f55..f490729485 100644 --- a/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py +++ b/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py @@ -15,9 +15,9 @@ def mytestFunction(insum=0): - ''' + """ Run a multiprocessing job and spawn child processes. - ''' + """ # need to import here since this is executed as an external process import multiprocessing @@ -37,16 +37,16 @@ def mytestFunction(insum=0): f = [None] * numberOfThreads def dummyFunction(filename): - ''' + """ This function writes the value 45 to the given filename. - ''' + """ j = 0 for i in range(0, 10): j += i # j is now 45 (0+1+2+3+4+5+6+7+8+9) - with open(filename, 'w') as f: + with open(filename, "w") as f: f.write(str(j)) for n in range(numberOfThreads): @@ -55,9 +55,9 @@ def dummyFunction(filename): a[n] = True # create a temp file to use as the data exchange container - tmpFile = tempfile.mkstemp('.txt', 'test_engine_')[1] + tmpFile = tempfile.mkstemp(".txt", "test_engine_")[1] f[n] = tmpFile # keep track of the temp file - t[n] = multiprocessing.Process(target=dummyFunction, args=(tmpFile, )) + t[n] = multiprocessing.Process(target=dummyFunction, args=(tmpFile,)) # fire up the job t[n].start() @@ -88,65 +88,61 @@ def dummyFunction(filename): def run_multiproc_nondaemon_with_flag(nondaemon_flag): - ''' + """ Start a pipe with two nodes using the resource multiproc plugin and passing the nondaemon_flag. 
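The tests above and below all wrap a plain Python function as a pipeline node via the Function interface. For readers unfamiliar with that pattern, a self-contained sketch; the function and node names are illustrative, not taken from this patch:

    # Sketch of the Function-node pattern used by these tests.
    import nipype.pipeline.engine as pe
    from nipype.interfaces.utility import Function

    def add_one(x):
        # The return value is mapped onto output_names, in order.
        return x + 1

    node = pe.Node(
        Function(function=add_one, input_names=["x"], output_names=["out"]),
        name="add_one",
    )
    node.inputs.x = 1
    result = node.run()        # executes in node.base_dir (cwd by default)
    print(result.outputs.out)  # 2

Note that the wrapped function runs in a separate process, which is why mytestFunction above does its own imports inside the function body.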
- ''' + """ cur_dir = os.getcwd() - temp_dir = mkdtemp(prefix='test_engine_') + temp_dir = mkdtemp(prefix="test_engine_") os.chdir(temp_dir) - pipe = pe.Workflow(name='pipe') + pipe = pe.Workflow(name="pipe") f1 = pe.Node( interface=Function( - function=mytestFunction, - input_names=['insum'], - output_names=['sum_out']), - name='f1') + function=mytestFunction, input_names=["insum"], output_names=["sum_out"] + ), + name="f1", + ) f2 = pe.Node( interface=Function( - function=mytestFunction, - input_names=['insum'], - output_names=['sum_out']), - name='f2') + function=mytestFunction, input_names=["insum"], output_names=["sum_out"] + ), + name="f2", + ) - pipe.connect([(f1, f2, [('sum_out', 'insum')])]) + pipe.connect([(f1, f2, [("sum_out", "insum")])]) pipe.base_dir = os.getcwd() f1.inputs.insum = 0 - pipe.config['execution']['stop_on_first_crash'] = True + pipe.config["execution"]["stop_on_first_crash"] = True # execute the pipe using the LegacyMultiProc plugin with 2 processes and the # non_daemon flag to enable child processes which start other # multiprocessing jobs execgraph = pipe.run( plugin="LegacyMultiProc", - plugin_args={ - 'n_procs': 2, - 'non_daemon': nondaemon_flag - }) - - names = [ - '.'.join((node._hierarchy, node.name)) for node in execgraph.nodes() - ] - node = list(execgraph.nodes())[names.index('pipe.f2')] - result = node.get_output('sum_out') + plugin_args={"n_procs": 2, "non_daemon": nondaemon_flag}, + ) + + names = [".".join((node._hierarchy, node.name)) for node in execgraph.nodes()] + node = list(execgraph.nodes())[names.index("pipe.f2")] + result = node.get_output("sum_out") os.chdir(cur_dir) rmtree(temp_dir) return result def test_run_multiproc_nondaemon_false(): - ''' + """ This is the entry point for the test. Two times a pipe of several multiprocessing jobs gets executed. First, without the nondaemon flag. Second, with the nondaemon flag. Since the processes of the pipe start child processes, the execution only succeeds when the non_daemon flag is on. 
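The reason the daemon flag decides success or failure here is a CPython rule rather than a nipype one: daemonic processes may not spawn children of their own. A standalone sketch, independent of nipype:

    # Standalone illustration: a daemonic process cannot start children.
    import multiprocessing

    def spawn_child():
        # With daemon=True this raises AssertionError ("daemonic processes
        # are not allowed to have children") inside the worker process.
        child = multiprocessing.Process(target=print, args=("child ran",))
        child.start()
        child.join()

    if __name__ == "__main__":
        worker = multiprocessing.Process(target=spawn_child)
        worker.daemon = True  # set False to mimic non_daemon=True above
        worker.start()
        worker.join()

Passing non_daemon=True to the LegacyMultiProc plugin makes its worker pool non-daemonic, which is what allows the child-spawning jobs in this test to run.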
- ''' + """ shouldHaveFailed = False try: # with nondaemon_flag = False, the execution should fail diff --git a/nipype/pipeline/plugins/tests/test_linear.py b/nipype/pipeline/plugins/tests/test_linear.py index 6484432baa..9ccb5157fc 100644 --- a/nipype/pipeline/plugins/tests/test_linear.py +++ b/nipype/pipeline/plugins/tests/test_linear.py @@ -6,12 +6,12 @@ class InputSpec(nib.TraitedSpec): - input1 = nib.traits.Int(desc='a random int') - input2 = nib.traits.Int(desc='a random int') + input1 = nib.traits.Int(desc="a random int") + input2 = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): - output1 = nib.traits.List(nib.traits.Int, desc='outputs') + output1 = nib.traits.List(nib.traits.Int, desc="outputs") class LinearTestInterface(nib.BaseInterface): @@ -24,24 +24,23 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['output1'] = [1, self.inputs.input1] + outputs["output1"] = [1, self.inputs.input1] return outputs def test_run_in_series(tmpdir): tmpdir.chdir() - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=LinearTestInterface(), name='mod1') + pipe = pe.Workflow(name="pipe") + mod1 = pe.Node(interface=LinearTestInterface(), name="mod1") mod2 = pe.MapNode( - interface=LinearTestInterface(), iterfield=['input1'], name='mod2') - pipe.connect([(mod1, mod2, [('output1', 'input1')])]) + interface=LinearTestInterface(), iterfield=["input1"], name="mod2" + ) + pipe.connect([(mod1, mod2, [("output1", "input1")])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 execgraph = pipe.run(plugin="Linear") - names = [ - '.'.join((node._hierarchy, node.name)) for node in execgraph.nodes() - ] - node = list(execgraph.nodes())[names.index('pipe.mod1')] - result = node.get_output('output1') + names = [".".join((node._hierarchy, node.name)) for node in execgraph.nodes()] + node = list(execgraph.nodes())[names.index("pipe.mod1")] + result = node.get_output("output1") assert result == [1, 1] diff --git a/nipype/pipeline/plugins/tests/test_multiproc.py b/nipype/pipeline/plugins/tests/test_multiproc.py index 7ba9001c39..2e8967cfbe 100644 --- a/nipype/pipeline/plugins/tests/test_multiproc.py +++ b/nipype/pipeline/plugins/tests/test_multiproc.py @@ -11,12 +11,12 @@ class InputSpec(nib.TraitedSpec): - input1 = nib.traits.Int(desc='a random int') - input2 = nib.traits.Int(desc='a random int') + input1 = nib.traits.Int(desc="a random int") + input2 = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): - output1 = nib.traits.List(nib.traits.Int, desc='outputs') + output1 = nib.traits.List(nib.traits.Int, desc="outputs") class MultiprocTestInterface(nib.BaseInterface): @@ -29,35 +29,34 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['output1'] = [1, self.inputs.input1] + outputs["output1"] = [1, self.inputs.input1] return outputs def test_run_multiproc(tmpdir): tmpdir.chdir() - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(MultiprocTestInterface(), name='mod1') - mod2 = pe.MapNode( - MultiprocTestInterface(), iterfield=['input1'], name='mod2') - pipe.connect([(mod1, mod2, [('output1', 'input1')])]) + pipe = pe.Workflow(name="pipe") + mod1 = pe.Node(MultiprocTestInterface(), name="mod1") + mod2 = pe.MapNode(MultiprocTestInterface(), iterfield=["input1"], name="mod2") + pipe.connect([(mod1, mod2, [("output1", "input1")])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 - pipe.config['execution']['poll_sleep_duration'] = 2 + 
pipe.config["execution"]["poll_sleep_duration"] = 2 execgraph = pipe.run(plugin="MultiProc") names = [node.fullname for node in execgraph.nodes()] - node = list(execgraph.nodes())[names.index('pipe.mod1')] - result = node.get_output('output1') + node = list(execgraph.nodes())[names.index("pipe.mod1")] + result = node.get_output("output1") assert result == [1, 1] class InputSpecSingleNode(nib.TraitedSpec): - input1 = nib.traits.Int(desc='a random int') - input2 = nib.traits.Int(desc='a random int') + input1 = nib.traits.Int(desc="a random int") + input2 = nib.traits.Int(desc="a random int") class OutputSpecSingleNode(nib.TraitedSpec): - output1 = nib.traits.Int(desc='a random int') + output1 = nib.traits.Int(desc="a random int") class SingleNodeTestInterface(nib.BaseInterface): @@ -70,68 +69,65 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['output1'] = self.inputs.input1 + outputs["output1"] = self.inputs.input1 return outputs def test_no_more_memory_than_specified(tmpdir): tmpdir.chdir() - pipe = pe.Workflow(name='pipe') - n1 = pe.Node(SingleNodeTestInterface(), name='n1', mem_gb=1) - n2 = pe.Node(SingleNodeTestInterface(), name='n2', mem_gb=1) - n3 = pe.Node(SingleNodeTestInterface(), name='n3', mem_gb=1) - n4 = pe.Node(SingleNodeTestInterface(), name='n4', mem_gb=1) - - pipe.connect(n1, 'output1', n2, 'input1') - pipe.connect(n1, 'output1', n3, 'input1') - pipe.connect(n2, 'output1', n4, 'input1') - pipe.connect(n3, 'output1', n4, 'input2') + pipe = pe.Workflow(name="pipe") + n1 = pe.Node(SingleNodeTestInterface(), name="n1", mem_gb=1) + n2 = pe.Node(SingleNodeTestInterface(), name="n2", mem_gb=1) + n3 = pe.Node(SingleNodeTestInterface(), name="n3", mem_gb=1) + n4 = pe.Node(SingleNodeTestInterface(), name="n4", mem_gb=1) + + pipe.connect(n1, "output1", n2, "input1") + pipe.connect(n1, "output1", n3, "input1") + pipe.connect(n2, "output1", n4, "input1") + pipe.connect(n3, "output1", n4, "input2") n1.inputs.input1 = 1 max_memory = 0.5 with pytest.raises(RuntimeError): pipe.run( - plugin='MultiProc', - plugin_args={ - 'memory_gb': max_memory, - 'n_procs': 2 - }) + plugin="MultiProc", plugin_args={"memory_gb": max_memory, "n_procs": 2} + ) def test_no_more_threads_than_specified(tmpdir): tmpdir.chdir() - pipe = pe.Workflow(name='pipe') - n1 = pe.Node(SingleNodeTestInterface(), name='n1', n_procs=2) - n2 = pe.Node(SingleNodeTestInterface(), name='n2', n_procs=2) - n3 = pe.Node(SingleNodeTestInterface(), name='n3', n_procs=4) - n4 = pe.Node(SingleNodeTestInterface(), name='n4', n_procs=2) + pipe = pe.Workflow(name="pipe") + n1 = pe.Node(SingleNodeTestInterface(), name="n1", n_procs=2) + n2 = pe.Node(SingleNodeTestInterface(), name="n2", n_procs=2) + n3 = pe.Node(SingleNodeTestInterface(), name="n3", n_procs=4) + n4 = pe.Node(SingleNodeTestInterface(), name="n4", n_procs=2) - pipe.connect(n1, 'output1', n2, 'input1') - pipe.connect(n1, 'output1', n3, 'input1') - pipe.connect(n2, 'output1', n4, 'input1') - pipe.connect(n3, 'output1', n4, 'input2') + pipe.connect(n1, "output1", n2, "input1") + pipe.connect(n1, "output1", n3, "input1") + pipe.connect(n2, "output1", n4, "input1") + pipe.connect(n3, "output1", n4, "input2") n1.inputs.input1 = 4 max_threads = 2 with pytest.raises(RuntimeError): - pipe.run(plugin='MultiProc', plugin_args={'n_procs': max_threads}) + pipe.run(plugin="MultiProc", plugin_args={"n_procs": max_threads}) def test_hold_job_until_procs_available(tmpdir): tmpdir.chdir() - pipe = pe.Workflow(name='pipe') - n1 = 
pe.Node(SingleNodeTestInterface(), name='n1', n_procs=2) - n2 = pe.Node(SingleNodeTestInterface(), name='n2', n_procs=2) - n3 = pe.Node(SingleNodeTestInterface(), name='n3', n_procs=2) - n4 = pe.Node(SingleNodeTestInterface(), name='n4', n_procs=2) + pipe = pe.Workflow(name="pipe") + n1 = pe.Node(SingleNodeTestInterface(), name="n1", n_procs=2) + n2 = pe.Node(SingleNodeTestInterface(), name="n2", n_procs=2) + n3 = pe.Node(SingleNodeTestInterface(), name="n3", n_procs=2) + n4 = pe.Node(SingleNodeTestInterface(), name="n4", n_procs=2) - pipe.connect(n1, 'output1', n2, 'input1') - pipe.connect(n1, 'output1', n3, 'input1') - pipe.connect(n2, 'output1', n4, 'input1') - pipe.connect(n3, 'output1', n4, 'input2') + pipe.connect(n1, "output1", n2, "input1") + pipe.connect(n1, "output1", n3, "input1") + pipe.connect(n2, "output1", n4, "input1") + pipe.connect(n3, "output1", n4, "input2") n1.inputs.input1 = 4 max_threads = 2 - pipe.run(plugin='MultiProc', plugin_args={'n_procs': max_threads}) + pipe.run(plugin="MultiProc", plugin_args={"n_procs": max_threads}) diff --git a/nipype/pipeline/plugins/tests/test_oar.py b/nipype/pipeline/plugins/tests/test_oar.py index fd4f0b950c..1024daaef9 100644 --- a/nipype/pipeline/plugins/tests/test_oar.py +++ b/nipype/pipeline/plugins/tests/test_oar.py @@ -9,12 +9,12 @@ class InputSpec(nib.TraitedSpec): - input1 = nib.traits.Int(desc='a random int') - input2 = nib.traits.Int(desc='a random int') + input1 = nib.traits.Int(desc="a random int") + input2 = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): - output1 = nib.traits.List(nib.traits.Int, desc='outputs') + output1 = nib.traits.List(nib.traits.Int, desc="outputs") class OarTestInterface(nib.BaseInterface): @@ -27,29 +27,26 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['output1'] = [1, self.inputs.input1] + outputs["output1"] = [1, self.inputs.input1] return outputs @pytest.mark.xfail(reason="not known") def test_run_oar(): cur_dir = os.getcwd() - temp_dir = mkdtemp(prefix='test_engine_', dir=os.getcwd()) + temp_dir = mkdtemp(prefix="test_engine_", dir=os.getcwd()) os.chdir(temp_dir) - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=OarTestInterface(), name='mod1') - mod2 = pe.MapNode( - interface=OarTestInterface(), iterfield=['input1'], name='mod2') - pipe.connect([(mod1, mod2, [('output1', 'input1')])]) + pipe = pe.Workflow(name="pipe") + mod1 = pe.Node(interface=OarTestInterface(), name="mod1") + mod2 = pe.MapNode(interface=OarTestInterface(), iterfield=["input1"], name="mod2") + pipe.connect([(mod1, mod2, [("output1", "input1")])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 execgraph = pipe.run(plugin="OAR") - names = [ - '.'.join((node._hierarchy, node.name)) for node in execgraph.nodes() - ] - node = list(execgraph.nodes())[names.index('pipe.mod1')] - result = node.get_output('output1') + names = [".".join((node._hierarchy, node.name)) for node in execgraph.nodes()] + node = list(execgraph.nodes())[names.index("pipe.mod1")] + result = node.get_output("output1") assert result == [1, 1] os.chdir(cur_dir) rmtree(temp_dir) diff --git a/nipype/pipeline/plugins/tests/test_pbs.py b/nipype/pipeline/plugins/tests/test_pbs.py index f6aa6c88e0..bb85443940 100644 --- a/nipype/pipeline/plugins/tests/test_pbs.py +++ b/nipype/pipeline/plugins/tests/test_pbs.py @@ -10,12 +10,12 @@ class InputSpec(nib.TraitedSpec): - input1 = nib.traits.Int(desc='a random int') - input2 = nib.traits.Int(desc='a random int') + input1 = 
nib.traits.Int(desc="a random int") + input2 = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): - output1 = nib.traits.List(nib.traits.Int, desc='outputs') + output1 = nib.traits.List(nib.traits.Int, desc="outputs") class PbsTestInterface(nib.BaseInterface): @@ -28,29 +28,26 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['output1'] = [1, self.inputs.input1] + outputs["output1"] = [1, self.inputs.input1] return outputs @pytest.mark.xfail(reason="not known") def test_run_pbsgraph(): cur_dir = os.getcwd() - temp_dir = mkdtemp(prefix='test_engine_') + temp_dir = mkdtemp(prefix="test_engine_") os.chdir(temp_dir) - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=PbsTestInterface(), name='mod1') - mod2 = pe.MapNode( - interface=PbsTestInterface(), iterfield=['input1'], name='mod2') - pipe.connect([(mod1, mod2, [('output1', 'input1')])]) + pipe = pe.Workflow(name="pipe") + mod1 = pe.Node(interface=PbsTestInterface(), name="mod1") + mod2 = pe.MapNode(interface=PbsTestInterface(), iterfield=["input1"], name="mod2") + pipe.connect([(mod1, mod2, [("output1", "input1")])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 execgraph = pipe.run(plugin="PBSGraph") - names = [ - '.'.join((node._hierarchy, node.name)) for node in execgraph.nodes() - ] - node = list(execgraph.nodes())[names.index('pipe.mod1')] - result = node.get_output('output1') + names = [".".join((node._hierarchy, node.name)) for node in execgraph.nodes()] + node = list(execgraph.nodes())[names.index("pipe.mod1")] + result = node.get_output("output1") assert result == [1, 1] os.chdir(cur_dir) rmtree(temp_dir) diff --git a/nipype/pipeline/plugins/tests/test_somaflow.py b/nipype/pipeline/plugins/tests/test_somaflow.py index 68cefcdc17..5fe5935e1d 100644 --- a/nipype/pipeline/plugins/tests/test_somaflow.py +++ b/nipype/pipeline/plugins/tests/test_somaflow.py @@ -10,12 +10,12 @@ class InputSpec(nib.TraitedSpec): - input1 = nib.traits.Int(desc='a random int') - input2 = nib.traits.Int(desc='a random int') + input1 = nib.traits.Int(desc="a random int") + input2 = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): - output1 = nib.traits.List(nib.traits.Int, desc='outputs') + output1 = nib.traits.List(nib.traits.Int, desc="outputs") class SomaTestInterface(nib.BaseInterface): @@ -28,7 +28,7 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - outputs['output1'] = [1, self.inputs.input1] + outputs["output1"] = [1, self.inputs.input1] return outputs @@ -36,17 +36,14 @@ def _list_outputs(self): def test_run_somaflow(tmpdir): tmpdir.chdir() - pipe = pe.Workflow(name='pipe') - mod1 = pe.Node(interface=SomaTestInterface(), name='mod1') - mod2 = pe.MapNode( - interface=SomaTestInterface(), iterfield=['input1'], name='mod2') - pipe.connect([(mod1, mod2, [('output1', 'input1')])]) + pipe = pe.Workflow(name="pipe") + mod1 = pe.Node(interface=SomaTestInterface(), name="mod1") + mod2 = pe.MapNode(interface=SomaTestInterface(), iterfield=["input1"], name="mod2") + pipe.connect([(mod1, mod2, [("output1", "input1")])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 execgraph = pipe.run(plugin="SomaFlow") - names = [ - '.'.join((node._hierarchy, node.name)) for node in execgraph.nodes() - ] - node = list(execgraph.nodes())[names.index('pipe.mod1')] - result = node.get_output('output1') + names = [".".join((node._hierarchy, node.name)) for node in execgraph.nodes()] + node = 
list(execgraph.nodes())[names.index("pipe.mod1")] + result = node.get_output("output1") assert result == [1, 1] diff --git a/nipype/pipeline/plugins/tests/test_tools.py b/nipype/pipeline/plugins/tests/test_tools.py index ee9b2e4de7..e34c937fc1 100644 --- a/nipype/pipeline/plugins/tests/test_tools.py +++ b/nipype/pipeline/plugins/tests/test_tools.py @@ -13,30 +13,28 @@ def test_report_crash(): - with mock.patch('pickle.dump', mock.MagicMock()) as mock_pickle_dump: - with mock.patch('nipype.pipeline.plugins.tools.format_exception', - mock.MagicMock()): # see iss 1517 + with mock.patch("pickle.dump", mock.MagicMock()) as mock_pickle_dump: + with mock.patch( + "nipype.pipeline.plugins.tools.format_exception", mock.MagicMock() + ): # see iss 1517 mock_pickle_dump.return_value = True - mock_node = mock.MagicMock(name='mock_node') - mock_node._id = 'an_id' + mock_node = mock.MagicMock(name="mock_node") + mock_node._id = "an_id" mock_node.config = { - 'execution': { - 'crashdump_dir': '.', - 'crashfile_format': 'pklz', - } + "execution": {"crashdump_dir": ".", "crashfile_format": "pklz",} } actual_crashfile = report_crash(mock_node) - expected_crashfile = re.compile( - '.*/crash-.*-an_id-[0-9a-f\-]*.pklz') + expected_crashfile = re.compile(".*/crash-.*-an_id-[0-9a-f\-]*.pklz") - assert expected_crashfile.match( - actual_crashfile).group() == actual_crashfile + assert ( + expected_crashfile.match(actual_crashfile).group() == actual_crashfile + ) assert mock_pickle_dump.call_count == 1 -''' +""" Can use the following code to test that a mapnode crash continues successfully Need to put this into a unit-test with a timeout @@ -60,4 +58,4 @@ def func(arg1): wf.base_dir = '/tmp' wf.run(plugin='MultiProc') -''' +""" diff --git a/nipype/pipeline/plugins/tools.py b/nipype/pipeline/plugins/tools.py index c06d5eea63..2bb31de564 100644 --- a/nipype/pipeline/plugins/tools.py +++ b/nipype/pipeline/plugins/tools.py @@ -14,7 +14,7 @@ from ... 
import logging from ...utils.filemanip import savepkl, crash2txt -logger = logging.getLogger('nipype.workflow') +logger = logging.getLogger("nipype.workflow") def report_crash(node, traceback=None, hostname=None): @@ -30,14 +30,20 @@ def report_crash(node, traceback=None, hostname=None): traceback += """ When creating this crashfile, the results file corresponding -to the node could not be found.""".splitlines(keepends=True) +to the node could not be found.""".splitlines( + keepends=True + ) except Exception as exc: traceback += """ During the creation of this crashfile triggered by the above exception, -another exception occurred:\n\n{}.""".format(exc).splitlines(keepends=True) +another exception occurred:\n\n{}.""".format( + exc + ).splitlines( + keepends=True + ) else: - if getattr(result, 'runtime', None): + if getattr(result, "runtime", None): if isinstance(result.runtime, list): host = result.runtime[0].hostname else: @@ -45,30 +51,28 @@ def report_crash(node, traceback=None, hostname=None): # Try everything to fill in the host host = host or hostname or gethostname() - logger.error('Node %s failed to run on host %s.', name, host) - timeofcrash = strftime('%Y%m%d-%H%M%S') + logger.error("Node %s failed to run on host %s.", name, host) + timeofcrash = strftime("%Y%m%d-%H%M%S") try: login_name = getpass.getuser() except KeyError: - login_name = 'UID{:d}'.format(os.getuid()) - crashfile = 'crash-%s-%s-%s-%s' % (timeofcrash, login_name, name, - str(uuid.uuid4())) - crashdir = node.config['execution'].get('crashdump_dir', os.getcwd()) + login_name = "UID{:d}".format(os.getuid()) + crashfile = "crash-%s-%s-%s-%s" % (timeofcrash, login_name, name, str(uuid.uuid4())) + crashdir = node.config["execution"].get("crashdump_dir", os.getcwd()) os.makedirs(crashdir, exist_ok=True) crashfile = os.path.join(crashdir, crashfile) - if node.config['execution']['crashfile_format'].lower() in ('text', 'txt', '.txt'): - crashfile += '.txt' + if node.config["execution"]["crashfile_format"].lower() in ("text", "txt", ".txt"): + crashfile += ".txt" else: - crashfile += '.pklz' + crashfile += ".pklz" - logger.error('Saving crash info to %s\n%s', crashfile, ''.join(traceback)) - if crashfile.endswith('.txt'): + logger.error("Saving crash info to %s\n%s", crashfile, "".join(traceback)) + if crashfile.endswith(".txt"): crash2txt(crashfile, dict(node=node, traceback=traceback)) else: - savepkl(crashfile, dict(node=node, traceback=traceback), - versioning=True) + savepkl(crashfile, dict(node=node, traceback=traceback), versioning=True) return crashfile @@ -81,30 +85,32 @@ def report_nodes_not_run(notrun): if notrun: logger.info("***********************************") for info in notrun: - logger.error("could not run node: %s" % '.'.join( - (info['node']._hierarchy, info['node']._id))) - logger.info("crashfile: %s" % info['crashfile']) + logger.error( + "could not run node: %s" + % ".".join((info["node"]._hierarchy, info["node"]._id)) + ) + logger.info("crashfile: %s" % info["crashfile"]) logger.debug("The following dependent nodes were not run") - for subnode in info['dependents']: + for subnode in info["dependents"]: logger.debug(subnode._id) logger.info("***********************************") - raise RuntimeError(('Workflow did not execute cleanly. ' - 'Check log for details')) + raise RuntimeError( + ("Workflow did not execute cleanly. 
" "Check log for details") + ) def create_pyscript(node, updatehash=False, store_exception=True): # pickle node - timestamp = strftime('%Y%m%d_%H%M%S') + timestamp = strftime("%Y%m%d_%H%M%S") if node._hierarchy: - suffix = '%s_%s_%s' % (timestamp, node._hierarchy, node._id) - batch_dir = os.path.join(node.base_dir, - node._hierarchy.split('.')[0], 'batch') + suffix = "%s_%s_%s" % (timestamp, node._hierarchy, node._id) + batch_dir = os.path.join(node.base_dir, node._hierarchy.split(".")[0], "batch") else: - suffix = '%s_%s' % (timestamp, node._id) - batch_dir = os.path.join(node.base_dir, 'batch') + suffix = "%s_%s" % (timestamp, node._id) + batch_dir = os.path.join(node.base_dir, "batch") if not os.path.exists(batch_dir): os.makedirs(batch_dir) - pkl_file = os.path.join(batch_dir, 'node_%s.pklz' % suffix) + pkl_file = os.path.join(batch_dir, "node_%s.pklz" % suffix) savepkl(pkl_file, dict(node=node, updatehash=updatehash)) mpl_backend = node.config["execution"]["matplotlib_backend"] # create python script to load and trap exception @@ -167,7 +173,7 @@ def create_pyscript(node, updatehash=False, store_exception=True): raise Exception(e) """ cmdstr = cmdstr % (mpl_backend, pkl_file, batch_dir, node.config, suffix) - pyscript = os.path.join(batch_dir, 'pyscript_%s.py' % suffix) - with open(pyscript, 'wt') as fp: + pyscript = os.path.join(batch_dir, "pyscript_%s.py" % suffix) + with open(pyscript, "wt") as fp: fp.writelines(cmdstr) return pyscript diff --git a/nipype/pkg_info.py b/nipype/pkg_info.py index 7367e4d8e5..fbba2f138c 100644 --- a/nipype/pkg_info.py +++ b/nipype/pkg_info.py @@ -5,11 +5,11 @@ import sys import subprocess -COMMIT_INFO_FNAME = 'COMMIT_INFO.txt' +COMMIT_INFO_FNAME = "COMMIT_INFO.txt" def pkg_commit_hash(pkg_path): - ''' Get short form of commit hash given directory `pkg_path` + """ Get short form of commit hash given directory `pkg_path` There should be a file called 'COMMIT_INFO.txt' in `pkg_path`. 
This is a file in INI file format, with at least one section: ``commit hash`` and two @@ -37,35 +37,36 @@ def pkg_commit_hash(pkg_path): Where we got the hash from - description hash_str : str short form of hash - ''' + """ # Try and get commit from written commit text file pth = os.path.join(pkg_path, COMMIT_INFO_FNAME) if not os.path.isfile(pth): - raise IOError('Missing commit info file %s' % pth) + raise IOError("Missing commit info file %s" % pth) cfg_parser = configparser.RawConfigParser() - with open(pth, encoding='utf-8') as fp: + with open(pth, encoding="utf-8") as fp: cfg_parser.read_file(fp) - archive_subst = cfg_parser.get('commit hash', 'archive_subst_hash') - if not archive_subst.startswith('$Format'): # it has been substituted - return 'archive substitution', archive_subst - install_subst = cfg_parser.get('commit hash', 'install_hash') - if install_subst != '': - return 'installation', install_subst + archive_subst = cfg_parser.get("commit hash", "archive_subst_hash") + if not archive_subst.startswith("$Format"): # it has been substituted + return "archive substitution", archive_subst + install_subst = cfg_parser.get("commit hash", "install_hash") + if install_subst != "": + return "installation", install_subst # maybe we are in a repository proc = subprocess.Popen( - 'git rev-parse --short HEAD', + "git rev-parse --short HEAD", stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=pkg_path, - shell=True) + shell=True, + ) repo_commit, _ = proc.communicate() if repo_commit: - return 'repository', repo_commit.decode().strip() - return '(none found)', '' + return "repository", repo_commit.decode().strip() + return "(none found)", "" def get_pkg_info(pkg_path): - ''' Return dict describing the context of this package + """ Return dict describing the context of this package Parameters ---------- @@ -76,7 +77,7 @@ def get_pkg_info(pkg_path): ------- context : dict with named parameters of interest - ''' + """ src, hsh = pkg_commit_hash(pkg_path) from .info import VERSION import networkx @@ -84,6 +85,7 @@ def get_pkg_info(pkg_path): import numpy import scipy import traits + return dict( pkg_path=pkg_path, commit_source=src, @@ -96,4 +98,5 @@ def get_pkg_info(pkg_path): scipy_version=scipy.__version__, networkx_version=networkx.__version__, nibabel_version=nibabel.__version__, - traits_version=traits.__version__) + traits_version=traits.__version__, + ) diff --git a/nipype/scripts/cli.py b/nipype/scripts/cli.py index ae21a789cb..73b599a978 100644 --- a/nipype/scripts/cli.py +++ b/nipype/scripts/cli.py @@ -25,13 +25,14 @@ def cli(): @cli.command(context_settings=CONTEXT_SETTINGS) -@click.argument('logdir', type=ExistingDirPath, callback=check_not_none) +@click.argument("logdir", type=ExistingDirPath, callback=check_not_none) @click.option( - '-r', - '--regex', + "-r", + "--regex", type=RegularExpression(), callback=check_not_none, - help='Regular expression to be searched in each traceback.') + help="Regular expression to be searched in each traceback.", +) def search(logdir, regex): """Search for tracebacks content. @@ -52,26 +53,27 @@ def search(logdir, regex): @cli.command(context_settings=CONTEXT_SETTINGS) -@click.argument('crashfile', type=ExistingFilePath, callback=check_not_none) +@click.argument("crashfile", type=ExistingFilePath, callback=check_not_none) @click.option( - '-r', '--rerun', is_flag=True, flag_value=True, help='Rerun crashed node.') + "-r", "--rerun", is_flag=True, flag_value=True, help="Rerun crashed node." 
+) @click.option( - '-d', - '--debug', + "-d", + "--debug", is_flag=True, flag_value=True, - help='Enable Python debugger when re-executing.') + help="Enable Python debugger when re-executing.", +) @click.option( - '-i', - '--ipydebug', + "-i", + "--ipydebug", is_flag=True, flag_value=True, - help='Enable IPython debugger when re-executing.') + help="Enable IPython debugger when re-executing.", +) @click.option( - '-w', - '--dir', - type=ExistingDirPath, - help='Directory where to run the node in.') + "-w", "--dir", type=ExistingDirPath, help="Directory where to run the node in." +) def crash(crashfile, rerun, debug, ipydebug, dir): """Display Nipype crash files. @@ -83,17 +85,19 @@ def crash(crashfile, rerun, debug, ipydebug, dir): """ from .crash_files import display_crash_file - debug = 'ipython' if ipydebug else debug - if debug == 'ipython': + debug = "ipython" if ipydebug else debug + if debug == "ipython": import sys from IPython.core import ultratb + sys.excepthook = ultratb.FormattedTB( - mode='Verbose', color_scheme='Linux', call_pdb=1) + mode="Verbose", color_scheme="Linux", call_pdb=1 + ) display_crash_file(crashfile, rerun, debug, dir) @cli.command(context_settings=CONTEXT_SETTINGS) -@click.argument('pklz_file', type=ExistingFilePath, callback=check_not_none) +@click.argument("pklz_file", type=ExistingFilePath, callback=check_not_none) def show(pklz_file): """Print the content of Nipype node .pklz file. @@ -108,20 +112,17 @@ def show(pklz_file): @cli.command(context_settings=UNKNOWN_OPTIONS) -@click.argument( - 'module', type=PythonModule(), required=False, callback=check_not_none) -@click.argument('interface', type=str, required=False) +@click.argument("module", type=PythonModule(), required=False, callback=check_not_none) +@click.argument("interface", type=str, required=False) @click.option( - '--list', + "--list", is_flag=True, flag_value=True, - help='List the available Interfaces inside the given module.') + help="List the available Interfaces inside the given module.", +) @click.option( - '-h', - '--help', - is_flag=True, - flag_value=True, - help='Show help message and exit.') + "-h", "--help", is_flag=True, flag_value=True, help="Show help message and exit." +) @click.pass_context def run(ctx, module, interface, list, help): """Run a Nipype Interface. @@ -142,18 +143,16 @@ def run(ctx, module, interface, list, help): # print the list of available interfaces for the given module elif (module_given and list) or (module_given and not interface): iface_names = list_interfaces(module) - click.echo('Available Interfaces:') + click.echo("Available Interfaces:") for if_name in iface_names: - click.echo(' {}'.format(if_name)) + click.echo(" {}".format(if_name)) # check the interface - elif (module_given and interface): + elif module_given and interface: # create the argument parser description = "Run {}".format(interface) - prog = " ".join( - [ctx.command_path, module.__name__, interface] + ctx.args) - iface_parser = argparse.ArgumentParser( - description=description, prog=prog) + prog = " ".join([ctx.command_path, module.__name__, interface] + ctx.args) + iface_parser = argparse.ArgumentParser(description=description, prog=prog) # instantiate the interface node = getattr(module, interface)() @@ -164,8 +163,10 @@ def run(ctx, module, interface, list, help): try: iface_parser.print_help() except: - print('An error ocurred when trying to print the full' - 'command help, printing usage.') + print( + "An error ocurred when trying to print the full" + "command help, printing usage." 
+ ) finally: iface_parser.print_usage() else: @@ -192,65 +193,77 @@ def convert(): "--interface", type=str, required=True, - help="Name of the Nipype interface to export.") + help="Name of the Nipype interface to export.", +) @click.option( "-m", "--module", type=PythonModule(), required=True, callback=check_not_none, - help="Module where the interface is defined.") + help="Module where the interface is defined.", +) @click.option( "-o", "--output", type=UnexistingFilePath, required=True, callback=check_not_none, - help="JSON file name where the Boutiques descriptor will be " - "written.") + help="JSON file name where the Boutiques descriptor will be " "written.", +) @click.option( "-c", "--container-image", required=True, type=str, - help="Name of the container image where the tool is installed.") + help="Name of the container image where the tool is installed.", +) @click.option( "-p", "--container-type", required=True, type=str, - help="Type of container image (Docker or Singularity).") + help="Type of container image (Docker or Singularity).", +) @click.option( "-x", "--container-index", type=str, help="Optional index where the image is available (e.g. " - "http://index.docker.io).") + "http://index.docker.io).", +) @click.option( "-g", "--ignore-inputs", type=str, multiple=True, - help="List of interface inputs to not include in the descriptor.") + help="List of interface inputs to not include in the descriptor.", +) @click.option( - "-v", - "--verbose", - is_flag=True, - flag_value=True, - help="Print information messages.") + "-v", "--verbose", is_flag=True, flag_value=True, help="Print information messages." +) @click.option( - "-a", - "--author", - type=str, - help="Author of the tool (required for publishing).") + "-a", "--author", type=str, help="Author of the tool (required for publishing)." +) @click.option( "-t", "--tags", type=str, help="JSON string containing tags to include in the descriptor," - "e.g. \"{\"key1\": \"value1\"}\"") -def boutiques(module, interface, container_image, container_type, output, - container_index, verbose, author, ignore_inputs, tags): + 'e.g. "{"key1": "value1"}"', +) +def boutiques( + module, + interface, + container_image, + container_type, + output, + container_index, + verbose, + author, + ignore_inputs, + tags, +): """Nipype to Boutiques exporter. See Boutiques specification at https://github.com/boutiques/schema. 
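Since the boutiques subcommand takes several required options, an invocation sketch may help. This assumes boutiques is registered under the convert group shown above; the module, interface, and container image names are examples only, not taken from this patch:

    # Hypothetical invocation of the click CLI above via click's test runner.
    from click.testing import CliRunner
    from nipype.scripts.cli import cli

    runner = CliRunner()
    result = runner.invoke(
        cli,
        [
            "convert", "boutiques",
            "--module", "nipype.interfaces.fsl",
            "--interface", "BET",
            "--output", "bet.json",
            "--container-image", "nipype/nipype:latest",
            "--container-type", "docker",
        ],
    )
    print(result.exit_code, result.output)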
@@ -259,5 +272,15 @@ def boutiques(module, interface, container_image, container_type, output, # Generates JSON string and saves it to file generate_boutiques_descriptor( - module, interface, container_image, container_type, container_index, - verbose, True, output, author, ignore_inputs, tags) + module, + interface, + container_image, + container_type, + container_index, + verbose, + True, + output, + author, + ignore_inputs, + tags, + ) diff --git a/nipype/scripts/crash_files.py b/nipype/scripts/crash_files.py index b7b83dff5c..84464ccddb 100644 --- a/nipype/scripts/crash_files.py +++ b/nipype/scripts/crash_files.py @@ -18,7 +18,7 @@ def load_pklz_traceback(crash_filepath): except: raise else: - return '\n'.join(data['traceback']) + return "\n".join(data["traceback"]) def iter_tracebacks(logdir): @@ -38,7 +38,7 @@ def iter_tracebacks(logdir): traceback: str """ - crash_files = sorted(glob(op.join(logdir, '*.pkl*'))) + crash_files = sorted(glob(op.join(logdir, "*.pkl*"))) for cf in crash_files: yield cf, load_pklz_traceback(cf) @@ -50,9 +50,9 @@ def display_crash_file(crashfile, rerun, debug, directory): crash_data = loadcrash(crashfile) node = None - if 'node' in crash_data: - node = crash_data['node'] - tb = crash_data['traceback'] + if "node" in crash_data: + node = crash_data["node"] + tb = crash_data["traceback"] print("\n") print("File: %s" % crashfile) @@ -67,7 +67,7 @@ def display_crash_file(crashfile, rerun, debug, directory): print(node.inputs) print("\n") print("Traceback: ") - print(''.join(tb)) + print("".join(tb)) print("\n") if rerun: @@ -76,12 +76,13 @@ def display_crash_file(crashfile, rerun, debug, directory): return print("Rerunning node") node.base_dir = directory - node.config = {'execution': {'crashdump_dir': '/tmp'}} + node.config = {"execution": {"crashdump_dir": "/tmp"}} try: node.run() except: - if debug and debug != 'ipython': + if debug and debug != "ipython": import pdb + pdb.post_mortem() else: raise diff --git a/nipype/scripts/instance.py b/nipype/scripts/instance.py index 1f44a43bda..0d736de796 100644 --- a/nipype/scripts/instance.py +++ b/nipype/scripts/instance.py @@ -29,8 +29,7 @@ def import_module(module_path): try: mod = importlib.import_module(module_path) except: - raise ImportError( - 'Error when importing object {}.'.format(module_path)) + raise ImportError("Error when importing object {}.".format(module_path)) else: return mod diff --git a/nipype/scripts/utils.py b/nipype/scripts/utils.py index 0315bfd64e..28e11cd8f6 100644 --- a/nipype/scripts/utils.py +++ b/nipype/scripts/utils.py @@ -4,7 +4,6 @@ """ - import re import click import json @@ -14,7 +13,7 @@ from ..interfaces.base.support import get_trait_desc # different context options -CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help']) +CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) UNKNOWN_OPTIONS = dict(allow_extra_args=True, ignore_unknown_options=True) # specification of existing ParamTypes @@ -26,32 +25,31 @@ # validators def check_not_none(ctx, param, value): if value is None: - raise click.BadParameter('got {}.'.format(value)) + raise click.BadParameter("got {}.".format(value)) return value # declare custom click.ParamType class RegularExpression(click.ParamType): - name = 'regex' + name = "regex" def convert(self, value, param, ctx): try: rex = re.compile(value, re.IGNORECASE) except ValueError: - self.fail('%s is not a valid regular expression.' % value, param, - ctx) + self.fail("%s is not a valid regular expression." 
% value, param, ctx) else: return rex class PythonModule(click.ParamType): - name = 'Python module path' + name = "Python module path" def convert(self, value, param, ctx): try: module = import_module(value) except ValueError: - self.fail('%s is not a valid Python module.' % value, param, ctx) + self.fail("%s is not a valid Python module." % value, param, ctx) else: return module @@ -60,15 +58,15 @@ def add_args_options(arg_parser, interface): """Add arguments to `arg_parser` to create a CLI for `interface`.""" inputs = interface.input_spec() for name, spec in sorted(interface.inputs.traits(transient=None).items()): - desc = "\n".join(get_trait_desc(inputs, name, spec))[len(name) + 2:] + desc = "\n".join(get_trait_desc(inputs, name, spec))[len(name) + 2 :] # Escape any % signs with a % - desc = desc.replace('%', '%%') + desc = desc.replace("%", "%%") args = {} has_multiple_inner_traits = False if spec.is_trait_type(traits.Bool): args["default"] = getattr(inputs, name) - args["action"] = 'store_true' + args["action"] = "store_true" # current support is for simple trait types if not spec.inner_traits: @@ -93,8 +91,9 @@ def add_args_options(arg_parser, interface): if spec.is_trait_type(InputMultiPath): args["nargs"] = "+" elif spec.is_trait_type(traits.List): - if (spec.trait_type.minlen == spec.trait_type.maxlen) and \ - spec.trait_type.maxlen: + if ( + spec.trait_type.minlen == spec.trait_type.maxlen + ) and spec.trait_type.maxlen: args["nargs"] = spec.trait_type.maxlen else: args["nargs"] = "+" @@ -103,22 +102,25 @@ def add_args_options(arg_parser, interface): if has_multiple_inner_traits: raise NotImplementedError( - ('This interface cannot be used. via the' - ' command line as multiple inner traits' - ' are currently not supported for mandatory' - ' argument: {}.'.format(name))) + ( + "This interface cannot be used. 
via the" + " command line as multiple inner traits" + " are currently not supported for mandatory" + " argument: {}.".format(name) + ) + ) arg_parser.add_argument(name, help=desc, **args) else: if spec.is_trait_type(InputMultiPath): args["nargs"] = "*" elif spec.is_trait_type(traits.List): - if (spec.trait_type.minlen == spec.trait_type.maxlen) and \ - spec.trait_type.maxlen: + if ( + spec.trait_type.minlen == spec.trait_type.maxlen + ) and spec.trait_type.maxlen: args["nargs"] = spec.trait_type.maxlen else: args["nargs"] = "*" if not has_multiple_inner_traits: - arg_parser.add_argument( - "--%s" % name, dest=name, help=desc, **args) + arg_parser.add_argument("--%s" % name, dest=name, help=desc, **args) return arg_parser diff --git a/nipype/sphinxext/plot_workflow.py b/nipype/sphinxext/plot_workflow.py index 740c121926..832e163b2f 100644 --- a/nipype/sphinxext/plot_workflow.py +++ b/nipype/sphinxext/plot_workflow.py @@ -120,6 +120,7 @@ try: from docutils.parsers.rst import directives from docutils.parsers.rst.directives.images import Image + align = Image.align except ImportError as e: missing_imports = [str(e)] @@ -130,6 +131,8 @@ def format_template(template, **kw): return jinja2.Template(template).render(**kw) + + except ImportError as e: missing_imports.append(str(e)) try: @@ -143,13 +146,21 @@ def format_template(template, **kw): missing_imports.append(str(e)) - -def wf_directive(name, arguments, options, content, lineno, content_offset, - block_text, state, state_machine): +def wf_directive( + name, + arguments, + options, + content, + lineno, + content_offset, + block_text, + state, + state_machine, +): if len(missing_imports) == 0: return run(arguments, content, options, state_machine, state, lineno) else: - raise ImportError('\n'.join(missing_imports)) + raise ImportError("\n".join(missing_imports)) wf_directive.__doc__ = __doc__ @@ -159,32 +170,32 @@ def _option_boolean(arg): if not arg or not arg.strip(): # no argument given, assume used as a flag return True - elif arg.strip().lower() in ('no', '0', 'false'): + elif arg.strip().lower() in ("no", "0", "false"): return False - elif arg.strip().lower() in ('yes', '1', 'true'): + elif arg.strip().lower() in ("yes", "1", "true"): return True else: raise ValueError('"%s" unknown boolean' % arg) def _option_graph2use(arg): - return directives.choice( - arg, ('hierarchical', 'colored', 'flat', 'orig', 'exec')) + return directives.choice(arg, ("hierarchical", "colored", "flat", "orig", "exec")) def _option_context(arg): - if arg in [None, 'reset', 'close-figs']: + if arg in [None, "reset", "close-figs"]: return arg raise ValueError("argument should be None or 'reset' or 'close-figs'") def _option_format(arg): - return directives.choice(arg, ('python', 'doctest')) + return directives.choice(arg, ("python", "doctest")) def _option_align(arg): return directives.choice( - arg, ("top", "middle", "bottom", "left", "center", "right")) + arg, ("top", "middle", "bottom", "left", "center", "right") + ) def mark_wf_labels(app, document): @@ -200,21 +211,24 @@ def mark_wf_labels(app, document): if labelid is None: continue node = document.ids[labelid] - if node.tagname in ('html_only', 'latex_only'): + if node.tagname in ("html_only", "latex_only"): for n in node: - if n.tagname == 'figure': + if n.tagname == "figure": sectname = name for c in n: - if c.tagname == 'caption': + if c.tagname == "caption": sectname = c.astext() break - node['ids'].remove(labelid) - node['names'].remove(name) - n['ids'].append(labelid) - n['names'].append(name) - 
document.settings.env.labels[name] = \ - document.settings.env.docname, labelid, sectname + node["ids"].remove(labelid) + node["names"].remove(name) + n["ids"].append(labelid) + n["names"].append(name) + document.settings.env.labels[name] = ( + document.settings.env.docname, + labelid, + sectname, + ) break @@ -224,38 +238,38 @@ def setup(app): setup.confdir = app.confdir options = { - 'alt': directives.unchanged, - 'height': directives.length_or_unitless, - 'width': directives.length_or_percentage_or_unitless, - 'scale': directives.nonnegative_int, - 'align': _option_align, - 'class': directives.class_option, - 'include-source': _option_boolean, - 'format': _option_format, - 'context': _option_context, - 'nofigs': directives.flag, - 'encoding': directives.encoding, - 'graph2use': _option_graph2use, - 'simple_form': _option_boolean + "alt": directives.unchanged, + "height": directives.length_or_unitless, + "width": directives.length_or_percentage_or_unitless, + "scale": directives.nonnegative_int, + "align": _option_align, + "class": directives.class_option, + "include-source": _option_boolean, + "format": _option_format, + "context": _option_context, + "nofigs": directives.flag, + "encoding": directives.encoding, + "graph2use": _option_graph2use, + "simple_form": _option_boolean, } - app.add_directive('workflow', wf_directive, True, (0, 2, False), **options) - app.add_config_value('graph2use', 'hierarchical', 'html') - app.add_config_value('simple_form', True, 'html') - app.add_config_value('wf_pre_code', None, True) - app.add_config_value('wf_include_source', False, True) - app.add_config_value('wf_html_show_source_link', True, True) - app.add_config_value('wf_formats', ['png', 'svg', 'pdf'], True) - app.add_config_value('wf_basedir', None, True) - app.add_config_value('wf_html_show_formats', True, True) - app.add_config_value('wf_rcparams', {}, True) - app.add_config_value('wf_apply_rcparams', False, True) - app.add_config_value('wf_working_directory', None, True) - app.add_config_value('wf_template', None, True) - - app.connect('doctree-read', mark_wf_labels) - - metadata = {'parallel_read_safe': True, 'parallel_write_safe': True} + app.add_directive("workflow", wf_directive, True, (0, 2, False), **options) + app.add_config_value("graph2use", "hierarchical", "html") + app.add_config_value("simple_form", True, "html") + app.add_config_value("wf_pre_code", None, True) + app.add_config_value("wf_include_source", False, True) + app.add_config_value("wf_html_show_source_link", True, True) + app.add_config_value("wf_formats", ["png", "svg", "pdf"], True) + app.add_config_value("wf_basedir", None, True) + app.add_config_value("wf_html_show_formats", True, True) + app.add_config_value("wf_rcparams", {}, True) + app.add_config_value("wf_apply_rcparams", False, True) + app.add_config_value("wf_working_directory", None, True) + app.add_config_value("wf_template", None, True) + + app.connect("doctree-read", mark_wf_labels) + + metadata = {"parallel_read_safe": True, "parallel_write_safe": True} return metadata @@ -267,11 +281,11 @@ def setup(app): def contains_doctest(text): try: # check if it's valid Python as-is - compile(text, '', 'exec') + compile(text, "", "exec") return False except SyntaxError: pass - r = re.compile(r'^\s*>>>', re.M) + r = re.compile(r"^\s*>>>", re.M) m = r.search(text) return bool(m) @@ -286,7 +300,7 @@ def unescape_doctest(text): code = "" for line in text.split("\n"): - m = re.match(r'^\s*(>>>|\.\.\.) (.*)$', line) + m = re.match(r"^\s*(>>>|\.\.\.) 
(.*)$", line) if m: code += m.group(2) + "\n" elif line.strip(): @@ -389,9 +403,10 @@ def out_of_date(original, derived): Returns True if derivative is out-of-date wrt original, both of which are full file paths. """ - return (not os.path.exists(derived) - or (os.path.exists(original) - and os.stat(derived).st_mtime < os.stat(original).st_mtime)) + return not os.path.exists(derived) or ( + os.path.exists(original) + and os.stat(derived).st_mtime < os.stat(original).st_mtime + ) class GraphError(RuntimeError): @@ -414,14 +429,16 @@ def run_code(code, code_path, ns=None, function_name=None): os.chdir(setup.config.wf_working_directory) except OSError as err: raise OSError( - str(err) + '\n`wf_working_directory` option in' - 'Sphinx configuration file must be a valid ' - 'directory path') + str(err) + "\n`wf_working_directory` option in" + "Sphinx configuration file must be a valid " + "directory path" + ) except TypeError as err: raise TypeError( - str(err) + '\n`wf_working_directory` option in ' - 'Sphinx configuration file must be a string or ' - 'None') + str(err) + "\n`wf_working_directory` option in " + "Sphinx configuration file must be a string or " + "None" + ) sys.path.insert(0, setup.config.wf_working_directory) elif code_path is not None: dirname = os.path.abspath(os.path.dirname(code_path)) @@ -450,7 +467,7 @@ def _dummy_print(*arg, **kwarg): if not ns: if setup.config.wf_pre_code is not None: exec(str(setup.config.wf_pre_code), ns) - ns['print'] = _dummy_print + ns["print"] = _dummy_print if "__main__" in code: exec("__name__ = '__main__'", ns) code = remove_coding(code) @@ -468,18 +485,18 @@ def _dummy_print(*arg, **kwarg): def get_wf_formats(config): - default_dpi = {'png': 80, 'hires.png': 200, 'pdf': 200} + default_dpi = {"png": 80, "hires.png": 200, "pdf": 200} formats = [] wf_formats = config.wf_formats if isinstance(wf_formats, (str, bytes)): # String Sphinx < 1.3, Split on , to mimic # Sphinx 1.3 and later. Sphinx 1.3 always # returns a list. - wf_formats = wf_formats.split(',') + wf_formats = wf_formats.split(",") for fmt in wf_formats: if isinstance(fmt, (str, bytes)): - if ':' in fmt: - suffix, dpi = fmt.split(':') + if ":" in fmt: + suffix, dpi = fmt.split(":") formats.append((str(suffix), int(dpi))) else: formats.append((fmt, default_dpi.get(fmt, 80))) @@ -490,17 +507,19 @@ def get_wf_formats(config): return formats -def render_figures(code, - code_path, - output_dir, - output_base, - context, - function_name, - config, - graph2use, - simple_form, - context_reset=False, - close_figs=False): +def render_figures( + code, + code_path, + output_dir, + output_base, + context, + function_name, + config, + graph2use, + simple_form, + context_reset=False, + close_figs=False, +): """ Run a nipype workflow creation script and save the graph in *output_dir*. 
Save the images under *output_dir* with file names derived from @@ -518,12 +537,10 @@ def render_figures(code, try: img_path = img.filename(fmt) imgname, ext = os.path.splitext(os.path.basename(img_path)) - ns['wf'].base_dir = output_dir - src = ns['wf'].write_graph( - imgname, - format=ext[1:], - graph2use=graph2use, - simple_form=simple_form) + ns["wf"].base_dir = output_dir + src = ns["wf"].write_graph( + imgname, format=ext[1:], graph2use=graph2use, simple_form=simple_form + ) shutil.move(src, img_path) except Exception: raise GraphError(traceback.format_exc()) @@ -536,31 +553,33 @@ def render_figures(code, def run(arguments, content, options, state_machine, state, lineno): document = state_machine.document config = document.settings.env.config - nofigs = 'nofigs' in options + nofigs = "nofigs" in options formats = get_wf_formats(config) default_fmt = formats[0][0] - graph2use = options.get('graph2use', 'hierarchical') - simple_form = options.get('simple_form', True) + graph2use = options.get("graph2use", "hierarchical") + simple_form = options.get("simple_form", True) - options.setdefault('include-source', config.wf_include_source) - keep_context = 'context' in options - context_opt = None if not keep_context else options['context'] + options.setdefault("include-source", config.wf_include_source) + keep_context = "context" in options + context_opt = None if not keep_context else options["context"] - rst_file = document.attributes['source'] + rst_file = document.attributes["source"] rst_dir = os.path.dirname(rst_file) if len(arguments): if not config.wf_basedir: - source_file_name = os.path.join(setup.app.builder.srcdir, - directives.uri(arguments[0])) + source_file_name = os.path.join( + setup.app.builder.srcdir, directives.uri(arguments[0]) + ) else: - source_file_name = os.path.join(setup.confdir, config.wf_basedir, - directives.uri(arguments[0])) + source_file_name = os.path.join( + setup.confdir, config.wf_basedir, directives.uri(arguments[0]) + ) # If there is content, it will be passed as a caption. - caption = '\n'.join(content) + caption = "\n".join(content) # If the optional function name is provided, use it if len(arguments) == 2: @@ -568,32 +587,32 @@ def run(arguments, content, options, state_machine, state, lineno): else: function_name = None - with io.open(source_file_name, 'r', encoding='utf-8') as fd: + with io.open(source_file_name, "r", encoding="utf-8") as fd: code = fd.read() output_base = os.path.basename(source_file_name) else: source_file_name = rst_file code = textwrap.dedent("\n".join([str(c) for c in content])) - counter = document.attributes.get('_wf_counter', 0) + 1 - document.attributes['_wf_counter'] = counter + counter = document.attributes.get("_wf_counter", 0) + 1 + document.attributes["_wf_counter"] = counter base, _ = os.path.splitext(os.path.basename(source_file_name)) - output_base = '%s-%d.py' % (base, counter) + output_base = "%s-%d.py" % (base, counter) function_name = None - caption = '' + caption = "" base, source_ext = os.path.splitext(output_base) - if source_ext in ('.py', '.rst', '.txt'): + if source_ext in (".py", ".rst", ".txt"): output_base = base else: - source_ext = '' + source_ext = "" # ensure that LaTeX includegraphics doesn't choke in foo.bar.pdf filenames - output_base = output_base.replace('.', '-') + output_base = output_base.replace(".", "-") # is it in doctest format? 
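(For reference, the decision below defers to `contains_doctest`, defined near the top of this file: code that compiles as plain Python is not a doctest, otherwise a leading `>>>` prompt marks it as one. A minimal illustration — the two snippets are invented, only the function comes from this module:)

```python
from nipype.sphinxext.plot_workflow import contains_doctest

plain = "wf = make_workflow()\nwf.run()"              # valid Python as-is
prompted = ">>> wf = make_workflow()\n>>> wf.run()"   # doctest-style prompts

assert not contains_doctest(plain)    # compiles cleanly -> not a doctest
assert contains_doctest(prompted)     # SyntaxError plus '>>>' -> doctest
```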
is_doctest = contains_doctest(code) - if 'format' in options: - if options['format'] == 'python': + if "format" in options: + if options["format"] == "python": is_doctest = False else: is_doctest = True @@ -606,7 +625,8 @@ def run(arguments, content, options, state_machine, state, lineno): # build_dir: where to place output files (temporarily) build_dir = os.path.join( - os.path.dirname(setup.app.doctreedir), 'wf_directive', source_rel_dir) + os.path.dirname(setup.app.doctreedir), "wf_directive", source_rel_dir + ) # get rid of .. in paths, also changes pathsep # see note in Python docs for warning about symbolic links on Windows. # need to compare source and dest paths at end @@ -616,22 +636,21 @@ def run(arguments, content, options, state_machine, state, lineno): os.makedirs(build_dir) # output_dir: final location in the builder's directory - dest_dir = os.path.abspath( - os.path.join(setup.app.builder.outdir, source_rel_dir)) + dest_dir = os.path.abspath(os.path.join(setup.app.builder.outdir, source_rel_dir)) if not os.path.exists(dest_dir): os.makedirs(dest_dir) # no problem here for me, but just use built-ins # how to link to files from the RST file dest_dir_link = os.path.join( - relpath(setup.confdir, rst_dir), source_rel_dir).replace( - os.path.sep, '/') + relpath(setup.confdir, rst_dir), source_rel_dir + ).replace(os.path.sep, "/") try: - build_dir_link = relpath(build_dir, rst_dir).replace(os.path.sep, '/') + build_dir_link = relpath(build_dir, rst_dir).replace(os.path.sep, "/") except ValueError: # on Windows, relpath raises ValueError when path and start are on # different mounts/drives build_dir_link = build_dir - source_link = dest_dir_link + '/' + output_base + source_ext + source_link = dest_dir_link + "/" + output_base + source_ext # make figures try: @@ -645,35 +664,34 @@ def run(arguments, content, options, state_machine, state, lineno): config, graph2use, simple_form, - context_reset=context_opt == 'reset', - close_figs=context_opt == 'close-figs') + context_reset=context_opt == "reset", + close_figs=context_opt == "close-figs", + ) errors = [] except GraphError as err: reporter = state.memo.reporter sm = reporter.system_message( 2, - "Exception occurred in plotting %s\n from %s:\n%s" % - (output_base, source_file_name, err), - line=lineno) + "Exception occurred in plotting %s\n from %s:\n%s" + % (output_base, source_file_name, err), + line=lineno, + ) results = [(code, [])] errors = [sm] # Properly indent the caption - caption = '\n'.join( - ' ' + line.strip() for line in caption.split('\n')) + caption = "\n".join(" " + line.strip() for line in caption.split("\n")) # generate output restructuredtext total_lines = [] for j, (code_piece, images) in enumerate(results): - if options['include-source']: + if options["include-source"]: if is_doctest: - lines = [''] - lines += [row.rstrip() for row in code_piece.split('\n')] + lines = [""] + lines += [row.rstrip() for row in code_piece.split("\n")] else: - lines = ['.. code-block:: python', ''] - lines += [ - ' %s' % row.rstrip() for row in code_piece.split('\n') - ] + lines = [".. 
code-block:: python", ""] + lines += [" %s" % row.rstrip() for row in code_piece.split("\n")] source_code = "\n".join(lines) else: source_code = "" @@ -682,8 +700,9 @@ def run(arguments, content, options, state_machine, state, lineno): images = [] opts = [ - ':%s: %s' % (key, val) for key, val in list(options.items()) - if key in ('alt', 'height', 'width', 'scale', 'align', 'class') + ":%s: %s" % (key, val) + for key, val in list(options.items()) + if key in ("alt", "height", "width", "scale", "align", "class") ] only_html = ".. only:: html" @@ -711,7 +730,8 @@ def run(arguments, content, options, state_machine, state, lineno): images=images, source_code=source_code, html_show_formats=config.wf_html_show_formats and len(images), - caption=caption) + caption=caption, + ) total_lines.extend(result.split("\n")) total_lines.extend("\n") @@ -730,7 +750,7 @@ def run(arguments, content, options, state_machine, state, lineno): # copy script (if necessary) target_name = os.path.join(dest_dir, output_base + source_ext) - with io.open(target_name, 'w', encoding="utf-8") as f: + with io.open(target_name, "w", encoding="utf-8") as f: if source_file_name == rst_file: code_escaped = unescape_doctest(code) else: diff --git a/nipype/testing/__init__.py b/nipype/testing/__init__.py index 2167e7e54a..c22de2cc7a 100644 --- a/nipype/testing/__init__.py +++ b/nipype/testing/__init__.py @@ -11,8 +11,8 @@ filepath = os.path.abspath(__file__) basedir = os.path.dirname(filepath) -funcfile = os.path.join(basedir, 'data', 'functional.nii') -anatfile = os.path.join(basedir, 'data', 'structural.nii') +funcfile = os.path.join(basedir, "data", "functional.nii") +anatfile = os.path.join(basedir, "data", "structural.nii") template = funcfile transfm = funcfile @@ -22,14 +22,14 @@ skipif = decorators.dec.skipif -def example_data(infile='functional.nii'): +def example_data(infile="functional.nii"): """returns path to empty example data files for doc tests it will raise an exception if filename is not in the directory""" filepath = os.path.abspath(__file__) basedir = os.path.dirname(filepath) - outfile = os.path.join(basedir, 'data', infile) + outfile = os.path.join(basedir, "data", infile) if not os.path.exists(outfile): - raise IOError('%s empty data file does NOT exist' % outfile) + raise IOError("%s empty data file does NOT exist" % outfile) return outfile diff --git a/nipype/testing/fixtures.py b/nipype/testing/fixtures.py index dabf3b3307..6f5b12495c 100644 --- a/nipype/testing/fixtures.py +++ b/nipype/testing/fixtures.py @@ -26,14 +26,13 @@ def analyze_pair_image_files(outdir, filelist, shape): def nifti_image_files(outdir, filelist, shape): for f in ensure_list(filelist): img = np.random.random(shape) - nb.Nifti1Image(img, np.eye(4), None).to_filename( - os.path.join(outdir, f)) + nb.Nifti1Image(img, np.eye(4), None).to_filename(os.path.join(outdir, f)) @pytest.fixture() def create_files_in_directory(request, tmpdir): cwd = tmpdir.chdir() - filelist = ['a.nii', 'b.nii'] + filelist = ["a.nii", "b.nii"] nifti_image_files(tmpdir.strpath, filelist, shape=(3, 3, 3, 4)) def change_directory(): @@ -46,7 +45,7 @@ def change_directory(): @pytest.fixture() def create_analyze_pair_file_in_directory(request, tmpdir): cwd = tmpdir.chdir() - filelist = ['a.hdr'] + filelist = ["a.hdr"] analyze_pair_image_files(tmpdir.strpath, filelist, shape=(3, 3, 3, 4)) def change_directory(): @@ -59,11 +58,11 @@ def change_directory(): @pytest.fixture() def create_files_in_directory_plus_dummy_file(request, tmpdir): cwd = tmpdir.chdir() - 
filelist = ['a.nii', 'b.nii'] + filelist = ["a.nii", "b.nii"] nifti_image_files(tmpdir.strpath, filelist, shape=(3, 3, 3, 4)) - tmpdir.join('reg.dat').write('dummy file') - filelist.append('reg.dat') + tmpdir.join("reg.dat").write("dummy file") + filelist.append("reg.dat") def change_directory(): cwd.chdir() @@ -75,7 +74,7 @@ def change_directory(): @pytest.fixture() def create_surf_file_in_directory(request, tmpdir): cwd = tmpdir.chdir() - surf = 'lh.a.nii' + surf = "lh.a.nii" nifti_image_files(tmpdir.strpath, filelist=surf, shape=(1, 100, 1)) def change_directory(): @@ -86,12 +85,12 @@ def change_directory(): def set_output_type(fsl_output_type): - prev_output_type = os.environ.get('FSLOUTPUTTYPE', None) + prev_output_type = os.environ.get("FSLOUTPUTTYPE", None) if fsl_output_type is not None: - os.environ['FSLOUTPUTTYPE'] = fsl_output_type - elif 'FSLOUTPUTTYPE' in os.environ: - del os.environ['FSLOUTPUTTYPE'] + os.environ["FSLOUTPUTTYPE"] = fsl_output_type + elif "FSLOUTPUTTYPE" in os.environ: + del os.environ["FSLOUTPUTTYPE"] FSLCommand.set_default_output_type(Info.output_type()) return prev_output_type @@ -101,7 +100,7 @@ def set_output_type(fsl_output_type): def create_files_in_directory_plus_output_type(request, tmpdir): func_prev_type = set_output_type(request.param) origdir = tmpdir.chdir() - filelist = ['a.nii', 'b.nii'] + filelist = ["a.nii", "b.nii"] nifti_image_files(tmpdir.strpath, filelist, shape=(3, 3, 3, 4)) out_ext = Info.output_type_to_ext(Info.output_type()) diff --git a/nipype/testing/tests/test_utils.py b/nipype/testing/tests/test_utils.py index 798f640805..fb2992b7e6 100644 --- a/nipype/testing/tests/test_utils.py +++ b/nipype/testing/tests/test_utils.py @@ -22,8 +22,9 @@ def test_tempfatfs(): @patch( - 'subprocess.check_call', - MagicMock(side_effect=subprocess.CalledProcessError('', ''))) + "subprocess.check_call", + MagicMock(side_effect=subprocess.CalledProcessError("", "")), +) def test_tempfatfs_calledprocesserror(): try: TempFATFS() @@ -34,8 +35,8 @@ def test_tempfatfs_calledprocesserror(): assert False -@patch('subprocess.check_call', MagicMock()) -@patch('subprocess.Popen', MagicMock(side_effect=OSError())) +@patch("subprocess.check_call", MagicMock()) +@patch("subprocess.Popen", MagicMock(side_effect=OSError())) def test_tempfatfs_oserror(): try: TempFATFS() diff --git a/nipype/testing/utils.py b/nipype/testing/utils.py index 6c36ce514e..e666a7586f 100644 --- a/nipype/testing/utils.py +++ b/nipype/testing/utils.py @@ -12,7 +12,7 @@ from tempfile import mkdtemp from ..utils.misc import package_check -__docformat__ = 'restructuredtext' +__docformat__ = "restructuredtext" import numpy as np import nibabel as nb @@ -37,28 +37,30 @@ def __init__(self, size_in_mbytes=8, delay=0.5): """ self.delay = delay self.tmpdir = mkdtemp() - self.dev_null = open(os.devnull, 'wb') + self.dev_null = open(os.devnull, "wb") - vfatfile = os.path.join(self.tmpdir, 'vfatblock') - self.vfatmount = os.path.join(self.tmpdir, 'vfatmount') - self.canary = os.path.join(self.vfatmount, '.canary') + vfatfile = os.path.join(self.tmpdir, "vfatblock") + self.vfatmount = os.path.join(self.tmpdir, "vfatmount") + self.canary = os.path.join(self.vfatmount, ".canary") - with open(vfatfile, 'wb') as fobj: - fobj.write(b'\x00' * (int(size_in_mbytes) << 20)) + with open(vfatfile, "wb") as fobj: + fobj.write(b"\x00" * (int(size_in_mbytes) << 20)) os.mkdir(self.vfatmount) - mkfs_args = ['mkfs.vfat', vfatfile] - mount_args = ['fusefat', '-o', 'rw+', '-f', vfatfile, self.vfatmount] + mkfs_args = 
["mkfs.vfat", vfatfile] + mount_args = ["fusefat", "-o", "rw+", "-f", vfatfile, self.vfatmount] try: subprocess.check_call( - args=mkfs_args, stdout=self.dev_null, stderr=self.dev_null) + args=mkfs_args, stdout=self.dev_null, stderr=self.dev_null + ) except CalledProcessError as e: raise IOError("mkfs.vfat failed") from e try: self.fusefat = subprocess.Popen( - args=mount_args, stdout=self.dev_null, stderr=self.dev_null) + args=mount_args, stdout=self.dev_null, stderr=self.dev_null + ) except OSError as e: raise IOError("fusefat is not installed") from e @@ -67,7 +69,7 @@ def __init__(self, size_in_mbytes=8, delay=0.5): if self.fusefat.poll() is not None: raise IOError("fusefat terminated too soon") - open(self.canary, 'wb').close() + open(self.canary, "wb").close() def __enter__(self): return self.vfatmount diff --git a/nipype/tests/test_nipype.py b/nipype/tests/test_nipype.py index 01fd081bc9..ab3499c8db 100644 --- a/nipype/tests/test_nipype.py +++ b/nipype/tests/test_nipype.py @@ -12,8 +12,10 @@ def test_nipype_info(): assert exception_not_raised -@pytest.mark.skipif(not get_nipype_gitversion(), - reason="not able to get version from get_nipype_gitversion") +@pytest.mark.skipif( + not get_nipype_gitversion(), + reason="not able to get version from get_nipype_gitversion", +) def test_git_hash(): # removing the first "g" from gitversion - get_nipype_gitversion()[1:] == get_info()['commit_hash'] + get_nipype_gitversion()[1:] == get_info()["commit_hash"] diff --git a/nipype/utils/config.py b/nipype/utils/config.py index 79515a5b7f..4537fdadc8 100644 --- a/nipype/utils/config.py +++ b/nipype/utils/config.py @@ -1,14 +1,14 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -''' +""" Created on 20 Apr 2010 logging options : INFO, DEBUG hash_method : content, timestamp @author: Chris Filo Gorgolewski -''' +""" import os import sys import errno @@ -24,11 +24,11 @@ from filelock import SoftFileLock CONFIG_DEPRECATIONS = { - 'profile_runtime': ('monitoring.enabled', '1.0'), - 'filemanip_level': ('logging.utils_level', '1.0'), + "profile_runtime": ("monitoring.enabled", "1.0"), + "filemanip_level": ("logging.utils_level", "1.0"), } -NUMPY_MMAP = LooseVersion(np.__version__) >= LooseVersion('1.12.0') +NUMPY_MMAP = LooseVersion(np.__version__) >= LooseVersion("1.12.0") DEFAULT_CONFIG_TPL = """\ [logging] @@ -91,25 +91,22 @@ def __init__(self, *args, **kwargs): self._config = configparser.ConfigParser() self._cwd = None - config_dir = os.path.expanduser('~/.nipype') - self.data_file = os.path.join(config_dir, 'nipype.json') + config_dir = os.path.expanduser("~/.nipype") + self.data_file = os.path.join(config_dir, "nipype.json") self.set_default_config() self._display = None self._resource_monitor = None - self._config.read( - [os.path.join(config_dir, 'nipype.cfg'), 'nipype.cfg']) + self._config.read([os.path.join(config_dir, "nipype.cfg"), "nipype.cfg"]) for option in CONFIG_DEPRECATIONS: - for section in ['execution', 'logging', 'monitoring']: + for section in ["execution", "logging", "monitoring"]: if self.has_option(section, option): - new_section, new_option = CONFIG_DEPRECATIONS[option][ - 0].split('.') + new_section, new_option = CONFIG_DEPRECATIONS[option][0].split(".") if not self.has_option(new_section, new_option): # Warn implicit in get - self.set(new_section, new_option, - self.get(section, option)) + self.set(new_section, new_option, self.get(section, option)) @property def cwd(self): @@ -120,34 
+117,39 @@ def cwd(self): try: self._cwd = os.getcwd() except OSError: - warn('Trying to run Nipype from a nonexistent directory "{}".'. - format(os.getenv('PWD', 'unknown')), RuntimeWarning) + warn( + 'Trying to run Nipype from a nonexistent directory "{}".'.format( + os.getenv("PWD", "unknown") + ), + RuntimeWarning, + ) raise return self._cwd def set_default_config(self): """Read default settings template and set into config object""" default_cfg = DEFAULT_CONFIG_TPL( - log_dir=os.path.expanduser( - '~'), # Get $HOME in a platform-agnostic way - crashdump_dir=self.cwd # Read cached cwd + log_dir=os.path.expanduser("~"), # Get $HOME in a platform-agnostic way + crashdump_dir=self.cwd, # Read cached cwd ) try: self._config.read_string(default_cfg) # Python >= 3.2 except AttributeError: from io import StringIO + self._config.readfp(StringIO(default_cfg)) def enable_debug_mode(self): """Enables debug configuration""" from .. import logging - self._config.set('execution', 'stop_on_first_crash', 'true') - self._config.set('execution', 'remove_unnecessary_outputs', 'false') - self._config.set('execution', 'keep_inputs', 'true') - self._config.set('logging', 'workflow_level', 'DEBUG') - self._config.set('logging', 'interface_level', 'DEBUG') - self._config.set('logging', 'utils_level', 'DEBUG') + + self._config.set("execution", "stop_on_first_crash", "true") + self._config.set("execution", "remove_unnecessary_outputs", "false") + self._config.set("execution", "keep_inputs", "true") + self._config.set("logging", "workflow_level", "DEBUG") + self._config.set("logging", "interface_level", "DEBUG") + self._config.set("logging", "utils_level", "DEBUG") logging.update_logging(self._config) def set_log_dir(self, log_dir): @@ -156,17 +158,17 @@ def set_log_dir(self, log_dir): This should be the first thing that is done before any nipype class with logging is imported. """ - self._config.set('logging', 'log_directory', log_dir) + self._config.set("logging", "log_directory", log_dir) def get(self, section, option, default=None): """Get an option""" if option in CONFIG_DEPRECATIONS: - msg = ('Config option "%s" has been deprecated as of nipype %s. ' - 'Please use "%s" instead.') % ( - option, CONFIG_DEPRECATIONS[option][1], - CONFIG_DEPRECATIONS[option][0]) + msg = ( + 'Config option "%s" has been deprecated as of nipype %s. ' + 'Please use "%s" instead.' + ) % (option, CONFIG_DEPRECATIONS[option][1], CONFIG_DEPRECATIONS[option][0]) warn(msg) - section, option = CONFIG_DEPRECATIONS[option][0].split('.') + section, option = CONFIG_DEPRECATIONS[option][0].split(".") if self._config.has_option(section, option): return self._config.get(section, option) @@ -178,12 +180,12 @@ def set(self, section, option, value): value = str(value) if option in CONFIG_DEPRECATIONS: - msg = ('Config option "%s" has been deprecated as of nipype %s. ' - 'Please use "%s" instead.') % ( - option, CONFIG_DEPRECATIONS[option][1], - CONFIG_DEPRECATIONS[option][0]) + msg = ( + 'Config option "%s" has been deprecated as of nipype %s. ' + 'Please use "%s" instead.' 
+ ) % (option, CONFIG_DEPRECATIONS[option][1], CONFIG_DEPRECATIONS[option][0]) warn(msg) - section, option = CONFIG_DEPRECATIONS[option][0].split('.') + section, option = CONFIG_DEPRECATIONS[option][0].split(".") return self._config.set(section, option, value) @@ -203,8 +205,8 @@ def get_data(self, key): """Read options file""" if not os.path.exists(self.data_file): return None - with SoftFileLock('%s.lock' % self.data_file): - with open(self.data_file, 'rt') as file: + with SoftFileLock("%s.lock" % self.data_file): + with open(self.data_file, "rt") as file: datadict = load(file) if key in datadict: return datadict[key] @@ -214,35 +216,36 @@ def save_data(self, key, value): """Store config flie""" datadict = {} if os.path.exists(self.data_file): - with SoftFileLock('%s.lock' % self.data_file): - with open(self.data_file, 'rt') as file: + with SoftFileLock("%s.lock" % self.data_file): + with open(self.data_file, "rt") as file: datadict = load(file) else: dirname = os.path.dirname(self.data_file) if not os.path.exists(dirname): mkdir_p(dirname) - with SoftFileLock('%s.lock' % self.data_file): - with open(self.data_file, 'wt') as file: + with SoftFileLock("%s.lock" % self.data_file): + with open(self.data_file, "wt") as file: datadict[key] = value dump(datadict, file) def update_config(self, config_dict): """Extend internal dictionary with config_dict""" - for section in ['execution', 'logging', 'check']: + for section in ["execution", "logging", "check"]: if section in config_dict: for key, val in list(config_dict[section].items()): - if not key.startswith('__'): + if not key.startswith("__"): self._config.set(section, key, str(val)) def update_matplotlib(self): """Set backend on matplotlib from options""" import matplotlib - matplotlib.use(self.get('execution', 'matplotlib_backend')) + + matplotlib.use(self.get("execution", "matplotlib_backend")) def enable_provenance(self): """Sets provenance storing on""" - self._config.set('execution', 'write_provenance', 'true') - self._config.set('execution', 'hash_method', 'content') + self._config.set("execution", "write_provenance", "true") + self._config.set("execution", "hash_method", "content") @property def resource_monitor(self): @@ -251,8 +254,9 @@ def resource_monitor(self): return self._resource_monitor # Cache config from nipype config - self.resource_monitor = str2bool( - self._config.get('monitoring', 'enabled')) or False + self.resource_monitor = ( + str2bool(self._config.get("monitoring", "enabled")) or False + ) return self._resource_monitor @resource_monitor.setter @@ -270,16 +274,21 @@ def resource_monitor(self, value): self._resource_monitor = False try: import psutil + self._resource_monitor = LooseVersion( - psutil.__version__) >= LooseVersion('5.0') + psutil.__version__ + ) >= LooseVersion("5.0") except ImportError: pass finally: if not self._resource_monitor: - warn('Could not enable the resource monitor: ' - 'psutil>=5.0 could not be imported.') - self._config.set('monitoring', 'enabled', - ('%s' % self._resource_monitor).lower()) + warn( + "Could not enable the resource monitor: " + "psutil>=5.0 could not be imported." 
+ ) + self._config.set( + "monitoring", "enabled", ("%s" % self._resource_monitor).lower() + ) def enable_resource_monitor(self): """Sets the resource monitor on""" @@ -300,13 +309,13 @@ def get_display(self): # shell=True, stdout=sp.DEVNULL)) if self._display is not None: - return ':%d' % self._display.new_display + return ":%d" % self._display.new_display sysdisplay = None - if self._config.has_option('execution', 'display_variable'): - sysdisplay = self._config.get('execution', 'display_variable') + if self._config.has_option("execution", "display_variable"): + sysdisplay = self._config.get("execution", "display_variable") - sysdisplay = sysdisplay or os.getenv('DISPLAY') + sysdisplay = sysdisplay or os.getenv("DISPLAY") if sysdisplay: from collections import namedtuple @@ -314,49 +323,51 @@ def _mock(): pass # Store a fake Xvfb object. Format - :[.] - ndisp = sysdisplay.split(':')[-1].split('.')[0] - Xvfb = namedtuple('Xvfb', ['new_display', 'stop']) + ndisp = sysdisplay.split(":")[-1].split(".")[0] + Xvfb = namedtuple("Xvfb", ["new_display", "stop"]) self._display = Xvfb(int(ndisp), _mock) return self.get_display() else: - if 'darwin' in sys.platform: + if "darwin" in sys.platform: raise RuntimeError( - 'Xvfb requires root permissions to run in OSX. Please ' - 'make sure that an X server is listening and set the ' - 'appropriate config on either $DISPLAY or nipype\'s ' + "Xvfb requires root permissions to run in OSX. Please " + "make sure that an X server is listening and set the " + "appropriate config on either $DISPLAY or nipype's " '"display_variable" config. Valid X servers include ' - 'VNC, XQuartz, or manually started Xvfb.') + "VNC, XQuartz, or manually started Xvfb." + ) # If $DISPLAY is empty, it confuses Xvfb so unset - if sysdisplay == '': - del os.environ['DISPLAY'] + if sysdisplay == "": + del os.environ["DISPLAY"] try: from xvfbwrapper import Xvfb except ImportError: raise RuntimeError( - 'A display server was required, but $DISPLAY is not ' - 'defined and Xvfb could not be imported.') + "A display server was required, but $DISPLAY is not " + "defined and Xvfb could not be imported." + ) - self._display = Xvfb(nolisten='tcp') + self._display = Xvfb(nolisten="tcp") self._display.start() # Older versions of xvfbwrapper used vdisplay_num - if not hasattr(self._display, 'new_display'): - setattr(self._display, 'new_display', - self._display.vdisplay_num) + if not hasattr(self._display, "new_display"): + setattr(self._display, "new_display", self._display.vdisplay_num) return self.get_display() def stop_display(self): """Closes the display if started""" if self._display is not None: from .. import logging + self._display.stop() - logging.getLogger('nipype.interface').debug( - 'Closing display (if virtual)') + logging.getLogger("nipype.interface").debug("Closing display (if virtual)") @atexit.register def free_display(): """Stop virtual display (if it is up)""" from .. import config + config.stop_display() diff --git a/nipype/utils/docparse.py b/nipype/utils/docparse.py index 041e3ad5cf..576a235892 100644 --- a/nipype/utils/docparse.py +++ b/nipype/utils/docparse.py @@ -35,11 +35,12 @@ def grab_doc(cmd, trap_error=True): """ proc = subprocess.Popen( - cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) + cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True + ) stdout, stderr = proc.communicate() if trap_error and proc.returncode: - msg = 'Attempting to run %s. Returned Error: %s' % (cmd, stderr) + msg = "Attempting to run %s. 
Returned Error: %s" % (cmd, stderr) raise IOError(msg) if stderr: @@ -76,7 +77,7 @@ def reverse_opt_map(opt_map): # The value is a tuple where the first element is the # format string and the second element is a docstring. value = value[0] - if (key != 'flags' and value is not None): + if key != "flags" and value is not None: revdict[value.split()[0]] = key return revdict @@ -104,21 +105,21 @@ def format_params(paramlist, otherlist=None): The formatted docstring. """ - hdr = 'Parameters' - delim = '----------' + hdr = "Parameters" + delim = "----------" paramlist.insert(0, delim) paramlist.insert(0, hdr) - params = '\n'.join(paramlist) + params = "\n".join(paramlist) otherparams = [] - doc = ''.join(params) + doc = "".join(params) if otherlist: - hdr = 'Others Parameters' - delim = '-----------------' + hdr = "Others Parameters" + delim = "-----------------" otherlist.insert(0, delim) otherlist.insert(0, hdr) - otherlist.insert(0, '\n') - otherparams = '\n'.join(otherlist) - doc = ''.join([doc, otherparams]) + otherlist.insert(0, "\n") + otherparams = "\n".join(otherlist) + doc = "".join([doc, otherparams]) return doc @@ -159,7 +160,7 @@ def insert_doc(doc, new_items): """ # Insert new_items after the Parameters header - doclist = doc.split('\n') + doclist = doc.split("\n") tmpdoc = doclist[:2] # Add new_items tmpdoc.extend(new_items) @@ -169,10 +170,10 @@ def insert_doc(doc, new_items): newdoc = [] for line in tmpdoc: newdoc.append(line) - newdoc.append('\n') + newdoc.append("\n") # We add one too many newlines, remove it. newdoc.pop(-1) - return ''.join(newdoc) + return "".join(newdoc) def build_doc(doc, opts): @@ -196,7 +197,7 @@ def build_doc(doc, opts): # Split doc into line elements. Generally, each line is an # individual flag/option. - doclist = doc.split('\n') + doclist = doc.split("\n") newdoc = [] flags_doc = [] for line in doclist: @@ -205,17 +206,17 @@ def build_doc(doc, opts): # Probably an empty line continue # For lines we care about, the first item is the flag - if ',' in linelist[0]: # sometimes flags are only seperated by comma - flag = linelist[0].split(',')[0] + if "," in linelist[0]: # sometimes flags are only seperated by comma + flag = linelist[0].split(",")[0] else: flag = linelist[0] attr = opts.get(flag) if attr is not None: # newline = line.replace(flag, attr) # Replace the flag with our attribute name - linelist[0] = '%s :\n ' % str(attr) + linelist[0] = "%s :\n " % str(attr) # Add some line formatting - newline = ' '.join(linelist) + newline = " ".join(linelist) newdoc.append(newline) else: if line[0].isspace(): @@ -249,20 +250,21 @@ def get_doc(cmd, opt_map, help_flag=None, trap_error=True): """ res = CommandLine( - 'which %s' % cmd.split(' ')[0], + "which %s" % cmd.split(" ")[0], resource_monitor=False, - terminal_output='allatonce').run() + terminal_output="allatonce", + ).run() cmd_path = res.runtime.stdout.strip() - if cmd_path == '': - raise Exception('Command %s not found' % cmd.split(' ')[0]) + if cmd_path == "": + raise Exception("Command %s not found" % cmd.split(" ")[0]) if help_flag: - cmd = ' '.join((cmd, help_flag)) + cmd = " ".join((cmd, help_flag)) doc = grab_doc(cmd, trap_error) opts = reverse_opt_map(opt_map) return build_doc(doc, opts) -def _parse_doc(doc, style=['--']): +def _parse_doc(doc, style=["--"]): """Parses a help doc for inputs Parameters @@ -279,16 +281,16 @@ def _parse_doc(doc, style=['--']): # Split doc into line elements. Generally, each line is an # individual flag/option. 
- doclist = doc.split('\n') + doclist = doc.split("\n") optmap = {} if isinstance(style, (str, bytes)): style = [style] for line in doclist: linelist = line.split() flag = [ - item for i, item in enumerate(linelist) - if i < 2 and any([item.startswith(s) - for s in style]) and len(item) > 1 + item + for i, item in enumerate(linelist) + if i < 2 and any([item.startswith(s) for s in style]) and len(item) > 1 ] if flag: if len(flag) == 1: @@ -303,11 +305,11 @@ def _parse_doc(doc, style=['--']): break flag = flag[style_idx.index(min(style_idx))] style_idx = min(style_idx) - optmap[flag.split(style[style_idx])[1]] = '%s %%s' % flag + optmap[flag.split(style[style_idx])[1]] = "%s %%s" % flag return optmap -def get_params_from_doc(cmd, style='--', help_flag=None, trap_error=True): +def get_params_from_doc(cmd, style="--", help_flag=None, trap_error=True): """Auto-generate option map from command line help Parameters @@ -329,14 +331,15 @@ def get_params_from_doc(cmd, style='--', help_flag=None, trap_error=True): """ res = CommandLine( - 'which %s' % cmd.split(' ')[0], + "which %s" % cmd.split(" ")[0], resource_monitor=False, - terminal_output='allatonce').run() + terminal_output="allatonce", + ).run() cmd_path = res.runtime.stdout.strip() - if cmd_path == '': - raise Exception('Command %s not found' % cmd.split(' ')[0]) + if cmd_path == "": + raise Exception("Command %s not found" % cmd.split(" ")[0]) if help_flag: - cmd = ' '.join((cmd, help_flag)) + cmd = " ".join((cmd, help_flag)) doc = grab_doc(cmd, trap_error) return _parse_doc(doc, style) diff --git a/nipype/utils/draw_gantt_chart.py b/nipype/utils/draw_gantt_chart.py index 6a037d34e0..0da078af84 100644 --- a/nipype/utils/draw_gantt_chart.py +++ b/nipype/utils/draw_gantt_chart.py @@ -17,13 +17,15 @@ try: import pandas as pd except ImportError: - print('Pandas not found; in order for full functionality of this module ' - 'install the pandas package') + print( + "Pandas not found; in order for full functionality of this module " + "install the pandas package" + ) pass def create_event_dict(start_time, nodes_list): - ''' + """ Function to generate a dictionary of event (start/finish) nodes from the nodes list @@ -39,7 +41,7 @@ def create_event_dict(start_time, nodes_list): events : dictionary a dictionary where the key is the timedelta from the start of the pipeline execution to the value node it accompanies - ''' + """ # Import packages import copy @@ -47,28 +49,28 @@ def create_event_dict(start_time, nodes_list): events = {} for node in nodes_list: # Format node fields - estimated_threads = node.get('num_threads', 1) - estimated_memory_gb = node.get('estimated_memory_gb', 1.0) - runtime_threads = node.get('runtime_threads', 0) - runtime_memory_gb = node.get('runtime_memory_gb', 0.0) + estimated_threads = node.get("num_threads", 1) + estimated_memory_gb = node.get("estimated_memory_gb", 1.0) + runtime_threads = node.get("runtime_threads", 0) + runtime_memory_gb = node.get("runtime_memory_gb", 0.0) # Init and format event-based nodes - node['estimated_threads'] = estimated_threads - node['estimated_memory_gb'] = estimated_memory_gb - node['runtime_threads'] = runtime_threads - node['runtime_memory_gb'] = runtime_memory_gb + node["estimated_threads"] = estimated_threads + node["estimated_memory_gb"] = estimated_memory_gb + node["runtime_threads"] = runtime_threads + node["runtime_memory_gb"] = runtime_memory_gb start_node = node finish_node = copy.deepcopy(node) - start_node['event'] = 'start' - finish_node['event'] = 'finish' + 
start_node["event"] = "start" + finish_node["event"] = "finish" # Get dictionary key - start_delta = (node['start'] - start_time).total_seconds() - finish_delta = (node['finish'] - start_time).total_seconds() + start_delta = (node["start"] - start_time).total_seconds() + finish_delta = (node["finish"] - start_time).total_seconds() # Populate dictionary if events.get(start_delta) or events.get(finish_delta): - err_msg = 'Event logged twice or events started at exact same time!' + err_msg = "Event logged twice or events started at exact same time!" raise KeyError(err_msg) events[start_delta] = start_node events[finish_delta] = finish_node @@ -78,7 +80,7 @@ def create_event_dict(start_time, nodes_list): def log_to_dict(logfile): - ''' + """ Function to extract log node dictionaries into a list of python dictionaries and return the list as well as the final node @@ -93,10 +95,10 @@ def log_to_dict(logfile): nodes_list : list a list of python dictionaries containing the runtime info for each nipype node - ''' + """ # Init variables - with open(logfile, 'r') as content: + with open(logfile, "r") as content: # read file separating each line lines = content.readlines() @@ -107,7 +109,7 @@ def log_to_dict(logfile): def calculate_resource_timeseries(events, resource): - ''' + """ Given as event dictionary, calculate the resources used as a timeseries @@ -125,7 +127,7 @@ def calculate_resource_timeseries(events, resource): time_series : pandas Series a pandas Series object that contains timestamps as the indices and the resource amount as values - ''' + """ # Import packages import pandas as pd @@ -136,14 +138,14 @@ def calculate_resource_timeseries(events, resource): # Iterate through the events for _, event in sorted(events.items()): - if event['event'] == "start": - if resource in event and event[resource] != 'Unknown': + if event["event"] == "start": + if resource in event and event[resource] != "Unknown": all_res += float(event[resource]) - current_time = event['start'] - elif event['event'] == "finish": - if resource in event and event[resource] != 'Unknown': + current_time = event["start"] + elif event["event"] == "finish": + if resource in event and event[resource] != "Unknown": all_res -= float(event[resource]) - current_time = event['finish'] + current_time = event["finish"] res[current_time] = all_res # Formulate the pandas timeseries @@ -157,7 +159,7 @@ def calculate_resource_timeseries(events, resource): def draw_lines(start, total_duration, minute_scale, scale): - ''' + """ Function to draw the minute line markers and timestamps Parameters @@ -178,10 +180,10 @@ def draw_lines(start, total_duration, minute_scale, scale): result : string the html-formatted string for producing the minutes-based time line markers - ''' + """ # Init variables - result = '' + result = "" next_line = 220 next_time = start num_lines = int(((total_duration // 60) // minute_scale) + 2) @@ -192,8 +194,11 @@ def draw_lines(start, total_duration, minute_scale, scale): new_line = "
" % next_line result += new_line # Time digits - time = "

%02d:%02d

" % \ - (next_line-20, next_time.hour, next_time.minute) + time = "

%02d:%02d

" % ( + next_line - 20, + next_time.hour, + next_time.minute, + ) result += time # Increment line spacing and digits next_line += minute_scale * scale @@ -203,9 +208,8 @@ def draw_lines(start, total_duration, minute_scale, scale): return result -def draw_nodes(start, nodes_list, cores, minute_scale, space_between_minutes, - colors): - ''' +def draw_nodes(start, nodes_list, cores, minute_scale, space_between_minutes, colors): + """ Function to return the html-string of the node drawings for the gantt chart @@ -235,28 +239,30 @@ def draw_nodes(start, nodes_list, cores, minute_scale, space_between_minutes, result : string the html-formatted string for producing the minutes-based time line markers - ''' + """ # Init variables - result = '' + result = "" scale = space_between_minutes / minute_scale space_between_minutes = space_between_minutes / scale end_times = [ - datetime.datetime(start.year, start.month, start.day, start.hour, - start.minute, start.second) for core in range(cores) + datetime.datetime( + start.year, start.month, start.day, start.hour, start.minute, start.second + ) + for core in range(cores) ] # For each node in the pipeline for node in nodes_list: # Get start and finish times - node_start = node['start'] - node_finish = node['finish'] + node_start = node["start"] + node_finish = node["finish"] # Calculate an offset and scale duration - offset = ((node_start - start).total_seconds() / 60) * scale * \ - space_between_minutes + 220 + offset = ( + (node_start - start).total_seconds() / 60 + ) * scale * space_between_minutes + 220 # Scale duration - scale_duration = ( - node['duration'] / 60) * scale * space_between_minutes + scale_duration = (node["duration"] / 60) * scale * space_between_minutes if scale_duration < 5: scale_duration = 5 scale_duration -= 2 @@ -266,32 +272,38 @@ def draw_nodes(start, nodes_list, cores, minute_scale, space_between_minutes, if end_times[core] < node_start: left += core * 30 end_times[core] = datetime.datetime( - node_finish.year, node_finish.month, node_finish.day, - node_finish.hour, node_finish.minute, node_finish.second) + node_finish.year, + node_finish.month, + node_finish.day, + node_finish.hour, + node_finish.minute, + node_finish.second, + ) break # Get color for node object color = random.choice(colors) - if 'error' in node: - color = 'red' + if "error" in node: + color = "red" # Setup dictionary for node html string insertion node_dict = { - 'left': left, - 'offset': offset, - 'scale_duration': scale_duration, - 'color': color, - 'node_name': node['name'], - 'node_dur': node['duration'] / 60.0, - 'node_start': node_start.strftime("%Y-%m-%d %H:%M:%S"), - 'node_finish': node_finish.strftime("%Y-%m-%d %H:%M:%S") + "left": left, + "offset": offset, + "scale_duration": scale_duration, + "color": color, + "node_name": node["name"], + "node_dur": node["duration"] / 60.0, + "node_start": node_start.strftime("%Y-%m-%d %H:%M:%S"), + "node_finish": node_finish.strftime("%Y-%m-%d %H:%M:%S"), } # Create new node string - new_node = "
" % \ - node_dict + new_node = ( + "
" % node_dict + ) # Append to output result result += new_node @@ -300,15 +312,21 @@ def draw_nodes(start, nodes_list, cores, minute_scale, space_between_minutes, return result -def draw_resource_bar(start_time, finish_time, time_series, - space_between_minutes, minute_scale, color, left, - resource): - ''' - ''' +def draw_resource_bar( + start_time, + finish_time, + time_series, + space_between_minutes, + minute_scale, + color, + left, + resource, +): + """ + """ # Memory header - result = "

<p class='time' style='top:198px;left:%dpx;'>%s</p>" \ % (left, resource) + result = "<p class='time' style='top:198px;left:%dpx;'>%s</p>
" % (left, resource) # Image scaling factors scale = space_between_minutes / minute_scale space_between_minutes = space_between_minutes / scale @@ -323,8 +341,9 @@ def draw_resource_bar(start_time, finish_time, time_series, else: ts_end = finish_time # Calculate offset from start at top - offset = ((ts_start-start_time).total_seconds() / 60.0) * scale * \ - space_between_minutes + 220 + offset = ( + (ts_start - start_time).total_seconds() / 60.0 + ) * scale * space_between_minutes + 220 # Scale duration duration_mins = (ts_end - ts_start).total_seconds() / 60.0 height = duration_mins * scale * space_between_minutes @@ -335,29 +354,31 @@ def draw_resource_bar(start_time, finish_time, time_series, # Bar width is proportional to resource amount width = amount * 20 - if resource.lower() == 'memory': - label = '%.3f GB' % amount + if resource.lower() == "memory": + label = "%.3f GB" % amount else: - label = '%d threads' % amount + label = "%d threads" % amount # Setup dictionary for bar html string insertion bar_dict = { - 'color': color, - 'height': height, - 'width': width, - 'offset': offset, - 'left': left, - 'label': label, - 'duration': duration_mins, - 'start': ts_start.strftime('%Y-%m-%d %H:%M:%S'), - 'finish': ts_end.strftime('%Y-%m-%d %H:%M:%S') + "color": color, + "height": height, + "width": width, + "offset": offset, + "left": left, + "label": label, + "duration": duration_mins, + "start": ts_start.strftime("%Y-%m-%d %H:%M:%S"), + "finish": ts_end.strftime("%Y-%m-%d %H:%M:%S"), } - bar_html = "
" + bar_html = ( + "
" + ) # Add another bar to html line result += bar_html % bar_dict @@ -365,12 +386,14 @@ def draw_resource_bar(start_time, finish_time, time_series, return result -def generate_gantt_chart(logfile, - cores, - minute_scale=10, - space_between_minutes=50, - colors=["#7070FF", "#4E4EB2", "#2D2D66", "#9B9BFF"]): - ''' +def generate_gantt_chart( + logfile, + cores, + minute_scale=10, + space_between_minutes=50, + colors=["#7070FF", "#4E4EB2", "#2D2D66", "#9B9BFF"], +): + """ Generates a gantt chart in html showing the workflow execution based on a callback log file. This script was intended to be used with the MultiprocPlugin. The following code shows how to set up the workflow in order to generate the log file: @@ -418,10 +441,10 @@ def generate_gantt_chart(logfile, # plugin_args={'n_procs':8, 'memory':12, 'status_callback': log_nodes_cb}) # generate_gantt_chart('callback.log', 8) - ''' + """ # add the html header - html_string = ''' + html_string = """